OpenCV 4.10.0 — Open Source Computer Vision

Macros
| #define | vaadd_vv_i16m1(...) __riscv_vaadd_vv_i16m1(__VA_ARGS__) |
| #define | vaadd_vv_i16m1_m(...) __riscv_vaadd_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vaadd_vv_i16m2(...) __riscv_vaadd_vv_i16m2(__VA_ARGS__) |
| #define | vaadd_vv_i16m2_m(...) __riscv_vaadd_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vaadd_vv_i16m4(...) __riscv_vaadd_vv_i16m4(__VA_ARGS__) |
| #define | vaadd_vv_i16m4_m(...) __riscv_vaadd_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vaadd_vv_i16m8(...) __riscv_vaadd_vv_i16m8(__VA_ARGS__) |
| #define | vaadd_vv_i16m8_m(...) __riscv_vaadd_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vaadd_vv_i16mf2(...) __riscv_vaadd_vv_i16mf2(__VA_ARGS__) |
| #define | vaadd_vv_i16mf2_m(...) __riscv_vaadd_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vaadd_vv_i16mf4(...) __riscv_vaadd_vv_i16mf4(__VA_ARGS__) |
| #define | vaadd_vv_i16mf4_m(...) __riscv_vaadd_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vaadd_vv_i32m1(...) __riscv_vaadd_vv_i32m1(__VA_ARGS__) |
| #define | vaadd_vv_i32m1_m(...) __riscv_vaadd_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vaadd_vv_i32m2(...) __riscv_vaadd_vv_i32m2(__VA_ARGS__) |
| #define | vaadd_vv_i32m2_m(...) __riscv_vaadd_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vaadd_vv_i32m4(...) __riscv_vaadd_vv_i32m4(__VA_ARGS__) |
| #define | vaadd_vv_i32m4_m(...) __riscv_vaadd_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vaadd_vv_i32m8(...) __riscv_vaadd_vv_i32m8(__VA_ARGS__) |
| #define | vaadd_vv_i32m8_m(...) __riscv_vaadd_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vaadd_vv_i32mf2(...) __riscv_vaadd_vv_i32mf2(__VA_ARGS__) |
| #define | vaadd_vv_i32mf2_m(...) __riscv_vaadd_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vaadd_vv_i64m1(...) __riscv_vaadd_vv_i64m1(__VA_ARGS__) |
| #define | vaadd_vv_i64m1_m(...) __riscv_vaadd_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vaadd_vv_i64m2(...) __riscv_vaadd_vv_i64m2(__VA_ARGS__) |
| #define | vaadd_vv_i64m2_m(...) __riscv_vaadd_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vaadd_vv_i64m4(...) __riscv_vaadd_vv_i64m4(__VA_ARGS__) |
| #define | vaadd_vv_i64m4_m(...) __riscv_vaadd_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vaadd_vv_i64m8(...) __riscv_vaadd_vv_i64m8(__VA_ARGS__) |
| #define | vaadd_vv_i64m8_m(...) __riscv_vaadd_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vaadd_vv_i8m1(...) __riscv_vaadd_vv_i8m1(__VA_ARGS__) |
| #define | vaadd_vv_i8m1_m(...) __riscv_vaadd_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vaadd_vv_i8m2(...) __riscv_vaadd_vv_i8m2(__VA_ARGS__) |
| #define | vaadd_vv_i8m2_m(...) __riscv_vaadd_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vaadd_vv_i8m4(...) __riscv_vaadd_vv_i8m4(__VA_ARGS__) |
| #define | vaadd_vv_i8m4_m(...) __riscv_vaadd_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vaadd_vv_i8m8(...) __riscv_vaadd_vv_i8m8(__VA_ARGS__) |
| #define | vaadd_vv_i8m8_m(...) __riscv_vaadd_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vaadd_vv_i8mf2(...) __riscv_vaadd_vv_i8mf2(__VA_ARGS__) |
| #define | vaadd_vv_i8mf2_m(...) __riscv_vaadd_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vaadd_vv_i8mf4(...) __riscv_vaadd_vv_i8mf4(__VA_ARGS__) |
| #define | vaadd_vv_i8mf4_m(...) __riscv_vaadd_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vaadd_vv_i8mf8(...) __riscv_vaadd_vv_i8mf8(__VA_ARGS__) |
| #define | vaadd_vv_i8mf8_m(...) __riscv_vaadd_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i16m1(...) __riscv_vaadd_vx_i16m1(__VA_ARGS__) |
| #define | vaadd_vx_i16m1_m(...) __riscv_vaadd_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i16m2(...) __riscv_vaadd_vx_i16m2(__VA_ARGS__) |
| #define | vaadd_vx_i16m2_m(...) __riscv_vaadd_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i16m4(...) __riscv_vaadd_vx_i16m4(__VA_ARGS__) |
| #define | vaadd_vx_i16m4_m(...) __riscv_vaadd_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i16m8(...) __riscv_vaadd_vx_i16m8(__VA_ARGS__) |
| #define | vaadd_vx_i16m8_m(...) __riscv_vaadd_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i16mf2(...) __riscv_vaadd_vx_i16mf2(__VA_ARGS__) |
| #define | vaadd_vx_i16mf2_m(...) __riscv_vaadd_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i16mf4(...) __riscv_vaadd_vx_i16mf4(__VA_ARGS__) |
| #define | vaadd_vx_i16mf4_m(...) __riscv_vaadd_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i32m1(...) __riscv_vaadd_vx_i32m1(__VA_ARGS__) |
| #define | vaadd_vx_i32m1_m(...) __riscv_vaadd_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i32m2(...) __riscv_vaadd_vx_i32m2(__VA_ARGS__) |
| #define | vaadd_vx_i32m2_m(...) __riscv_vaadd_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i32m4(...) __riscv_vaadd_vx_i32m4(__VA_ARGS__) |
| #define | vaadd_vx_i32m4_m(...) __riscv_vaadd_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i32m8(...) __riscv_vaadd_vx_i32m8(__VA_ARGS__) |
| #define | vaadd_vx_i32m8_m(...) __riscv_vaadd_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i32mf2(...) __riscv_vaadd_vx_i32mf2(__VA_ARGS__) |
| #define | vaadd_vx_i32mf2_m(...) __riscv_vaadd_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i64m1(...) __riscv_vaadd_vx_i64m1(__VA_ARGS__) |
| #define | vaadd_vx_i64m1_m(...) __riscv_vaadd_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i64m2(...) __riscv_vaadd_vx_i64m2(__VA_ARGS__) |
| #define | vaadd_vx_i64m2_m(...) __riscv_vaadd_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i64m4(...) __riscv_vaadd_vx_i64m4(__VA_ARGS__) |
| #define | vaadd_vx_i64m4_m(...) __riscv_vaadd_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i64m8(...) __riscv_vaadd_vx_i64m8(__VA_ARGS__) |
| #define | vaadd_vx_i64m8_m(...) __riscv_vaadd_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i8m1(...) __riscv_vaadd_vx_i8m1(__VA_ARGS__) |
| #define | vaadd_vx_i8m1_m(...) __riscv_vaadd_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i8m2(...) __riscv_vaadd_vx_i8m2(__VA_ARGS__) |
| #define | vaadd_vx_i8m2_m(...) __riscv_vaadd_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i8m4(...) __riscv_vaadd_vx_i8m4(__VA_ARGS__) |
| #define | vaadd_vx_i8m4_m(...) __riscv_vaadd_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i8m8(...) __riscv_vaadd_vx_i8m8(__VA_ARGS__) |
| #define | vaadd_vx_i8m8_m(...) __riscv_vaadd_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i8mf2(...) __riscv_vaadd_vx_i8mf2(__VA_ARGS__) |
| #define | vaadd_vx_i8mf2_m(...) __riscv_vaadd_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i8mf4(...) __riscv_vaadd_vx_i8mf4(__VA_ARGS__) |
| #define | vaadd_vx_i8mf4_m(...) __riscv_vaadd_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vaadd_vx_i8mf8(...) __riscv_vaadd_vx_i8mf8(__VA_ARGS__) |
| #define | vaadd_vx_i8mf8_m(...) __riscv_vaadd_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u16m1(...) __riscv_vaaddu_vv_u16m1(__VA_ARGS__) |
| #define | vaaddu_vv_u16m1_m(...) __riscv_vaaddu_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u16m2(...) __riscv_vaaddu_vv_u16m2(__VA_ARGS__) |
| #define | vaaddu_vv_u16m2_m(...) __riscv_vaaddu_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u16m4(...) __riscv_vaaddu_vv_u16m4(__VA_ARGS__) |
| #define | vaaddu_vv_u16m4_m(...) __riscv_vaaddu_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u16m8(...) __riscv_vaaddu_vv_u16m8(__VA_ARGS__) |
| #define | vaaddu_vv_u16m8_m(...) __riscv_vaaddu_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u16mf2(...) __riscv_vaaddu_vv_u16mf2(__VA_ARGS__) |
| #define | vaaddu_vv_u16mf2_m(...) __riscv_vaaddu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u16mf4(...) __riscv_vaaddu_vv_u16mf4(__VA_ARGS__) |
| #define | vaaddu_vv_u16mf4_m(...) __riscv_vaaddu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u32m1(...) __riscv_vaaddu_vv_u32m1(__VA_ARGS__) |
| #define | vaaddu_vv_u32m1_m(...) __riscv_vaaddu_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u32m2(...) __riscv_vaaddu_vv_u32m2(__VA_ARGS__) |
| #define | vaaddu_vv_u32m2_m(...) __riscv_vaaddu_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u32m4(...) __riscv_vaaddu_vv_u32m4(__VA_ARGS__) |
| #define | vaaddu_vv_u32m4_m(...) __riscv_vaaddu_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u32m8(...) __riscv_vaaddu_vv_u32m8(__VA_ARGS__) |
| #define | vaaddu_vv_u32m8_m(...) __riscv_vaaddu_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u32mf2(...) __riscv_vaaddu_vv_u32mf2(__VA_ARGS__) |
| #define | vaaddu_vv_u32mf2_m(...) __riscv_vaaddu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u64m1(...) __riscv_vaaddu_vv_u64m1(__VA_ARGS__) |
| #define | vaaddu_vv_u64m1_m(...) __riscv_vaaddu_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u64m2(...) __riscv_vaaddu_vv_u64m2(__VA_ARGS__) |
| #define | vaaddu_vv_u64m2_m(...) __riscv_vaaddu_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u64m4(...) __riscv_vaaddu_vv_u64m4(__VA_ARGS__) |
| #define | vaaddu_vv_u64m4_m(...) __riscv_vaaddu_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u64m8(...) __riscv_vaaddu_vv_u64m8(__VA_ARGS__) |
| #define | vaaddu_vv_u64m8_m(...) __riscv_vaaddu_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u8m1(...) __riscv_vaaddu_vv_u8m1(__VA_ARGS__) |
| #define | vaaddu_vv_u8m1_m(...) __riscv_vaaddu_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u8m2(...) __riscv_vaaddu_vv_u8m2(__VA_ARGS__) |
| #define | vaaddu_vv_u8m2_m(...) __riscv_vaaddu_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u8m4(...) __riscv_vaaddu_vv_u8m4(__VA_ARGS__) |
| #define | vaaddu_vv_u8m4_m(...) __riscv_vaaddu_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u8m8(...) __riscv_vaaddu_vv_u8m8(__VA_ARGS__) |
| #define | vaaddu_vv_u8m8_m(...) __riscv_vaaddu_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u8mf2(...) __riscv_vaaddu_vv_u8mf2(__VA_ARGS__) |
| #define | vaaddu_vv_u8mf2_m(...) __riscv_vaaddu_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u8mf4(...) __riscv_vaaddu_vv_u8mf4(__VA_ARGS__) |
| #define | vaaddu_vv_u8mf4_m(...) __riscv_vaaddu_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vaaddu_vv_u8mf8(...) __riscv_vaaddu_vv_u8mf8(__VA_ARGS__) |
| #define | vaaddu_vv_u8mf8_m(...) __riscv_vaaddu_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u16m1(...) __riscv_vaaddu_vx_u16m1(__VA_ARGS__) |
| #define | vaaddu_vx_u16m1_m(...) __riscv_vaaddu_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u16m2(...) __riscv_vaaddu_vx_u16m2(__VA_ARGS__) |
| #define | vaaddu_vx_u16m2_m(...) __riscv_vaaddu_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u16m4(...) __riscv_vaaddu_vx_u16m4(__VA_ARGS__) |
| #define | vaaddu_vx_u16m4_m(...) __riscv_vaaddu_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u16m8(...) __riscv_vaaddu_vx_u16m8(__VA_ARGS__) |
| #define | vaaddu_vx_u16m8_m(...) __riscv_vaaddu_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u16mf2(...) __riscv_vaaddu_vx_u16mf2(__VA_ARGS__) |
| #define | vaaddu_vx_u16mf2_m(...) __riscv_vaaddu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u16mf4(...) __riscv_vaaddu_vx_u16mf4(__VA_ARGS__) |
| #define | vaaddu_vx_u16mf4_m(...) __riscv_vaaddu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u32m1(...) __riscv_vaaddu_vx_u32m1(__VA_ARGS__) |
| #define | vaaddu_vx_u32m1_m(...) __riscv_vaaddu_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u32m2(...) __riscv_vaaddu_vx_u32m2(__VA_ARGS__) |
| #define | vaaddu_vx_u32m2_m(...) __riscv_vaaddu_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u32m4(...) __riscv_vaaddu_vx_u32m4(__VA_ARGS__) |
| #define | vaaddu_vx_u32m4_m(...) __riscv_vaaddu_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u32m8(...) __riscv_vaaddu_vx_u32m8(__VA_ARGS__) |
| #define | vaaddu_vx_u32m8_m(...) __riscv_vaaddu_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u32mf2(...) __riscv_vaaddu_vx_u32mf2(__VA_ARGS__) |
| #define | vaaddu_vx_u32mf2_m(...) __riscv_vaaddu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u64m1(...) __riscv_vaaddu_vx_u64m1(__VA_ARGS__) |
| #define | vaaddu_vx_u64m1_m(...) __riscv_vaaddu_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u64m2(...) __riscv_vaaddu_vx_u64m2(__VA_ARGS__) |
| #define | vaaddu_vx_u64m2_m(...) __riscv_vaaddu_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u64m4(...) __riscv_vaaddu_vx_u64m4(__VA_ARGS__) |
| #define | vaaddu_vx_u64m4_m(...) __riscv_vaaddu_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u64m8(...) __riscv_vaaddu_vx_u64m8(__VA_ARGS__) |
| #define | vaaddu_vx_u64m8_m(...) __riscv_vaaddu_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u8m1(...) __riscv_vaaddu_vx_u8m1(__VA_ARGS__) |
| #define | vaaddu_vx_u8m1_m(...) __riscv_vaaddu_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u8m2(...) __riscv_vaaddu_vx_u8m2(__VA_ARGS__) |
| #define | vaaddu_vx_u8m2_m(...) __riscv_vaaddu_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u8m4(...) __riscv_vaaddu_vx_u8m4(__VA_ARGS__) |
| #define | vaaddu_vx_u8m4_m(...) __riscv_vaaddu_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u8m8(...) __riscv_vaaddu_vx_u8m8(__VA_ARGS__) |
| #define | vaaddu_vx_u8m8_m(...) __riscv_vaaddu_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u8mf2(...) __riscv_vaaddu_vx_u8mf2(__VA_ARGS__) |
| #define | vaaddu_vx_u8mf2_m(...) __riscv_vaaddu_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u8mf4(...) __riscv_vaaddu_vx_u8mf4(__VA_ARGS__) |
| #define | vaaddu_vx_u8mf4_m(...) __riscv_vaaddu_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vaaddu_vx_u8mf8(...) __riscv_vaaddu_vx_u8mf8(__VA_ARGS__) |
| #define | vaaddu_vx_u8mf8_m(...) __riscv_vaaddu_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vadc_vvm_i16m1(...) __riscv_vadc_vvm_i16m1(__VA_ARGS__) |
| #define | vadc_vvm_i16m2(...) __riscv_vadc_vvm_i16m2(__VA_ARGS__) |
| #define | vadc_vvm_i16m4(...) __riscv_vadc_vvm_i16m4(__VA_ARGS__) |
| #define | vadc_vvm_i16m8(...) __riscv_vadc_vvm_i16m8(__VA_ARGS__) |
| #define | vadc_vvm_i16mf2(...) __riscv_vadc_vvm_i16mf2(__VA_ARGS__) |
| #define | vadc_vvm_i16mf4(...) __riscv_vadc_vvm_i16mf4(__VA_ARGS__) |
| #define | vadc_vvm_i32m1(...) __riscv_vadc_vvm_i32m1(__VA_ARGS__) |
| #define | vadc_vvm_i32m2(...) __riscv_vadc_vvm_i32m2(__VA_ARGS__) |
| #define | vadc_vvm_i32m4(...) __riscv_vadc_vvm_i32m4(__VA_ARGS__) |
| #define | vadc_vvm_i32m8(...) __riscv_vadc_vvm_i32m8(__VA_ARGS__) |
| #define | vadc_vvm_i32mf2(...) __riscv_vadc_vvm_i32mf2(__VA_ARGS__) |
| #define | vadc_vvm_i64m1(...) __riscv_vadc_vvm_i64m1(__VA_ARGS__) |
| #define | vadc_vvm_i64m2(...) __riscv_vadc_vvm_i64m2(__VA_ARGS__) |
| #define | vadc_vvm_i64m4(...) __riscv_vadc_vvm_i64m4(__VA_ARGS__) |
| #define | vadc_vvm_i64m8(...) __riscv_vadc_vvm_i64m8(__VA_ARGS__) |
| #define | vadc_vvm_i8m1(...) __riscv_vadc_vvm_i8m1(__VA_ARGS__) |
| #define | vadc_vvm_i8m2(...) __riscv_vadc_vvm_i8m2(__VA_ARGS__) |
| #define | vadc_vvm_i8m4(...) __riscv_vadc_vvm_i8m4(__VA_ARGS__) |
| #define | vadc_vvm_i8m8(...) __riscv_vadc_vvm_i8m8(__VA_ARGS__) |
| #define | vadc_vvm_i8mf2(...) __riscv_vadc_vvm_i8mf2(__VA_ARGS__) |
| #define | vadc_vvm_i8mf4(...) __riscv_vadc_vvm_i8mf4(__VA_ARGS__) |
| #define | vadc_vvm_i8mf8(...) __riscv_vadc_vvm_i8mf8(__VA_ARGS__) |
| #define | vadc_vvm_u16m1(...) __riscv_vadc_vvm_u16m1(__VA_ARGS__) |
| #define | vadc_vvm_u16m2(...) __riscv_vadc_vvm_u16m2(__VA_ARGS__) |
| #define | vadc_vvm_u16m4(...) __riscv_vadc_vvm_u16m4(__VA_ARGS__) |
| #define | vadc_vvm_u16m8(...) __riscv_vadc_vvm_u16m8(__VA_ARGS__) |
| #define | vadc_vvm_u16mf2(...) __riscv_vadc_vvm_u16mf2(__VA_ARGS__) |
| #define | vadc_vvm_u16mf4(...) __riscv_vadc_vvm_u16mf4(__VA_ARGS__) |
| #define | vadc_vvm_u32m1(...) __riscv_vadc_vvm_u32m1(__VA_ARGS__) |
| #define | vadc_vvm_u32m2(...) __riscv_vadc_vvm_u32m2(__VA_ARGS__) |
| #define | vadc_vvm_u32m4(...) __riscv_vadc_vvm_u32m4(__VA_ARGS__) |
| #define | vadc_vvm_u32m8(...) __riscv_vadc_vvm_u32m8(__VA_ARGS__) |
| #define | vadc_vvm_u32mf2(...) __riscv_vadc_vvm_u32mf2(__VA_ARGS__) |
| #define | vadc_vvm_u64m1(...) __riscv_vadc_vvm_u64m1(__VA_ARGS__) |
| #define | vadc_vvm_u64m2(...) __riscv_vadc_vvm_u64m2(__VA_ARGS__) |
| #define | vadc_vvm_u64m4(...) __riscv_vadc_vvm_u64m4(__VA_ARGS__) |
| #define | vadc_vvm_u64m8(...) __riscv_vadc_vvm_u64m8(__VA_ARGS__) |
| #define | vadc_vvm_u8m1(...) __riscv_vadc_vvm_u8m1(__VA_ARGS__) |
| #define | vadc_vvm_u8m2(...) __riscv_vadc_vvm_u8m2(__VA_ARGS__) |
| #define | vadc_vvm_u8m4(...) __riscv_vadc_vvm_u8m4(__VA_ARGS__) |
| #define | vadc_vvm_u8m8(...) __riscv_vadc_vvm_u8m8(__VA_ARGS__) |
| #define | vadc_vvm_u8mf2(...) __riscv_vadc_vvm_u8mf2(__VA_ARGS__) |
| #define | vadc_vvm_u8mf4(...) __riscv_vadc_vvm_u8mf4(__VA_ARGS__) |
| #define | vadc_vvm_u8mf8(...) __riscv_vadc_vvm_u8mf8(__VA_ARGS__) |
| #define | vadc_vxm_i16m1(...) __riscv_vadc_vxm_i16m1(__VA_ARGS__) |
| #define | vadc_vxm_i16m2(...) __riscv_vadc_vxm_i16m2(__VA_ARGS__) |
| #define | vadc_vxm_i16m4(...) __riscv_vadc_vxm_i16m4(__VA_ARGS__) |
| #define | vadc_vxm_i16m8(...) __riscv_vadc_vxm_i16m8(__VA_ARGS__) |
| #define | vadc_vxm_i16mf2(...) __riscv_vadc_vxm_i16mf2(__VA_ARGS__) |
| #define | vadc_vxm_i16mf4(...) __riscv_vadc_vxm_i16mf4(__VA_ARGS__) |
| #define | vadc_vxm_i32m1(...) __riscv_vadc_vxm_i32m1(__VA_ARGS__) |
| #define | vadc_vxm_i32m2(...) __riscv_vadc_vxm_i32m2(__VA_ARGS__) |
| #define | vadc_vxm_i32m4(...) __riscv_vadc_vxm_i32m4(__VA_ARGS__) |
| #define | vadc_vxm_i32m8(...) __riscv_vadc_vxm_i32m8(__VA_ARGS__) |
| #define | vadc_vxm_i32mf2(...) __riscv_vadc_vxm_i32mf2(__VA_ARGS__) |
| #define | vadc_vxm_i64m1(...) __riscv_vadc_vxm_i64m1(__VA_ARGS__) |
| #define | vadc_vxm_i64m2(...) __riscv_vadc_vxm_i64m2(__VA_ARGS__) |
| #define | vadc_vxm_i64m4(...) __riscv_vadc_vxm_i64m4(__VA_ARGS__) |
| #define | vadc_vxm_i64m8(...) __riscv_vadc_vxm_i64m8(__VA_ARGS__) |
| #define | vadc_vxm_i8m1(...) __riscv_vadc_vxm_i8m1(__VA_ARGS__) |
| #define | vadc_vxm_i8m2(...) __riscv_vadc_vxm_i8m2(__VA_ARGS__) |
| #define | vadc_vxm_i8m4(...) __riscv_vadc_vxm_i8m4(__VA_ARGS__) |
| #define | vadc_vxm_i8m8(...) __riscv_vadc_vxm_i8m8(__VA_ARGS__) |
| #define | vadc_vxm_i8mf2(...) __riscv_vadc_vxm_i8mf2(__VA_ARGS__) |
| #define | vadc_vxm_i8mf4(...) __riscv_vadc_vxm_i8mf4(__VA_ARGS__) |
| #define | vadc_vxm_i8mf8(...) __riscv_vadc_vxm_i8mf8(__VA_ARGS__) |
| #define | vadc_vxm_u16m1(...) __riscv_vadc_vxm_u16m1(__VA_ARGS__) |
| #define | vadc_vxm_u16m2(...) __riscv_vadc_vxm_u16m2(__VA_ARGS__) |
| #define | vadc_vxm_u16m4(...) __riscv_vadc_vxm_u16m4(__VA_ARGS__) |
| #define | vadc_vxm_u16m8(...) __riscv_vadc_vxm_u16m8(__VA_ARGS__) |
| #define | vadc_vxm_u16mf2(...) __riscv_vadc_vxm_u16mf2(__VA_ARGS__) |
| #define | vadc_vxm_u16mf4(...) __riscv_vadc_vxm_u16mf4(__VA_ARGS__) |
| #define | vadc_vxm_u32m1(...) __riscv_vadc_vxm_u32m1(__VA_ARGS__) |
| #define | vadc_vxm_u32m2(...) __riscv_vadc_vxm_u32m2(__VA_ARGS__) |
| #define | vadc_vxm_u32m4(...) __riscv_vadc_vxm_u32m4(__VA_ARGS__) |
| #define | vadc_vxm_u32m8(...) __riscv_vadc_vxm_u32m8(__VA_ARGS__) |
| #define | vadc_vxm_u32mf2(...) __riscv_vadc_vxm_u32mf2(__VA_ARGS__) |
| #define | vadc_vxm_u64m1(...) __riscv_vadc_vxm_u64m1(__VA_ARGS__) |
| #define | vadc_vxm_u64m2(...) __riscv_vadc_vxm_u64m2(__VA_ARGS__) |
| #define | vadc_vxm_u64m4(...) __riscv_vadc_vxm_u64m4(__VA_ARGS__) |
| #define | vadc_vxm_u64m8(...) __riscv_vadc_vxm_u64m8(__VA_ARGS__) |
| #define | vadc_vxm_u8m1(...) __riscv_vadc_vxm_u8m1(__VA_ARGS__) |
| #define | vadc_vxm_u8m2(...) __riscv_vadc_vxm_u8m2(__VA_ARGS__) |
| #define | vadc_vxm_u8m4(...) __riscv_vadc_vxm_u8m4(__VA_ARGS__) |
| #define | vadc_vxm_u8m8(...) __riscv_vadc_vxm_u8m8(__VA_ARGS__) |
| #define | vadc_vxm_u8mf2(...) __riscv_vadc_vxm_u8mf2(__VA_ARGS__) |
| #define | vadc_vxm_u8mf4(...) __riscv_vadc_vxm_u8mf4(__VA_ARGS__) |
| #define | vadc_vxm_u8mf8(...) __riscv_vadc_vxm_u8mf8(__VA_ARGS__) |
| #define | vadd_vv_i16m1(...) __riscv_vadd_vv_i16m1(__VA_ARGS__) |
| #define | vadd_vv_i16m1_m(...) __riscv_vadd_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vadd_vv_i16m2(...) __riscv_vadd_vv_i16m2(__VA_ARGS__) |
| #define | vadd_vv_i16m2_m(...) __riscv_vadd_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vadd_vv_i16m4(...) __riscv_vadd_vv_i16m4(__VA_ARGS__) |
| #define | vadd_vv_i16m4_m(...) __riscv_vadd_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vadd_vv_i16m8(...) __riscv_vadd_vv_i16m8(__VA_ARGS__) |
| #define | vadd_vv_i16m8_m(...) __riscv_vadd_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vadd_vv_i16mf2(...) __riscv_vadd_vv_i16mf2(__VA_ARGS__) |
| #define | vadd_vv_i16mf2_m(...) __riscv_vadd_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vadd_vv_i16mf4(...) __riscv_vadd_vv_i16mf4(__VA_ARGS__) |
| #define | vadd_vv_i16mf4_m(...) __riscv_vadd_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vadd_vv_i32m1(...) __riscv_vadd_vv_i32m1(__VA_ARGS__) |
| #define | vadd_vv_i32m1_m(...) __riscv_vadd_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vadd_vv_i32m2(...) __riscv_vadd_vv_i32m2(__VA_ARGS__) |
| #define | vadd_vv_i32m2_m(...) __riscv_vadd_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vadd_vv_i32m4(...) __riscv_vadd_vv_i32m4(__VA_ARGS__) |
| #define | vadd_vv_i32m4_m(...) __riscv_vadd_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vadd_vv_i32m8(...) __riscv_vadd_vv_i32m8(__VA_ARGS__) |
| #define | vadd_vv_i32m8_m(...) __riscv_vadd_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vadd_vv_i32mf2(...) __riscv_vadd_vv_i32mf2(__VA_ARGS__) |
| #define | vadd_vv_i32mf2_m(...) __riscv_vadd_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vadd_vv_i64m1(...) __riscv_vadd_vv_i64m1(__VA_ARGS__) |
| #define | vadd_vv_i64m1_m(...) __riscv_vadd_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vadd_vv_i64m2(...) __riscv_vadd_vv_i64m2(__VA_ARGS__) |
| #define | vadd_vv_i64m2_m(...) __riscv_vadd_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vadd_vv_i64m4(...) __riscv_vadd_vv_i64m4(__VA_ARGS__) |
| #define | vadd_vv_i64m4_m(...) __riscv_vadd_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vadd_vv_i64m8(...) __riscv_vadd_vv_i64m8(__VA_ARGS__) |
| #define | vadd_vv_i64m8_m(...) __riscv_vadd_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vadd_vv_i8m1(...) __riscv_vadd_vv_i8m1(__VA_ARGS__) |
| #define | vadd_vv_i8m1_m(...) __riscv_vadd_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vadd_vv_i8m2(...) __riscv_vadd_vv_i8m2(__VA_ARGS__) |
| #define | vadd_vv_i8m2_m(...) __riscv_vadd_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vadd_vv_i8m4(...) __riscv_vadd_vv_i8m4(__VA_ARGS__) |
| #define | vadd_vv_i8m4_m(...) __riscv_vadd_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vadd_vv_i8m8(...) __riscv_vadd_vv_i8m8(__VA_ARGS__) |
| #define | vadd_vv_i8m8_m(...) __riscv_vadd_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vadd_vv_i8mf2(...) __riscv_vadd_vv_i8mf2(__VA_ARGS__) |
| #define | vadd_vv_i8mf2_m(...) __riscv_vadd_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vadd_vv_i8mf4(...) __riscv_vadd_vv_i8mf4(__VA_ARGS__) |
| #define | vadd_vv_i8mf4_m(...) __riscv_vadd_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vadd_vv_i8mf8(...) __riscv_vadd_vv_i8mf8(__VA_ARGS__) |
| #define | vadd_vv_i8mf8_m(...) __riscv_vadd_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vadd_vv_u16m1(...) __riscv_vadd_vv_u16m1(__VA_ARGS__) |
| #define | vadd_vv_u16m1_m(...) __riscv_vadd_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vadd_vv_u16m2(...) __riscv_vadd_vv_u16m2(__VA_ARGS__) |
| #define | vadd_vv_u16m2_m(...) __riscv_vadd_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vadd_vv_u16m4(...) __riscv_vadd_vv_u16m4(__VA_ARGS__) |
| #define | vadd_vv_u16m4_m(...) __riscv_vadd_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vadd_vv_u16m8(...) __riscv_vadd_vv_u16m8(__VA_ARGS__) |
| #define | vadd_vv_u16m8_m(...) __riscv_vadd_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vadd_vv_u16mf2(...) __riscv_vadd_vv_u16mf2(__VA_ARGS__) |
| #define | vadd_vv_u16mf2_m(...) __riscv_vadd_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vadd_vv_u16mf4(...) __riscv_vadd_vv_u16mf4(__VA_ARGS__) |
| #define | vadd_vv_u16mf4_m(...) __riscv_vadd_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vadd_vv_u32m1(...) __riscv_vadd_vv_u32m1(__VA_ARGS__) |
| #define | vadd_vv_u32m1_m(...) __riscv_vadd_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vadd_vv_u32m2(...) __riscv_vadd_vv_u32m2(__VA_ARGS__) |
| #define | vadd_vv_u32m2_m(...) __riscv_vadd_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vadd_vv_u32m4(...) __riscv_vadd_vv_u32m4(__VA_ARGS__) |
| #define | vadd_vv_u32m4_m(...) __riscv_vadd_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vadd_vv_u32m8(...) __riscv_vadd_vv_u32m8(__VA_ARGS__) |
| #define | vadd_vv_u32m8_m(...) __riscv_vadd_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vadd_vv_u32mf2(...) __riscv_vadd_vv_u32mf2(__VA_ARGS__) |
| #define | vadd_vv_u32mf2_m(...) __riscv_vadd_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vadd_vv_u64m1(...) __riscv_vadd_vv_u64m1(__VA_ARGS__) |
| #define | vadd_vv_u64m1_m(...) __riscv_vadd_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vadd_vv_u64m2(...) __riscv_vadd_vv_u64m2(__VA_ARGS__) |
| #define | vadd_vv_u64m2_m(...) __riscv_vadd_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vadd_vv_u64m4(...) __riscv_vadd_vv_u64m4(__VA_ARGS__) |
| #define | vadd_vv_u64m4_m(...) __riscv_vadd_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vadd_vv_u64m8(...) __riscv_vadd_vv_u64m8(__VA_ARGS__) |
| #define | vadd_vv_u64m8_m(...) __riscv_vadd_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vadd_vv_u8m1(...) __riscv_vadd_vv_u8m1(__VA_ARGS__) |
| #define | vadd_vv_u8m1_m(...) __riscv_vadd_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vadd_vv_u8m2(...) __riscv_vadd_vv_u8m2(__VA_ARGS__) |
| #define | vadd_vv_u8m2_m(...) __riscv_vadd_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vadd_vv_u8m4(...) __riscv_vadd_vv_u8m4(__VA_ARGS__) |
| #define | vadd_vv_u8m4_m(...) __riscv_vadd_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vadd_vv_u8m8(...) __riscv_vadd_vv_u8m8(__VA_ARGS__) |
| #define | vadd_vv_u8m8_m(...) __riscv_vadd_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vadd_vv_u8mf2(...) __riscv_vadd_vv_u8mf2(__VA_ARGS__) |
| #define | vadd_vv_u8mf2_m(...) __riscv_vadd_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vadd_vv_u8mf4(...) __riscv_vadd_vv_u8mf4(__VA_ARGS__) |
| #define | vadd_vv_u8mf4_m(...) __riscv_vadd_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vadd_vv_u8mf8(...) __riscv_vadd_vv_u8mf8(__VA_ARGS__) |
| #define | vadd_vv_u8mf8_m(...) __riscv_vadd_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vadd_vx_i16m1(...) __riscv_vadd_vx_i16m1(__VA_ARGS__) |
| #define | vadd_vx_i16m1_m(...) __riscv_vadd_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vadd_vx_i16m2(...) __riscv_vadd_vx_i16m2(__VA_ARGS__) |
| #define | vadd_vx_i16m2_m(...) __riscv_vadd_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vadd_vx_i16m4(...) __riscv_vadd_vx_i16m4(__VA_ARGS__) |
| #define | vadd_vx_i16m4_m(...) __riscv_vadd_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vadd_vx_i16m8(...) __riscv_vadd_vx_i16m8(__VA_ARGS__) |
| #define | vadd_vx_i16m8_m(...) __riscv_vadd_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vadd_vx_i16mf2(...) __riscv_vadd_vx_i16mf2(__VA_ARGS__) |
| #define | vadd_vx_i16mf2_m(...) __riscv_vadd_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vadd_vx_i16mf4(...) __riscv_vadd_vx_i16mf4(__VA_ARGS__) |
| #define | vadd_vx_i16mf4_m(...) __riscv_vadd_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vadd_vx_i32m1(...) __riscv_vadd_vx_i32m1(__VA_ARGS__) |
| #define | vadd_vx_i32m1_m(...) __riscv_vadd_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vadd_vx_i32m2(...) __riscv_vadd_vx_i32m2(__VA_ARGS__) |
| #define | vadd_vx_i32m2_m(...) __riscv_vadd_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vadd_vx_i32m4(...) __riscv_vadd_vx_i32m4(__VA_ARGS__) |
| #define | vadd_vx_i32m4_m(...) __riscv_vadd_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vadd_vx_i32m8(...) __riscv_vadd_vx_i32m8(__VA_ARGS__) |
| #define | vadd_vx_i32m8_m(...) __riscv_vadd_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vadd_vx_i32mf2(...) __riscv_vadd_vx_i32mf2(__VA_ARGS__) |
| #define | vadd_vx_i32mf2_m(...) __riscv_vadd_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vadd_vx_i64m1(...) __riscv_vadd_vx_i64m1(__VA_ARGS__) |
| #define | vadd_vx_i64m1_m(...) __riscv_vadd_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vadd_vx_i64m2(...) __riscv_vadd_vx_i64m2(__VA_ARGS__) |
| #define | vadd_vx_i64m2_m(...) __riscv_vadd_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vadd_vx_i64m4(...) __riscv_vadd_vx_i64m4(__VA_ARGS__) |
| #define | vadd_vx_i64m4_m(...) __riscv_vadd_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vadd_vx_i64m8(...) __riscv_vadd_vx_i64m8(__VA_ARGS__) |
| #define | vadd_vx_i64m8_m(...) __riscv_vadd_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vadd_vx_i8m1(...) __riscv_vadd_vx_i8m1(__VA_ARGS__) |
| #define | vadd_vx_i8m1_m(...) __riscv_vadd_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vadd_vx_i8m2(...) __riscv_vadd_vx_i8m2(__VA_ARGS__) |
| #define | vadd_vx_i8m2_m(...) __riscv_vadd_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vadd_vx_i8m4(...) __riscv_vadd_vx_i8m4(__VA_ARGS__) |
| #define | vadd_vx_i8m4_m(...) __riscv_vadd_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vadd_vx_i8m8(...) __riscv_vadd_vx_i8m8(__VA_ARGS__) |
| #define | vadd_vx_i8m8_m(...) __riscv_vadd_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vadd_vx_i8mf2(...) __riscv_vadd_vx_i8mf2(__VA_ARGS__) |
| #define | vadd_vx_i8mf2_m(...) __riscv_vadd_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vadd_vx_i8mf4(...) __riscv_vadd_vx_i8mf4(__VA_ARGS__) |
| #define | vadd_vx_i8mf4_m(...) __riscv_vadd_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vadd_vx_i8mf8(...) __riscv_vadd_vx_i8mf8(__VA_ARGS__) |
| #define | vadd_vx_i8mf8_m(...) __riscv_vadd_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vadd_vx_u16m1(...) __riscv_vadd_vx_u16m1(__VA_ARGS__) |
| #define | vadd_vx_u16m1_m(...) __riscv_vadd_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vadd_vx_u16m2(...) __riscv_vadd_vx_u16m2(__VA_ARGS__) |
| #define | vadd_vx_u16m2_m(...) __riscv_vadd_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vadd_vx_u16m4(...) __riscv_vadd_vx_u16m4(__VA_ARGS__) |
| #define | vadd_vx_u16m4_m(...) __riscv_vadd_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vadd_vx_u16m8(...) __riscv_vadd_vx_u16m8(__VA_ARGS__) |
| #define | vadd_vx_u16m8_m(...) __riscv_vadd_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vadd_vx_u16mf2(...) __riscv_vadd_vx_u16mf2(__VA_ARGS__) |
| #define | vadd_vx_u16mf2_m(...) __riscv_vadd_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vadd_vx_u16mf4(...) __riscv_vadd_vx_u16mf4(__VA_ARGS__) |
| #define | vadd_vx_u16mf4_m(...) __riscv_vadd_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vadd_vx_u32m1(...) __riscv_vadd_vx_u32m1(__VA_ARGS__) |
| #define | vadd_vx_u32m1_m(...) __riscv_vadd_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vadd_vx_u32m2(...) __riscv_vadd_vx_u32m2(__VA_ARGS__) |
| #define | vadd_vx_u32m2_m(...) __riscv_vadd_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vadd_vx_u32m4(...) __riscv_vadd_vx_u32m4(__VA_ARGS__) |
| #define | vadd_vx_u32m4_m(...) __riscv_vadd_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vadd_vx_u32m8(...) __riscv_vadd_vx_u32m8(__VA_ARGS__) |
| #define | vadd_vx_u32m8_m(...) __riscv_vadd_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vadd_vx_u32mf2(...) __riscv_vadd_vx_u32mf2(__VA_ARGS__) |
| #define | vadd_vx_u32mf2_m(...) __riscv_vadd_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vadd_vx_u64m1(...) __riscv_vadd_vx_u64m1(__VA_ARGS__) |
| #define | vadd_vx_u64m1_m(...) __riscv_vadd_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vadd_vx_u64m2(...) __riscv_vadd_vx_u64m2(__VA_ARGS__) |
| #define | vadd_vx_u64m2_m(...) __riscv_vadd_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vadd_vx_u64m4(...) __riscv_vadd_vx_u64m4(__VA_ARGS__) |
| #define | vadd_vx_u64m4_m(...) __riscv_vadd_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vadd_vx_u64m8(...) __riscv_vadd_vx_u64m8(__VA_ARGS__) |
| #define | vadd_vx_u64m8_m(...) __riscv_vadd_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vadd_vx_u8m1(...) __riscv_vadd_vx_u8m1(__VA_ARGS__) |
| #define | vadd_vx_u8m1_m(...) __riscv_vadd_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vadd_vx_u8m2(...) __riscv_vadd_vx_u8m2(__VA_ARGS__) |
| #define | vadd_vx_u8m2_m(...) __riscv_vadd_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vadd_vx_u8m4(...) __riscv_vadd_vx_u8m4(__VA_ARGS__) |
| #define | vadd_vx_u8m4_m(...) __riscv_vadd_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vadd_vx_u8m8(...) __riscv_vadd_vx_u8m8(__VA_ARGS__) |
| #define | vadd_vx_u8m8_m(...) __riscv_vadd_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vadd_vx_u8mf2(...) __riscv_vadd_vx_u8mf2(__VA_ARGS__) |
| #define | vadd_vx_u8mf2_m(...) __riscv_vadd_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vadd_vx_u8mf4(...) __riscv_vadd_vx_u8mf4(__VA_ARGS__) |
| #define | vadd_vx_u8mf4_m(...) __riscv_vadd_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vadd_vx_u8mf8(...) __riscv_vadd_vx_u8mf8(__VA_ARGS__) |
| #define | vadd_vx_u8mf8_m(...) __riscv_vadd_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vand_vv_i16m1(...) __riscv_vand_vv_i16m1(__VA_ARGS__) |
| #define | vand_vv_i16m1_m(...) __riscv_vand_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vand_vv_i16m2(...) __riscv_vand_vv_i16m2(__VA_ARGS__) |
| #define | vand_vv_i16m2_m(...) __riscv_vand_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vand_vv_i16m4(...) __riscv_vand_vv_i16m4(__VA_ARGS__) |
| #define | vand_vv_i16m4_m(...) __riscv_vand_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vand_vv_i16m8(...) __riscv_vand_vv_i16m8(__VA_ARGS__) |
| #define | vand_vv_i16m8_m(...) __riscv_vand_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vand_vv_i16mf2(...) __riscv_vand_vv_i16mf2(__VA_ARGS__) |
| #define | vand_vv_i16mf2_m(...) __riscv_vand_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vand_vv_i16mf4(...) __riscv_vand_vv_i16mf4(__VA_ARGS__) |
| #define | vand_vv_i16mf4_m(...) __riscv_vand_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vand_vv_i32m1(...) __riscv_vand_vv_i32m1(__VA_ARGS__) |
| #define | vand_vv_i32m1_m(...) __riscv_vand_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vand_vv_i32m2(...) __riscv_vand_vv_i32m2(__VA_ARGS__) |
| #define | vand_vv_i32m2_m(...) __riscv_vand_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vand_vv_i32m4(...) __riscv_vand_vv_i32m4(__VA_ARGS__) |
| #define | vand_vv_i32m4_m(...) __riscv_vand_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vand_vv_i32m8(...) __riscv_vand_vv_i32m8(__VA_ARGS__) |
| #define | vand_vv_i32m8_m(...) __riscv_vand_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vand_vv_i32mf2(...) __riscv_vand_vv_i32mf2(__VA_ARGS__) |
| #define | vand_vv_i32mf2_m(...) __riscv_vand_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vand_vv_i64m1(...) __riscv_vand_vv_i64m1(__VA_ARGS__) |
| #define | vand_vv_i64m1_m(...) __riscv_vand_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vand_vv_i64m2(...) __riscv_vand_vv_i64m2(__VA_ARGS__) |
| #define | vand_vv_i64m2_m(...) __riscv_vand_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vand_vv_i64m4(...) __riscv_vand_vv_i64m4(__VA_ARGS__) |
| #define | vand_vv_i64m4_m(...) __riscv_vand_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vand_vv_i64m8(...) __riscv_vand_vv_i64m8(__VA_ARGS__) |
| #define | vand_vv_i64m8_m(...) __riscv_vand_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vand_vv_i8m1(...) __riscv_vand_vv_i8m1(__VA_ARGS__) |
| #define | vand_vv_i8m1_m(...) __riscv_vand_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vand_vv_i8m2(...) __riscv_vand_vv_i8m2(__VA_ARGS__) |
| #define | vand_vv_i8m2_m(...) __riscv_vand_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vand_vv_i8m4(...) __riscv_vand_vv_i8m4(__VA_ARGS__) |
| #define | vand_vv_i8m4_m(...) __riscv_vand_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vand_vv_i8m8(...) __riscv_vand_vv_i8m8(__VA_ARGS__) |
| #define | vand_vv_i8m8_m(...) __riscv_vand_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vand_vv_i8mf2(...) __riscv_vand_vv_i8mf2(__VA_ARGS__) |
| #define | vand_vv_i8mf2_m(...) __riscv_vand_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vand_vv_i8mf4(...) __riscv_vand_vv_i8mf4(__VA_ARGS__) |
| #define | vand_vv_i8mf4_m(...) __riscv_vand_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vand_vv_i8mf8(...) __riscv_vand_vv_i8mf8(__VA_ARGS__) |
| #define | vand_vv_i8mf8_m(...) __riscv_vand_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vand_vv_u16m1(...) __riscv_vand_vv_u16m1(__VA_ARGS__) |
| #define | vand_vv_u16m1_m(...) __riscv_vand_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vand_vv_u16m2(...) __riscv_vand_vv_u16m2(__VA_ARGS__) |
| #define | vand_vv_u16m2_m(...) __riscv_vand_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vand_vv_u16m4(...) __riscv_vand_vv_u16m4(__VA_ARGS__) |
| #define | vand_vv_u16m4_m(...) __riscv_vand_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vand_vv_u16m8(...) __riscv_vand_vv_u16m8(__VA_ARGS__) |
| #define | vand_vv_u16m8_m(...) __riscv_vand_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vand_vv_u16mf2(...) __riscv_vand_vv_u16mf2(__VA_ARGS__) |
| #define | vand_vv_u16mf2_m(...) __riscv_vand_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vand_vv_u16mf4(...) __riscv_vand_vv_u16mf4(__VA_ARGS__) |
| #define | vand_vv_u16mf4_m(...) __riscv_vand_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vand_vv_u32m1(...) __riscv_vand_vv_u32m1(__VA_ARGS__) |
| #define | vand_vv_u32m1_m(...) __riscv_vand_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vand_vv_u32m2(...) __riscv_vand_vv_u32m2(__VA_ARGS__) |
| #define | vand_vv_u32m2_m(...) __riscv_vand_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vand_vv_u32m4(...) __riscv_vand_vv_u32m4(__VA_ARGS__) |
| #define | vand_vv_u32m4_m(...) __riscv_vand_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vand_vv_u32m8(...) __riscv_vand_vv_u32m8(__VA_ARGS__) |
| #define | vand_vv_u32m8_m(...) __riscv_vand_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vand_vv_u32mf2(...) __riscv_vand_vv_u32mf2(__VA_ARGS__) |
| #define | vand_vv_u32mf2_m(...) __riscv_vand_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vand_vv_u64m1(...) __riscv_vand_vv_u64m1(__VA_ARGS__) |
| #define | vand_vv_u64m1_m(...) __riscv_vand_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vand_vv_u64m2(...) __riscv_vand_vv_u64m2(__VA_ARGS__) |
| #define | vand_vv_u64m2_m(...) __riscv_vand_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vand_vv_u64m4(...) __riscv_vand_vv_u64m4(__VA_ARGS__) |
| #define | vand_vv_u64m4_m(...) __riscv_vand_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vand_vv_u64m8(...) __riscv_vand_vv_u64m8(__VA_ARGS__) |
| #define | vand_vv_u64m8_m(...) __riscv_vand_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vand_vv_u8m1(...) __riscv_vand_vv_u8m1(__VA_ARGS__) |
| #define | vand_vv_u8m1_m(...) __riscv_vand_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vand_vv_u8m2(...) __riscv_vand_vv_u8m2(__VA_ARGS__) |
| #define | vand_vv_u8m2_m(...) __riscv_vand_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vand_vv_u8m4(...) __riscv_vand_vv_u8m4(__VA_ARGS__) |
| #define | vand_vv_u8m4_m(...) __riscv_vand_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vand_vv_u8m8(...) __riscv_vand_vv_u8m8(__VA_ARGS__) |
| #define | vand_vv_u8m8_m(...) __riscv_vand_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vand_vv_u8mf2(...) __riscv_vand_vv_u8mf2(__VA_ARGS__) |
| #define | vand_vv_u8mf2_m(...) __riscv_vand_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vand_vv_u8mf4(...) __riscv_vand_vv_u8mf4(__VA_ARGS__) |
| #define | vand_vv_u8mf4_m(...) __riscv_vand_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vand_vv_u8mf8(...) __riscv_vand_vv_u8mf8(__VA_ARGS__) |
| #define | vand_vv_u8mf8_m(...) __riscv_vand_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vand_vx_i16m1(...) __riscv_vand_vx_i16m1(__VA_ARGS__) |
| #define | vand_vx_i16m1_m(...) __riscv_vand_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vand_vx_i16m2(...) __riscv_vand_vx_i16m2(__VA_ARGS__) |
| #define | vand_vx_i16m2_m(...) __riscv_vand_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vand_vx_i16m4(...) __riscv_vand_vx_i16m4(__VA_ARGS__) |
| #define | vand_vx_i16m4_m(...) __riscv_vand_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vand_vx_i16m8(...) __riscv_vand_vx_i16m8(__VA_ARGS__) |
| #define | vand_vx_i16m8_m(...) __riscv_vand_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vand_vx_i16mf2(...) __riscv_vand_vx_i16mf2(__VA_ARGS__) |
| #define | vand_vx_i16mf2_m(...) __riscv_vand_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vand_vx_i16mf4(...) __riscv_vand_vx_i16mf4(__VA_ARGS__) |
| #define | vand_vx_i16mf4_m(...) __riscv_vand_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vand_vx_i32m1(...) __riscv_vand_vx_i32m1(__VA_ARGS__) |
| #define | vand_vx_i32m1_m(...) __riscv_vand_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vand_vx_i32m2(...) __riscv_vand_vx_i32m2(__VA_ARGS__) |
| #define | vand_vx_i32m2_m(...) __riscv_vand_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vand_vx_i32m4(...) __riscv_vand_vx_i32m4(__VA_ARGS__) |
| #define | vand_vx_i32m4_m(...) __riscv_vand_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vand_vx_i32m8(...) __riscv_vand_vx_i32m8(__VA_ARGS__) |
| #define | vand_vx_i32m8_m(...) __riscv_vand_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vand_vx_i32mf2(...) __riscv_vand_vx_i32mf2(__VA_ARGS__) |
| #define | vand_vx_i32mf2_m(...) __riscv_vand_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vand_vx_i64m1(...) __riscv_vand_vx_i64m1(__VA_ARGS__) |
| #define | vand_vx_i64m1_m(...) __riscv_vand_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vand_vx_i64m2(...) __riscv_vand_vx_i64m2(__VA_ARGS__) |
| #define | vand_vx_i64m2_m(...) __riscv_vand_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vand_vx_i64m4(...) __riscv_vand_vx_i64m4(__VA_ARGS__) |
| #define | vand_vx_i64m4_m(...) __riscv_vand_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vand_vx_i64m8(...) __riscv_vand_vx_i64m8(__VA_ARGS__) |
| #define | vand_vx_i64m8_m(...) __riscv_vand_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vand_vx_i8m1(...) __riscv_vand_vx_i8m1(__VA_ARGS__) |
| #define | vand_vx_i8m1_m(...) __riscv_vand_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vand_vx_i8m2(...) __riscv_vand_vx_i8m2(__VA_ARGS__) |
| #define | vand_vx_i8m2_m(...) __riscv_vand_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vand_vx_i8m4(...) __riscv_vand_vx_i8m4(__VA_ARGS__) |
| #define | vand_vx_i8m4_m(...) __riscv_vand_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vand_vx_i8m8(...) __riscv_vand_vx_i8m8(__VA_ARGS__) |
| #define | vand_vx_i8m8_m(...) __riscv_vand_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vand_vx_i8mf2(...) __riscv_vand_vx_i8mf2(__VA_ARGS__) |
| #define | vand_vx_i8mf2_m(...) __riscv_vand_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vand_vx_i8mf4(...) __riscv_vand_vx_i8mf4(__VA_ARGS__) |
| #define | vand_vx_i8mf4_m(...) __riscv_vand_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vand_vx_i8mf8(...) __riscv_vand_vx_i8mf8(__VA_ARGS__) |
| #define | vand_vx_i8mf8_m(...) __riscv_vand_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vand_vx_u16m1(...) __riscv_vand_vx_u16m1(__VA_ARGS__) |
| #define | vand_vx_u16m1_m(...) __riscv_vand_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vand_vx_u16m2(...) __riscv_vand_vx_u16m2(__VA_ARGS__) |
| #define | vand_vx_u16m2_m(...) __riscv_vand_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vand_vx_u16m4(...) __riscv_vand_vx_u16m4(__VA_ARGS__) |
| #define | vand_vx_u16m4_m(...) __riscv_vand_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vand_vx_u16m8(...) __riscv_vand_vx_u16m8(__VA_ARGS__) |
| #define | vand_vx_u16m8_m(...) __riscv_vand_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vand_vx_u16mf2(...) __riscv_vand_vx_u16mf2(__VA_ARGS__) |
| #define | vand_vx_u16mf2_m(...) __riscv_vand_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vand_vx_u16mf4(...) __riscv_vand_vx_u16mf4(__VA_ARGS__) |
| #define | vand_vx_u16mf4_m(...) __riscv_vand_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vand_vx_u32m1(...) __riscv_vand_vx_u32m1(__VA_ARGS__) |
| #define | vand_vx_u32m1_m(...) __riscv_vand_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vand_vx_u32m2(...) __riscv_vand_vx_u32m2(__VA_ARGS__) |
| #define | vand_vx_u32m2_m(...) __riscv_vand_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vand_vx_u32m4(...) __riscv_vand_vx_u32m4(__VA_ARGS__) |
| #define | vand_vx_u32m4_m(...) __riscv_vand_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vand_vx_u32m8(...) __riscv_vand_vx_u32m8(__VA_ARGS__) |
| #define | vand_vx_u32m8_m(...) __riscv_vand_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vand_vx_u32mf2(...) __riscv_vand_vx_u32mf2(__VA_ARGS__) |
| #define | vand_vx_u32mf2_m(...) __riscv_vand_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vand_vx_u64m1(...) __riscv_vand_vx_u64m1(__VA_ARGS__) |
| #define | vand_vx_u64m1_m(...) __riscv_vand_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vand_vx_u64m2(...) __riscv_vand_vx_u64m2(__VA_ARGS__) |
| #define | vand_vx_u64m2_m(...) __riscv_vand_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vand_vx_u64m4(...) __riscv_vand_vx_u64m4(__VA_ARGS__) |
| #define | vand_vx_u64m4_m(...) __riscv_vand_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vand_vx_u64m8(...) __riscv_vand_vx_u64m8(__VA_ARGS__) |
| #define | vand_vx_u64m8_m(...) __riscv_vand_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vand_vx_u8m1(...) __riscv_vand_vx_u8m1(__VA_ARGS__) |
| #define | vand_vx_u8m1_m(...) __riscv_vand_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vand_vx_u8m2(...) __riscv_vand_vx_u8m2(__VA_ARGS__) |
| #define | vand_vx_u8m2_m(...) __riscv_vand_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vand_vx_u8m4(...) __riscv_vand_vx_u8m4(__VA_ARGS__) |
| #define | vand_vx_u8m4_m(...) __riscv_vand_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vand_vx_u8m8(...) __riscv_vand_vx_u8m8(__VA_ARGS__) |
| #define | vand_vx_u8m8_m(...) __riscv_vand_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vand_vx_u8mf2(...) __riscv_vand_vx_u8mf2(__VA_ARGS__) |
| #define | vand_vx_u8mf2_m(...) __riscv_vand_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vand_vx_u8mf4(...) __riscv_vand_vx_u8mf4(__VA_ARGS__) |
| #define | vand_vx_u8mf4_m(...) __riscv_vand_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vand_vx_u8mf8(...) __riscv_vand_vx_u8mf8(__VA_ARGS__) |
| #define | vand_vx_u8mf8_m(...) __riscv_vand_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vasub_vv_i16m1(...) __riscv_vasub_vv_i16m1(__VA_ARGS__) |
| #define | vasub_vv_i16m1_m(...) __riscv_vasub_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vasub_vv_i16m2(...) __riscv_vasub_vv_i16m2(__VA_ARGS__) |
| #define | vasub_vv_i16m2_m(...) __riscv_vasub_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vasub_vv_i16m4(...) __riscv_vasub_vv_i16m4(__VA_ARGS__) |
| #define | vasub_vv_i16m4_m(...) __riscv_vasub_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vasub_vv_i16m8(...) __riscv_vasub_vv_i16m8(__VA_ARGS__) |
| #define | vasub_vv_i16m8_m(...) __riscv_vasub_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vasub_vv_i16mf2(...) __riscv_vasub_vv_i16mf2(__VA_ARGS__) |
| #define | vasub_vv_i16mf2_m(...) __riscv_vasub_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vasub_vv_i16mf4(...) __riscv_vasub_vv_i16mf4(__VA_ARGS__) |
| #define | vasub_vv_i16mf4_m(...) __riscv_vasub_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vasub_vv_i32m1(...) __riscv_vasub_vv_i32m1(__VA_ARGS__) |
| #define | vasub_vv_i32m1_m(...) __riscv_vasub_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vasub_vv_i32m2(...) __riscv_vasub_vv_i32m2(__VA_ARGS__) |
| #define | vasub_vv_i32m2_m(...) __riscv_vasub_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vasub_vv_i32m4(...) __riscv_vasub_vv_i32m4(__VA_ARGS__) |
| #define | vasub_vv_i32m4_m(...) __riscv_vasub_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vasub_vv_i32m8(...) __riscv_vasub_vv_i32m8(__VA_ARGS__) |
| #define | vasub_vv_i32m8_m(...) __riscv_vasub_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vasub_vv_i32mf2(...) __riscv_vasub_vv_i32mf2(__VA_ARGS__) |
| #define | vasub_vv_i32mf2_m(...) __riscv_vasub_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vasub_vv_i64m1(...) __riscv_vasub_vv_i64m1(__VA_ARGS__) |
| #define | vasub_vv_i64m1_m(...) __riscv_vasub_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vasub_vv_i64m2(...) __riscv_vasub_vv_i64m2(__VA_ARGS__) |
| #define | vasub_vv_i64m2_m(...) __riscv_vasub_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vasub_vv_i64m4(...) __riscv_vasub_vv_i64m4(__VA_ARGS__) |
| #define | vasub_vv_i64m4_m(...) __riscv_vasub_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vasub_vv_i64m8(...) __riscv_vasub_vv_i64m8(__VA_ARGS__) |
| #define | vasub_vv_i64m8_m(...) __riscv_vasub_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vasub_vv_i8m1(...) __riscv_vasub_vv_i8m1(__VA_ARGS__) |
| #define | vasub_vv_i8m1_m(...) __riscv_vasub_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vasub_vv_i8m2(...) __riscv_vasub_vv_i8m2(__VA_ARGS__) |
| #define | vasub_vv_i8m2_m(...) __riscv_vasub_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vasub_vv_i8m4(...) __riscv_vasub_vv_i8m4(__VA_ARGS__) |
| #define | vasub_vv_i8m4_m(...) __riscv_vasub_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vasub_vv_i8m8(...) __riscv_vasub_vv_i8m8(__VA_ARGS__) |
| #define | vasub_vv_i8m8_m(...) __riscv_vasub_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vasub_vv_i8mf2(...) __riscv_vasub_vv_i8mf2(__VA_ARGS__) |
| #define | vasub_vv_i8mf2_m(...) __riscv_vasub_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vasub_vv_i8mf4(...) __riscv_vasub_vv_i8mf4(__VA_ARGS__) |
| #define | vasub_vv_i8mf4_m(...) __riscv_vasub_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vasub_vv_i8mf8(...) __riscv_vasub_vv_i8mf8(__VA_ARGS__) |
| #define | vasub_vv_i8mf8_m(...) __riscv_vasub_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vasub_vx_i16m1(...) __riscv_vasub_vx_i16m1(__VA_ARGS__) |
| #define | vasub_vx_i16m1_m(...) __riscv_vasub_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vasub_vx_i16m2(...) __riscv_vasub_vx_i16m2(__VA_ARGS__) |
| #define | vasub_vx_i16m2_m(...) __riscv_vasub_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vasub_vx_i16m4(...) __riscv_vasub_vx_i16m4(__VA_ARGS__) |
| #define | vasub_vx_i16m4_m(...) __riscv_vasub_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vasub_vx_i16m8(...) __riscv_vasub_vx_i16m8(__VA_ARGS__) |
| #define | vasub_vx_i16m8_m(...) __riscv_vasub_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vasub_vx_i16mf2(...) __riscv_vasub_vx_i16mf2(__VA_ARGS__) |
| #define | vasub_vx_i16mf2_m(...) __riscv_vasub_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vasub_vx_i16mf4(...) __riscv_vasub_vx_i16mf4(__VA_ARGS__) |
| #define | vasub_vx_i16mf4_m(...) __riscv_vasub_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vasub_vx_i32m1(...) __riscv_vasub_vx_i32m1(__VA_ARGS__) |
| #define | vasub_vx_i32m1_m(...) __riscv_vasub_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vasub_vx_i32m2(...) __riscv_vasub_vx_i32m2(__VA_ARGS__) |
| #define | vasub_vx_i32m2_m(...) __riscv_vasub_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vasub_vx_i32m4(...) __riscv_vasub_vx_i32m4(__VA_ARGS__) |
| #define | vasub_vx_i32m4_m(...) __riscv_vasub_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vasub_vx_i32m8(...) __riscv_vasub_vx_i32m8(__VA_ARGS__) |
| #define | vasub_vx_i32m8_m(...) __riscv_vasub_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vasub_vx_i32mf2(...) __riscv_vasub_vx_i32mf2(__VA_ARGS__) |
| #define | vasub_vx_i32mf2_m(...) __riscv_vasub_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vasub_vx_i64m1(...) __riscv_vasub_vx_i64m1(__VA_ARGS__) |
| #define | vasub_vx_i64m1_m(...) __riscv_vasub_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vasub_vx_i64m2(...) __riscv_vasub_vx_i64m2(__VA_ARGS__) |
| #define | vasub_vx_i64m2_m(...) __riscv_vasub_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vasub_vx_i64m4(...) __riscv_vasub_vx_i64m4(__VA_ARGS__) |
| #define | vasub_vx_i64m4_m(...) __riscv_vasub_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vasub_vx_i64m8(...) __riscv_vasub_vx_i64m8(__VA_ARGS__) |
| #define | vasub_vx_i64m8_m(...) __riscv_vasub_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vasub_vx_i8m1(...) __riscv_vasub_vx_i8m1(__VA_ARGS__) |
| #define | vasub_vx_i8m1_m(...) __riscv_vasub_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vasub_vx_i8m2(...) __riscv_vasub_vx_i8m2(__VA_ARGS__) |
| #define | vasub_vx_i8m2_m(...) __riscv_vasub_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vasub_vx_i8m4(...) __riscv_vasub_vx_i8m4(__VA_ARGS__) |
| #define | vasub_vx_i8m4_m(...) __riscv_vasub_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vasub_vx_i8m8(...) __riscv_vasub_vx_i8m8(__VA_ARGS__) |
| #define | vasub_vx_i8m8_m(...) __riscv_vasub_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vasub_vx_i8mf2(...) __riscv_vasub_vx_i8mf2(__VA_ARGS__) |
| #define | vasub_vx_i8mf2_m(...) __riscv_vasub_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vasub_vx_i8mf4(...) __riscv_vasub_vx_i8mf4(__VA_ARGS__) |
| #define | vasub_vx_i8mf4_m(...) __riscv_vasub_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vasub_vx_i8mf8(...) __riscv_vasub_vx_i8mf8(__VA_ARGS__) |
| #define | vasub_vx_i8mf8_m(...) __riscv_vasub_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u16m1(...) __riscv_vasubu_vv_u16m1(__VA_ARGS__) |
| #define | vasubu_vv_u16m1_m(...) __riscv_vasubu_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u16m2(...) __riscv_vasubu_vv_u16m2(__VA_ARGS__) |
| #define | vasubu_vv_u16m2_m(...) __riscv_vasubu_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u16m4(...) __riscv_vasubu_vv_u16m4(__VA_ARGS__) |
| #define | vasubu_vv_u16m4_m(...) __riscv_vasubu_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u16m8(...) __riscv_vasubu_vv_u16m8(__VA_ARGS__) |
| #define | vasubu_vv_u16m8_m(...) __riscv_vasubu_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u16mf2(...) __riscv_vasubu_vv_u16mf2(__VA_ARGS__) |
| #define | vasubu_vv_u16mf2_m(...) __riscv_vasubu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u16mf4(...) __riscv_vasubu_vv_u16mf4(__VA_ARGS__) |
| #define | vasubu_vv_u16mf4_m(...) __riscv_vasubu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u32m1(...) __riscv_vasubu_vv_u32m1(__VA_ARGS__) |
| #define | vasubu_vv_u32m1_m(...) __riscv_vasubu_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u32m2(...) __riscv_vasubu_vv_u32m2(__VA_ARGS__) |
| #define | vasubu_vv_u32m2_m(...) __riscv_vasubu_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u32m4(...) __riscv_vasubu_vv_u32m4(__VA_ARGS__) |
| #define | vasubu_vv_u32m4_m(...) __riscv_vasubu_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u32m8(...) __riscv_vasubu_vv_u32m8(__VA_ARGS__) |
| #define | vasubu_vv_u32m8_m(...) __riscv_vasubu_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u32mf2(...) __riscv_vasubu_vv_u32mf2(__VA_ARGS__) |
| #define | vasubu_vv_u32mf2_m(...) __riscv_vasubu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u64m1(...) __riscv_vasubu_vv_u64m1(__VA_ARGS__) |
| #define | vasubu_vv_u64m1_m(...) __riscv_vasubu_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u64m2(...) __riscv_vasubu_vv_u64m2(__VA_ARGS__) |
| #define | vasubu_vv_u64m2_m(...) __riscv_vasubu_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u64m4(...) __riscv_vasubu_vv_u64m4(__VA_ARGS__) |
| #define | vasubu_vv_u64m4_m(...) __riscv_vasubu_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u64m8(...) __riscv_vasubu_vv_u64m8(__VA_ARGS__) |
| #define | vasubu_vv_u64m8_m(...) __riscv_vasubu_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u8m1(...) __riscv_vasubu_vv_u8m1(__VA_ARGS__) |
| #define | vasubu_vv_u8m1_m(...) __riscv_vasubu_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u8m2(...) __riscv_vasubu_vv_u8m2(__VA_ARGS__) |
| #define | vasubu_vv_u8m2_m(...) __riscv_vasubu_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u8m4(...) __riscv_vasubu_vv_u8m4(__VA_ARGS__) |
| #define | vasubu_vv_u8m4_m(...) __riscv_vasubu_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u8m8(...) __riscv_vasubu_vv_u8m8(__VA_ARGS__) |
| #define | vasubu_vv_u8m8_m(...) __riscv_vasubu_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u8mf2(...) __riscv_vasubu_vv_u8mf2(__VA_ARGS__) |
| #define | vasubu_vv_u8mf2_m(...) __riscv_vasubu_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u8mf4(...) __riscv_vasubu_vv_u8mf4(__VA_ARGS__) |
| #define | vasubu_vv_u8mf4_m(...) __riscv_vasubu_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vasubu_vv_u8mf8(...) __riscv_vasubu_vv_u8mf8(__VA_ARGS__) |
| #define | vasubu_vv_u8mf8_m(...) __riscv_vasubu_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u16m1(...) __riscv_vasubu_vx_u16m1(__VA_ARGS__) |
| #define | vasubu_vx_u16m1_m(...) __riscv_vasubu_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u16m2(...) __riscv_vasubu_vx_u16m2(__VA_ARGS__) |
| #define | vasubu_vx_u16m2_m(...) __riscv_vasubu_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u16m4(...) __riscv_vasubu_vx_u16m4(__VA_ARGS__) |
| #define | vasubu_vx_u16m4_m(...) __riscv_vasubu_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u16m8(...) __riscv_vasubu_vx_u16m8(__VA_ARGS__) |
| #define | vasubu_vx_u16m8_m(...) __riscv_vasubu_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u16mf2(...) __riscv_vasubu_vx_u16mf2(__VA_ARGS__) |
| #define | vasubu_vx_u16mf2_m(...) __riscv_vasubu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u16mf4(...) __riscv_vasubu_vx_u16mf4(__VA_ARGS__) |
| #define | vasubu_vx_u16mf4_m(...) __riscv_vasubu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u32m1(...) __riscv_vasubu_vx_u32m1(__VA_ARGS__) |
| #define | vasubu_vx_u32m1_m(...) __riscv_vasubu_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u32m2(...) __riscv_vasubu_vx_u32m2(__VA_ARGS__) |
| #define | vasubu_vx_u32m2_m(...) __riscv_vasubu_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u32m4(...) __riscv_vasubu_vx_u32m4(__VA_ARGS__) |
| #define | vasubu_vx_u32m4_m(...) __riscv_vasubu_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u32m8(...) __riscv_vasubu_vx_u32m8(__VA_ARGS__) |
| #define | vasubu_vx_u32m8_m(...) __riscv_vasubu_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u32mf2(...) __riscv_vasubu_vx_u32mf2(__VA_ARGS__) |
| #define | vasubu_vx_u32mf2_m(...) __riscv_vasubu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u64m1(...) __riscv_vasubu_vx_u64m1(__VA_ARGS__) |
| #define | vasubu_vx_u64m1_m(...) __riscv_vasubu_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u64m2(...) __riscv_vasubu_vx_u64m2(__VA_ARGS__) |
| #define | vasubu_vx_u64m2_m(...) __riscv_vasubu_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u64m4(...) __riscv_vasubu_vx_u64m4(__VA_ARGS__) |
| #define | vasubu_vx_u64m4_m(...) __riscv_vasubu_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u64m8(...) __riscv_vasubu_vx_u64m8(__VA_ARGS__) |
| #define | vasubu_vx_u64m8_m(...) __riscv_vasubu_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u8m1(...) __riscv_vasubu_vx_u8m1(__VA_ARGS__) |
| #define | vasubu_vx_u8m1_m(...) __riscv_vasubu_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u8m2(...) __riscv_vasubu_vx_u8m2(__VA_ARGS__) |
| #define | vasubu_vx_u8m2_m(...) __riscv_vasubu_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u8m4(...) __riscv_vasubu_vx_u8m4(__VA_ARGS__) |
| #define | vasubu_vx_u8m4_m(...) __riscv_vasubu_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u8m8(...) __riscv_vasubu_vx_u8m8(__VA_ARGS__) |
| #define | vasubu_vx_u8m8_m(...) __riscv_vasubu_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u8mf2(...) __riscv_vasubu_vx_u8mf2(__VA_ARGS__) |
| #define | vasubu_vx_u8mf2_m(...) __riscv_vasubu_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u8mf4(...) __riscv_vasubu_vx_u8mf4(__VA_ARGS__) |
| #define | vasubu_vx_u8mf4_m(...) __riscv_vasubu_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vasubu_vx_u8mf8(...) __riscv_vasubu_vx_u8mf8(__VA_ARGS__) |
| #define | vasubu_vx_u8mf8_m(...) __riscv_vasubu_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vcompress_vm_f16m1(mask, dest, src, vl) __riscv_vcompress_vm_f16m1_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_f16m2(mask, dest, src, vl) __riscv_vcompress_vm_f16m2_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_f16m4(mask, dest, src, vl) __riscv_vcompress_vm_f16m4_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_f16m8(mask, dest, src, vl) __riscv_vcompress_vm_f16m8_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_f16mf2(mask, dest, src, vl) __riscv_vcompress_vm_f16mf2_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_f16mf4(mask, dest, src, vl) __riscv_vcompress_vm_f16mf4_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_f32m1(mask, dest, src, vl) __riscv_vcompress_vm_f32m1_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_f32m2(mask, dest, src, vl) __riscv_vcompress_vm_f32m2_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_f32m4(mask, dest, src, vl) __riscv_vcompress_vm_f32m4_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_f32m8(mask, dest, src, vl) __riscv_vcompress_vm_f32m8_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_f32mf2(mask, dest, src, vl) __riscv_vcompress_vm_f32mf2_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_f64m1(mask, dest, src, vl) __riscv_vcompress_vm_f64m1_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_f64m2(mask, dest, src, vl) __riscv_vcompress_vm_f64m2_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_f64m4(mask, dest, src, vl) __riscv_vcompress_vm_f64m4_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_f64m8(mask, dest, src, vl) __riscv_vcompress_vm_f64m8_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i16m1(mask, dest, src, vl) __riscv_vcompress_vm_i16m1_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i16m2(mask, dest, src, vl) __riscv_vcompress_vm_i16m2_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i16m4(mask, dest, src, vl) __riscv_vcompress_vm_i16m4_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i16m8(mask, dest, src, vl) __riscv_vcompress_vm_i16m8_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i16mf2(mask, dest, src, vl) __riscv_vcompress_vm_i16mf2_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i16mf4(mask, dest, src, vl) __riscv_vcompress_vm_i16mf4_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i32m1(mask, dest, src, vl) __riscv_vcompress_vm_i32m1_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i32m2(mask, dest, src, vl) __riscv_vcompress_vm_i32m2_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i32m4(mask, dest, src, vl) __riscv_vcompress_vm_i32m4_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i32m8(mask, dest, src, vl) __riscv_vcompress_vm_i32m8_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i32mf2(mask, dest, src, vl) __riscv_vcompress_vm_i32mf2_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i64m1(mask, dest, src, vl) __riscv_vcompress_vm_i64m1_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i64m2(mask, dest, src, vl) __riscv_vcompress_vm_i64m2_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i64m4(mask, dest, src, vl) __riscv_vcompress_vm_i64m4_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i64m8(mask, dest, src, vl) __riscv_vcompress_vm_i64m8_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i8m1(mask, dest, src, vl) __riscv_vcompress_vm_i8m1_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i8m2(mask, dest, src, vl) __riscv_vcompress_vm_i8m2_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i8m4(mask, dest, src, vl) __riscv_vcompress_vm_i8m4_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i8m8(mask, dest, src, vl) __riscv_vcompress_vm_i8m8_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i8mf2(mask, dest, src, vl) __riscv_vcompress_vm_i8mf2_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i8mf4(mask, dest, src, vl) __riscv_vcompress_vm_i8mf4_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_i8mf8(mask, dest, src, vl) __riscv_vcompress_vm_i8mf8_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u16m1(mask, dest, src, vl) __riscv_vcompress_vm_u16m1_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u16m2(mask, dest, src, vl) __riscv_vcompress_vm_u16m2_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u16m4(mask, dest, src, vl) __riscv_vcompress_vm_u16m4_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u16m8(mask, dest, src, vl) __riscv_vcompress_vm_u16m8_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u16mf2(mask, dest, src, vl) __riscv_vcompress_vm_u16mf2_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u16mf4(mask, dest, src, vl) __riscv_vcompress_vm_u16mf4_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u32m1(mask, dest, src, vl) __riscv_vcompress_vm_u32m1_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u32m2(mask, dest, src, vl) __riscv_vcompress_vm_u32m2_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u32m4(mask, dest, src, vl) __riscv_vcompress_vm_u32m4_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u32m8(mask, dest, src, vl) __riscv_vcompress_vm_u32m8_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u32mf2(mask, dest, src, vl) __riscv_vcompress_vm_u32mf2_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u64m1(mask, dest, src, vl) __riscv_vcompress_vm_u64m1_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u64m2(mask, dest, src, vl) __riscv_vcompress_vm_u64m2_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u64m4(mask, dest, src, vl) __riscv_vcompress_vm_u64m4_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u64m8(mask, dest, src, vl) __riscv_vcompress_vm_u64m8_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u8m1(mask, dest, src, vl) __riscv_vcompress_vm_u8m1_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u8m2(mask, dest, src, vl) __riscv_vcompress_vm_u8m2_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u8m4(mask, dest, src, vl) __riscv_vcompress_vm_u8m4_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u8m8(mask, dest, src, vl) __riscv_vcompress_vm_u8m8_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u8mf2(mask, dest, src, vl) __riscv_vcompress_vm_u8mf2_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u8mf4(mask, dest, src, vl) __riscv_vcompress_vm_u8mf4_tu((dest), (src), (mask), (vl)) |
| #define | vcompress_vm_u8mf8(mask, dest, src, vl) __riscv_vcompress_vm_u8mf8_tu((dest), (src), (mask), (vl)) |
| #define | vcpop_m_b1(...) __riscv_vcpop_m_b1(__VA_ARGS__) |
| #define | vcpop_m_b16(...) __riscv_vcpop_m_b16(__VA_ARGS__) |
| #define | vcpop_m_b16_m(...) __riscv_vcpop_m_b16_m(__VA_ARGS__) |
| #define | vcpop_m_b1_m(...) __riscv_vcpop_m_b1_m(__VA_ARGS__) |
| #define | vcpop_m_b2(...) __riscv_vcpop_m_b2(__VA_ARGS__) |
| #define | vcpop_m_b2_m(...) __riscv_vcpop_m_b2_m(__VA_ARGS__) |
| #define | vcpop_m_b32(...) __riscv_vcpop_m_b32(__VA_ARGS__) |
| #define | vcpop_m_b32_m(...) __riscv_vcpop_m_b32_m(__VA_ARGS__) |
| #define | vcpop_m_b4(...) __riscv_vcpop_m_b4(__VA_ARGS__) |
| #define | vcpop_m_b4_m(...) __riscv_vcpop_m_b4_m(__VA_ARGS__) |
| #define | vcpop_m_b64(...) __riscv_vcpop_m_b64(__VA_ARGS__) |
| #define | vcpop_m_b64_m(...) __riscv_vcpop_m_b64_m(__VA_ARGS__) |
| #define | vcpop_m_b8(...) __riscv_vcpop_m_b8(__VA_ARGS__) |
| #define | vcpop_m_b8_m(...) __riscv_vcpop_m_b8_m(__VA_ARGS__) |
| #define | vdiv_vv_i16m1(...) __riscv_vdiv_vv_i16m1(__VA_ARGS__) |
| #define | vdiv_vv_i16m1_m(...) __riscv_vdiv_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vdiv_vv_i16m2(...) __riscv_vdiv_vv_i16m2(__VA_ARGS__) |
| #define | vdiv_vv_i16m2_m(...) __riscv_vdiv_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vdiv_vv_i16m4(...) __riscv_vdiv_vv_i16m4(__VA_ARGS__) |
| #define | vdiv_vv_i16m4_m(...) __riscv_vdiv_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vdiv_vv_i16m8(...) __riscv_vdiv_vv_i16m8(__VA_ARGS__) |
| #define | vdiv_vv_i16m8_m(...) __riscv_vdiv_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vdiv_vv_i16mf2(...) __riscv_vdiv_vv_i16mf2(__VA_ARGS__) |
| #define | vdiv_vv_i16mf2_m(...) __riscv_vdiv_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vdiv_vv_i16mf4(...) __riscv_vdiv_vv_i16mf4(__VA_ARGS__) |
| #define | vdiv_vv_i16mf4_m(...) __riscv_vdiv_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vdiv_vv_i32m1(...) __riscv_vdiv_vv_i32m1(__VA_ARGS__) |
| #define | vdiv_vv_i32m1_m(...) __riscv_vdiv_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vdiv_vv_i32m2(...) __riscv_vdiv_vv_i32m2(__VA_ARGS__) |
| #define | vdiv_vv_i32m2_m(...) __riscv_vdiv_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vdiv_vv_i32m4(...) __riscv_vdiv_vv_i32m4(__VA_ARGS__) |
| #define | vdiv_vv_i32m4_m(...) __riscv_vdiv_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vdiv_vv_i32m8(...) __riscv_vdiv_vv_i32m8(__VA_ARGS__) |
| #define | vdiv_vv_i32m8_m(...) __riscv_vdiv_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vdiv_vv_i32mf2(...) __riscv_vdiv_vv_i32mf2(__VA_ARGS__) |
| #define | vdiv_vv_i32mf2_m(...) __riscv_vdiv_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vdiv_vv_i64m1(...) __riscv_vdiv_vv_i64m1(__VA_ARGS__) |
| #define | vdiv_vv_i64m1_m(...) __riscv_vdiv_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vdiv_vv_i64m2(...) __riscv_vdiv_vv_i64m2(__VA_ARGS__) |
| #define | vdiv_vv_i64m2_m(...) __riscv_vdiv_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vdiv_vv_i64m4(...) __riscv_vdiv_vv_i64m4(__VA_ARGS__) |
| #define | vdiv_vv_i64m4_m(...) __riscv_vdiv_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vdiv_vv_i64m8(...) __riscv_vdiv_vv_i64m8(__VA_ARGS__) |
| #define | vdiv_vv_i64m8_m(...) __riscv_vdiv_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vdiv_vv_i8m1(...) __riscv_vdiv_vv_i8m1(__VA_ARGS__) |
| #define | vdiv_vv_i8m1_m(...) __riscv_vdiv_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vdiv_vv_i8m2(...) __riscv_vdiv_vv_i8m2(__VA_ARGS__) |
| #define | vdiv_vv_i8m2_m(...) __riscv_vdiv_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vdiv_vv_i8m4(...) __riscv_vdiv_vv_i8m4(__VA_ARGS__) |
| #define | vdiv_vv_i8m4_m(...) __riscv_vdiv_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vdiv_vv_i8m8(...) __riscv_vdiv_vv_i8m8(__VA_ARGS__) |
| #define | vdiv_vv_i8m8_m(...) __riscv_vdiv_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vdiv_vv_i8mf2(...) __riscv_vdiv_vv_i8mf2(__VA_ARGS__) |
| #define | vdiv_vv_i8mf2_m(...) __riscv_vdiv_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vdiv_vv_i8mf4(...) __riscv_vdiv_vv_i8mf4(__VA_ARGS__) |
| #define | vdiv_vv_i8mf4_m(...) __riscv_vdiv_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vdiv_vv_i8mf8(...) __riscv_vdiv_vv_i8mf8(__VA_ARGS__) |
| #define | vdiv_vv_i8mf8_m(...) __riscv_vdiv_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i16m1(...) __riscv_vdiv_vx_i16m1(__VA_ARGS__) |
| #define | vdiv_vx_i16m1_m(...) __riscv_vdiv_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i16m2(...) __riscv_vdiv_vx_i16m2(__VA_ARGS__) |
| #define | vdiv_vx_i16m2_m(...) __riscv_vdiv_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i16m4(...) __riscv_vdiv_vx_i16m4(__VA_ARGS__) |
| #define | vdiv_vx_i16m4_m(...) __riscv_vdiv_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i16m8(...) __riscv_vdiv_vx_i16m8(__VA_ARGS__) |
| #define | vdiv_vx_i16m8_m(...) __riscv_vdiv_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i16mf2(...) __riscv_vdiv_vx_i16mf2(__VA_ARGS__) |
| #define | vdiv_vx_i16mf2_m(...) __riscv_vdiv_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i16mf4(...) __riscv_vdiv_vx_i16mf4(__VA_ARGS__) |
| #define | vdiv_vx_i16mf4_m(...) __riscv_vdiv_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i32m1(...) __riscv_vdiv_vx_i32m1(__VA_ARGS__) |
| #define | vdiv_vx_i32m1_m(...) __riscv_vdiv_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i32m2(...) __riscv_vdiv_vx_i32m2(__VA_ARGS__) |
| #define | vdiv_vx_i32m2_m(...) __riscv_vdiv_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i32m4(...) __riscv_vdiv_vx_i32m4(__VA_ARGS__) |
| #define | vdiv_vx_i32m4_m(...) __riscv_vdiv_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i32m8(...) __riscv_vdiv_vx_i32m8(__VA_ARGS__) |
| #define | vdiv_vx_i32m8_m(...) __riscv_vdiv_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i32mf2(...) __riscv_vdiv_vx_i32mf2(__VA_ARGS__) |
| #define | vdiv_vx_i32mf2_m(...) __riscv_vdiv_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i64m1(...) __riscv_vdiv_vx_i64m1(__VA_ARGS__) |
| #define | vdiv_vx_i64m1_m(...) __riscv_vdiv_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i64m2(...) __riscv_vdiv_vx_i64m2(__VA_ARGS__) |
| #define | vdiv_vx_i64m2_m(...) __riscv_vdiv_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i64m4(...) __riscv_vdiv_vx_i64m4(__VA_ARGS__) |
| #define | vdiv_vx_i64m4_m(...) __riscv_vdiv_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i64m8(...) __riscv_vdiv_vx_i64m8(__VA_ARGS__) |
| #define | vdiv_vx_i64m8_m(...) __riscv_vdiv_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i8m1(...) __riscv_vdiv_vx_i8m1(__VA_ARGS__) |
| #define | vdiv_vx_i8m1_m(...) __riscv_vdiv_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i8m2(...) __riscv_vdiv_vx_i8m2(__VA_ARGS__) |
| #define | vdiv_vx_i8m2_m(...) __riscv_vdiv_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i8m4(...) __riscv_vdiv_vx_i8m4(__VA_ARGS__) |
| #define | vdiv_vx_i8m4_m(...) __riscv_vdiv_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i8m8(...) __riscv_vdiv_vx_i8m8(__VA_ARGS__) |
| #define | vdiv_vx_i8m8_m(...) __riscv_vdiv_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i8mf2(...) __riscv_vdiv_vx_i8mf2(__VA_ARGS__) |
| #define | vdiv_vx_i8mf2_m(...) __riscv_vdiv_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i8mf4(...) __riscv_vdiv_vx_i8mf4(__VA_ARGS__) |
| #define | vdiv_vx_i8mf4_m(...) __riscv_vdiv_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vdiv_vx_i8mf8(...) __riscv_vdiv_vx_i8mf8(__VA_ARGS__) |
| #define | vdiv_vx_i8mf8_m(...) __riscv_vdiv_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u16m1(...) __riscv_vdivu_vv_u16m1(__VA_ARGS__) |
| #define | vdivu_vv_u16m1_m(...) __riscv_vdivu_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u16m2(...) __riscv_vdivu_vv_u16m2(__VA_ARGS__) |
| #define | vdivu_vv_u16m2_m(...) __riscv_vdivu_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u16m4(...) __riscv_vdivu_vv_u16m4(__VA_ARGS__) |
| #define | vdivu_vv_u16m4_m(...) __riscv_vdivu_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u16m8(...) __riscv_vdivu_vv_u16m8(__VA_ARGS__) |
| #define | vdivu_vv_u16m8_m(...) __riscv_vdivu_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u16mf2(...) __riscv_vdivu_vv_u16mf2(__VA_ARGS__) |
| #define | vdivu_vv_u16mf2_m(...) __riscv_vdivu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u16mf4(...) __riscv_vdivu_vv_u16mf4(__VA_ARGS__) |
| #define | vdivu_vv_u16mf4_m(...) __riscv_vdivu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u32m1(...) __riscv_vdivu_vv_u32m1(__VA_ARGS__) |
| #define | vdivu_vv_u32m1_m(...) __riscv_vdivu_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u32m2(...) __riscv_vdivu_vv_u32m2(__VA_ARGS__) |
| #define | vdivu_vv_u32m2_m(...) __riscv_vdivu_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u32m4(...) __riscv_vdivu_vv_u32m4(__VA_ARGS__) |
| #define | vdivu_vv_u32m4_m(...) __riscv_vdivu_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u32m8(...) __riscv_vdivu_vv_u32m8(__VA_ARGS__) |
| #define | vdivu_vv_u32m8_m(...) __riscv_vdivu_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u32mf2(...) __riscv_vdivu_vv_u32mf2(__VA_ARGS__) |
| #define | vdivu_vv_u32mf2_m(...) __riscv_vdivu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u64m1(...) __riscv_vdivu_vv_u64m1(__VA_ARGS__) |
| #define | vdivu_vv_u64m1_m(...) __riscv_vdivu_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u64m2(...) __riscv_vdivu_vv_u64m2(__VA_ARGS__) |
| #define | vdivu_vv_u64m2_m(...) __riscv_vdivu_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u64m4(...) __riscv_vdivu_vv_u64m4(__VA_ARGS__) |
| #define | vdivu_vv_u64m4_m(...) __riscv_vdivu_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u64m8(...) __riscv_vdivu_vv_u64m8(__VA_ARGS__) |
| #define | vdivu_vv_u64m8_m(...) __riscv_vdivu_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u8m1(...) __riscv_vdivu_vv_u8m1(__VA_ARGS__) |
| #define | vdivu_vv_u8m1_m(...) __riscv_vdivu_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u8m2(...) __riscv_vdivu_vv_u8m2(__VA_ARGS__) |
| #define | vdivu_vv_u8m2_m(...) __riscv_vdivu_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u8m4(...) __riscv_vdivu_vv_u8m4(__VA_ARGS__) |
| #define | vdivu_vv_u8m4_m(...) __riscv_vdivu_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u8m8(...) __riscv_vdivu_vv_u8m8(__VA_ARGS__) |
| #define | vdivu_vv_u8m8_m(...) __riscv_vdivu_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u8mf2(...) __riscv_vdivu_vv_u8mf2(__VA_ARGS__) |
| #define | vdivu_vv_u8mf2_m(...) __riscv_vdivu_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u8mf4(...) __riscv_vdivu_vv_u8mf4(__VA_ARGS__) |
| #define | vdivu_vv_u8mf4_m(...) __riscv_vdivu_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vdivu_vv_u8mf8(...) __riscv_vdivu_vv_u8mf8(__VA_ARGS__) |
| #define | vdivu_vv_u8mf8_m(...) __riscv_vdivu_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u16m1(...) __riscv_vdivu_vx_u16m1(__VA_ARGS__) |
| #define | vdivu_vx_u16m1_m(...) __riscv_vdivu_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u16m2(...) __riscv_vdivu_vx_u16m2(__VA_ARGS__) |
| #define | vdivu_vx_u16m2_m(...) __riscv_vdivu_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u16m4(...) __riscv_vdivu_vx_u16m4(__VA_ARGS__) |
| #define | vdivu_vx_u16m4_m(...) __riscv_vdivu_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u16m8(...) __riscv_vdivu_vx_u16m8(__VA_ARGS__) |
| #define | vdivu_vx_u16m8_m(...) __riscv_vdivu_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u16mf2(...) __riscv_vdivu_vx_u16mf2(__VA_ARGS__) |
| #define | vdivu_vx_u16mf2_m(...) __riscv_vdivu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u16mf4(...) __riscv_vdivu_vx_u16mf4(__VA_ARGS__) |
| #define | vdivu_vx_u16mf4_m(...) __riscv_vdivu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u32m1(...) __riscv_vdivu_vx_u32m1(__VA_ARGS__) |
| #define | vdivu_vx_u32m1_m(...) __riscv_vdivu_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u32m2(...) __riscv_vdivu_vx_u32m2(__VA_ARGS__) |
| #define | vdivu_vx_u32m2_m(...) __riscv_vdivu_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u32m4(...) __riscv_vdivu_vx_u32m4(__VA_ARGS__) |
| #define | vdivu_vx_u32m4_m(...) __riscv_vdivu_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u32m8(...) __riscv_vdivu_vx_u32m8(__VA_ARGS__) |
| #define | vdivu_vx_u32m8_m(...) __riscv_vdivu_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u32mf2(...) __riscv_vdivu_vx_u32mf2(__VA_ARGS__) |
| #define | vdivu_vx_u32mf2_m(...) __riscv_vdivu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u64m1(...) __riscv_vdivu_vx_u64m1(__VA_ARGS__) |
| #define | vdivu_vx_u64m1_m(...) __riscv_vdivu_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u64m2(...) __riscv_vdivu_vx_u64m2(__VA_ARGS__) |
| #define | vdivu_vx_u64m2_m(...) __riscv_vdivu_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u64m4(...) __riscv_vdivu_vx_u64m4(__VA_ARGS__) |
| #define | vdivu_vx_u64m4_m(...) __riscv_vdivu_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u64m8(...) __riscv_vdivu_vx_u64m8(__VA_ARGS__) |
| #define | vdivu_vx_u64m8_m(...) __riscv_vdivu_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u8m1(...) __riscv_vdivu_vx_u8m1(__VA_ARGS__) |
| #define | vdivu_vx_u8m1_m(...) __riscv_vdivu_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u8m2(...) __riscv_vdivu_vx_u8m2(__VA_ARGS__) |
| #define | vdivu_vx_u8m2_m(...) __riscv_vdivu_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u8m4(...) __riscv_vdivu_vx_u8m4(__VA_ARGS__) |
| #define | vdivu_vx_u8m4_m(...) __riscv_vdivu_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u8m8(...) __riscv_vdivu_vx_u8m8(__VA_ARGS__) |
| #define | vdivu_vx_u8m8_m(...) __riscv_vdivu_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u8mf2(...) __riscv_vdivu_vx_u8mf2(__VA_ARGS__) |
| #define | vdivu_vx_u8mf2_m(...) __riscv_vdivu_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u8mf4(...) __riscv_vdivu_vx_u8mf4(__VA_ARGS__) |
| #define | vdivu_vx_u8mf4_m(...) __riscv_vdivu_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vdivu_vx_u8mf8(...) __riscv_vdivu_vx_u8mf8(__VA_ARGS__) |
| #define | vdivu_vx_u8mf8_m(...) __riscv_vdivu_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vfabs_v_f16m1(...) __riscv_vfabs_v_f16m1(__VA_ARGS__) |
| #define | vfabs_v_f16m1_m(...) __riscv_vfabs_v_f16m1_tumu(__VA_ARGS__) |
| #define | vfabs_v_f16m2(...) __riscv_vfabs_v_f16m2(__VA_ARGS__) |
| #define | vfabs_v_f16m2_m(...) __riscv_vfabs_v_f16m2_tumu(__VA_ARGS__) |
| #define | vfabs_v_f16m4(...) __riscv_vfabs_v_f16m4(__VA_ARGS__) |
| #define | vfabs_v_f16m4_m(...) __riscv_vfabs_v_f16m4_tumu(__VA_ARGS__) |
| #define | vfabs_v_f16m8(...) __riscv_vfabs_v_f16m8(__VA_ARGS__) |
| #define | vfabs_v_f16m8_m(...) __riscv_vfabs_v_f16m8_tumu(__VA_ARGS__) |
| #define | vfabs_v_f16mf2(...) __riscv_vfabs_v_f16mf2(__VA_ARGS__) |
| #define | vfabs_v_f16mf2_m(...) __riscv_vfabs_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vfabs_v_f16mf4(...) __riscv_vfabs_v_f16mf4(__VA_ARGS__) |
| #define | vfabs_v_f16mf4_m(...) __riscv_vfabs_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vfabs_v_f32m1(...) __riscv_vfabs_v_f32m1(__VA_ARGS__) |
| #define | vfabs_v_f32m1_m(...) __riscv_vfabs_v_f32m1_tumu(__VA_ARGS__) |
| #define | vfabs_v_f32m2(...) __riscv_vfabs_v_f32m2(__VA_ARGS__) |
| #define | vfabs_v_f32m2_m(...) __riscv_vfabs_v_f32m2_tumu(__VA_ARGS__) |
| #define | vfabs_v_f32m4(...) __riscv_vfabs_v_f32m4(__VA_ARGS__) |
| #define | vfabs_v_f32m4_m(...) __riscv_vfabs_v_f32m4_tumu(__VA_ARGS__) |
| #define | vfabs_v_f32m8(...) __riscv_vfabs_v_f32m8(__VA_ARGS__) |
| #define | vfabs_v_f32m8_m(...) __riscv_vfabs_v_f32m8_tumu(__VA_ARGS__) |
| #define | vfabs_v_f32mf2(...) __riscv_vfabs_v_f32mf2(__VA_ARGS__) |
| #define | vfabs_v_f32mf2_m(...) __riscv_vfabs_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vfabs_v_f64m1(...) __riscv_vfabs_v_f64m1(__VA_ARGS__) |
| #define | vfabs_v_f64m1_m(...) __riscv_vfabs_v_f64m1_tumu(__VA_ARGS__) |
| #define | vfabs_v_f64m2(...) __riscv_vfabs_v_f64m2(__VA_ARGS__) |
| #define | vfabs_v_f64m2_m(...) __riscv_vfabs_v_f64m2_tumu(__VA_ARGS__) |
| #define | vfabs_v_f64m4(...) __riscv_vfabs_v_f64m4(__VA_ARGS__) |
| #define | vfabs_v_f64m4_m(...) __riscv_vfabs_v_f64m4_tumu(__VA_ARGS__) |
| #define | vfabs_v_f64m8(...) __riscv_vfabs_v_f64m8(__VA_ARGS__) |
| #define | vfabs_v_f64m8_m(...) __riscv_vfabs_v_f64m8_tumu(__VA_ARGS__) |
| #define | vfadd_vf_f16m1(...) __riscv_vfadd_vf_f16m1(__VA_ARGS__) |
| #define | vfadd_vf_f16m1_m(...) __riscv_vfadd_vf_f16m1_tumu(__VA_ARGS__) |
| #define | vfadd_vf_f16m2(...) __riscv_vfadd_vf_f16m2(__VA_ARGS__) |
| #define | vfadd_vf_f16m2_m(...) __riscv_vfadd_vf_f16m2_tumu(__VA_ARGS__) |
| #define | vfadd_vf_f16m4(...) __riscv_vfadd_vf_f16m4(__VA_ARGS__) |
| #define | vfadd_vf_f16m4_m(...) __riscv_vfadd_vf_f16m4_tumu(__VA_ARGS__) |
| #define | vfadd_vf_f16m8(...) __riscv_vfadd_vf_f16m8(__VA_ARGS__) |
| #define | vfadd_vf_f16m8_m(...) __riscv_vfadd_vf_f16m8_tumu(__VA_ARGS__) |
| #define | vfadd_vf_f16mf2(...) __riscv_vfadd_vf_f16mf2(__VA_ARGS__) |
| #define | vfadd_vf_f16mf2_m(...) __riscv_vfadd_vf_f16mf2_tumu(__VA_ARGS__) |
| #define | vfadd_vf_f16mf4(...) __riscv_vfadd_vf_f16mf4(__VA_ARGS__) |
| #define | vfadd_vf_f16mf4_m(...) __riscv_vfadd_vf_f16mf4_tumu(__VA_ARGS__) |
| #define | vfadd_vf_f32m1(...) __riscv_vfadd_vf_f32m1(__VA_ARGS__) |
| #define | vfadd_vf_f32m1_m(...) __riscv_vfadd_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfadd_vf_f32m2(...) __riscv_vfadd_vf_f32m2(__VA_ARGS__) |
| #define | vfadd_vf_f32m2_m(...) __riscv_vfadd_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfadd_vf_f32m4(...) __riscv_vfadd_vf_f32m4(__VA_ARGS__) |
| #define | vfadd_vf_f32m4_m(...) __riscv_vfadd_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfadd_vf_f32m8(...) __riscv_vfadd_vf_f32m8(__VA_ARGS__) |
| #define | vfadd_vf_f32m8_m(...) __riscv_vfadd_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfadd_vf_f32mf2(...) __riscv_vfadd_vf_f32mf2(__VA_ARGS__) |
| #define | vfadd_vf_f32mf2_m(...) __riscv_vfadd_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfadd_vf_f64m1(...) __riscv_vfadd_vf_f64m1(__VA_ARGS__) |
| #define | vfadd_vf_f64m1_m(...) __riscv_vfadd_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfadd_vf_f64m2(...) __riscv_vfadd_vf_f64m2(__VA_ARGS__) |
| #define | vfadd_vf_f64m2_m(...) __riscv_vfadd_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfadd_vf_f64m4(...) __riscv_vfadd_vf_f64m4(__VA_ARGS__) |
| #define | vfadd_vf_f64m4_m(...) __riscv_vfadd_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfadd_vf_f64m8(...) __riscv_vfadd_vf_f64m8(__VA_ARGS__) |
| #define | vfadd_vf_f64m8_m(...) __riscv_vfadd_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfadd_vv_f16m1(...) __riscv_vfadd_vv_f16m1(__VA_ARGS__) |
| #define | vfadd_vv_f16m1_m(...) __riscv_vfadd_vv_f16m1_tumu(__VA_ARGS__) |
| #define | vfadd_vv_f16m2(...) __riscv_vfadd_vv_f16m2(__VA_ARGS__) |
| #define | vfadd_vv_f16m2_m(...) __riscv_vfadd_vv_f16m2_tumu(__VA_ARGS__) |
| #define | vfadd_vv_f16m4(...) __riscv_vfadd_vv_f16m4(__VA_ARGS__) |
| #define | vfadd_vv_f16m4_m(...) __riscv_vfadd_vv_f16m4_tumu(__VA_ARGS__) |
| #define | vfadd_vv_f16m8(...) __riscv_vfadd_vv_f16m8(__VA_ARGS__) |
| #define | vfadd_vv_f16m8_m(...) __riscv_vfadd_vv_f16m8_tumu(__VA_ARGS__) |
| #define | vfadd_vv_f16mf2(...) __riscv_vfadd_vv_f16mf2(__VA_ARGS__) |
| #define | vfadd_vv_f16mf2_m(...) __riscv_vfadd_vv_f16mf2_tumu(__VA_ARGS__) |
| #define | vfadd_vv_f16mf4(...) __riscv_vfadd_vv_f16mf4(__VA_ARGS__) |
| #define | vfadd_vv_f16mf4_m(...) __riscv_vfadd_vv_f16mf4_tumu(__VA_ARGS__) |
| #define | vfadd_vv_f32m1(...) __riscv_vfadd_vv_f32m1(__VA_ARGS__) |
| #define | vfadd_vv_f32m1_m(...) __riscv_vfadd_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfadd_vv_f32m2(...) __riscv_vfadd_vv_f32m2(__VA_ARGS__) |
| #define | vfadd_vv_f32m2_m(...) __riscv_vfadd_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfadd_vv_f32m4(...) __riscv_vfadd_vv_f32m4(__VA_ARGS__) |
| #define | vfadd_vv_f32m4_m(...) __riscv_vfadd_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfadd_vv_f32m8(...) __riscv_vfadd_vv_f32m8(__VA_ARGS__) |
| #define | vfadd_vv_f32m8_m(...) __riscv_vfadd_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfadd_vv_f32mf2(...) __riscv_vfadd_vv_f32mf2(__VA_ARGS__) |
| #define | vfadd_vv_f32mf2_m(...) __riscv_vfadd_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfadd_vv_f64m1(...) __riscv_vfadd_vv_f64m1(__VA_ARGS__) |
| #define | vfadd_vv_f64m1_m(...) __riscv_vfadd_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfadd_vv_f64m2(...) __riscv_vfadd_vv_f64m2(__VA_ARGS__) |
| #define | vfadd_vv_f64m2_m(...) __riscv_vfadd_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfadd_vv_f64m4(...) __riscv_vfadd_vv_f64m4(__VA_ARGS__) |
| #define | vfadd_vv_f64m4_m(...) __riscv_vfadd_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfadd_vv_f64m8(...) __riscv_vfadd_vv_f64m8(__VA_ARGS__) |
| #define | vfadd_vv_f64m8_m(...) __riscv_vfadd_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfclass_v_u16m1(...) __riscv_vfclass_v_u16m1(__VA_ARGS__) |
| #define | vfclass_v_u16m1_m(...) __riscv_vfclass_v_u16m1_tumu(__VA_ARGS__) |
| #define | vfclass_v_u16m2(...) __riscv_vfclass_v_u16m2(__VA_ARGS__) |
| #define | vfclass_v_u16m2_m(...) __riscv_vfclass_v_u16m2_tumu(__VA_ARGS__) |
| #define | vfclass_v_u16m4(...) __riscv_vfclass_v_u16m4(__VA_ARGS__) |
| #define | vfclass_v_u16m4_m(...) __riscv_vfclass_v_u16m4_tumu(__VA_ARGS__) |
| #define | vfclass_v_u16m8(...) __riscv_vfclass_v_u16m8(__VA_ARGS__) |
| #define | vfclass_v_u16m8_m(...) __riscv_vfclass_v_u16m8_tumu(__VA_ARGS__) |
| #define | vfclass_v_u16mf2(...) __riscv_vfclass_v_u16mf2(__VA_ARGS__) |
| #define | vfclass_v_u16mf2_m(...) __riscv_vfclass_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vfclass_v_u16mf4(...) __riscv_vfclass_v_u16mf4(__VA_ARGS__) |
| #define | vfclass_v_u16mf4_m(...) __riscv_vfclass_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vfclass_v_u32m1(...) __riscv_vfclass_v_u32m1(__VA_ARGS__) |
| #define | vfclass_v_u32m1_m(...) __riscv_vfclass_v_u32m1_tumu(__VA_ARGS__) |
| #define | vfclass_v_u32m2(...) __riscv_vfclass_v_u32m2(__VA_ARGS__) |
| #define | vfclass_v_u32m2_m(...) __riscv_vfclass_v_u32m2_tumu(__VA_ARGS__) |
| #define | vfclass_v_u32m4(...) __riscv_vfclass_v_u32m4(__VA_ARGS__) |
| #define | vfclass_v_u32m4_m(...) __riscv_vfclass_v_u32m4_tumu(__VA_ARGS__) |
| #define | vfclass_v_u32m8(...) __riscv_vfclass_v_u32m8(__VA_ARGS__) |
| #define | vfclass_v_u32m8_m(...) __riscv_vfclass_v_u32m8_tumu(__VA_ARGS__) |
| #define | vfclass_v_u32mf2(...) __riscv_vfclass_v_u32mf2(__VA_ARGS__) |
| #define | vfclass_v_u32mf2_m(...) __riscv_vfclass_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vfclass_v_u64m1(...) __riscv_vfclass_v_u64m1(__VA_ARGS__) |
| #define | vfclass_v_u64m1_m(...) __riscv_vfclass_v_u64m1_tumu(__VA_ARGS__) |
| #define | vfclass_v_u64m2(...) __riscv_vfclass_v_u64m2(__VA_ARGS__) |
| #define | vfclass_v_u64m2_m(...) __riscv_vfclass_v_u64m2_tumu(__VA_ARGS__) |
| #define | vfclass_v_u64m4(...) __riscv_vfclass_v_u64m4(__VA_ARGS__) |
| #define | vfclass_v_u64m4_m(...) __riscv_vfclass_v_u64m4_tumu(__VA_ARGS__) |
| #define | vfclass_v_u64m8(...) __riscv_vfclass_v_u64m8(__VA_ARGS__) |
| #define | vfclass_v_u64m8_m(...) __riscv_vfclass_v_u64m8_tumu(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f16m1(...) __riscv_vfcvt_f_x_v_f16m1(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f16m1_m(...) __riscv_vfcvt_f_x_v_f16m1_tumu(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f16m2(...) __riscv_vfcvt_f_x_v_f16m2(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f16m2_m(...) __riscv_vfcvt_f_x_v_f16m2_tumu(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f16m4(...) __riscv_vfcvt_f_x_v_f16m4(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f16m4_m(...) __riscv_vfcvt_f_x_v_f16m4_tumu(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f16m8(...) __riscv_vfcvt_f_x_v_f16m8(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f16m8_m(...) __riscv_vfcvt_f_x_v_f16m8_tumu(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f16mf2(...) __riscv_vfcvt_f_x_v_f16mf2(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f16mf2_m(...) __riscv_vfcvt_f_x_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f16mf4(...) __riscv_vfcvt_f_x_v_f16mf4(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f16mf4_m(...) __riscv_vfcvt_f_x_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f32m1(...) __riscv_vfcvt_f_x_v_f32m1(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f32m1_m(...) __riscv_vfcvt_f_x_v_f32m1_tumu(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f32m2(...) __riscv_vfcvt_f_x_v_f32m2(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f32m2_m(...) __riscv_vfcvt_f_x_v_f32m2_tumu(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f32m4(...) __riscv_vfcvt_f_x_v_f32m4(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f32m4_m(...) __riscv_vfcvt_f_x_v_f32m4_tumu(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f32m8(...) __riscv_vfcvt_f_x_v_f32m8(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f32m8_m(...) __riscv_vfcvt_f_x_v_f32m8_tumu(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f32mf2(...) __riscv_vfcvt_f_x_v_f32mf2(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f32mf2_m(...) __riscv_vfcvt_f_x_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f64m1(...) __riscv_vfcvt_f_x_v_f64m1(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f64m1_m(...) __riscv_vfcvt_f_x_v_f64m1_tumu(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f64m2(...) __riscv_vfcvt_f_x_v_f64m2(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f64m2_m(...) __riscv_vfcvt_f_x_v_f64m2_tumu(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f64m4(...) __riscv_vfcvt_f_x_v_f64m4(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f64m4_m(...) __riscv_vfcvt_f_x_v_f64m4_tumu(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f64m8(...) __riscv_vfcvt_f_x_v_f64m8(__VA_ARGS__) |
| #define | vfcvt_f_x_v_f64m8_m(...) __riscv_vfcvt_f_x_v_f64m8_tumu(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f16m1(...) __riscv_vfcvt_f_xu_v_f16m1(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f16m1_m(...) __riscv_vfcvt_f_xu_v_f16m1_tumu(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f16m2(...) __riscv_vfcvt_f_xu_v_f16m2(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f16m2_m(...) __riscv_vfcvt_f_xu_v_f16m2_tumu(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f16m4(...) __riscv_vfcvt_f_xu_v_f16m4(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f16m4_m(...) __riscv_vfcvt_f_xu_v_f16m4_tumu(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f16m8(...) __riscv_vfcvt_f_xu_v_f16m8(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f16m8_m(...) __riscv_vfcvt_f_xu_v_f16m8_tumu(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f16mf2(...) __riscv_vfcvt_f_xu_v_f16mf2(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f16mf2_m(...) __riscv_vfcvt_f_xu_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f16mf4(...) __riscv_vfcvt_f_xu_v_f16mf4(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f16mf4_m(...) __riscv_vfcvt_f_xu_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f32m1(...) __riscv_vfcvt_f_xu_v_f32m1(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f32m1_m(...) __riscv_vfcvt_f_xu_v_f32m1_tumu(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f32m2(...) __riscv_vfcvt_f_xu_v_f32m2(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f32m2_m(...) __riscv_vfcvt_f_xu_v_f32m2_tumu(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f32m4(...) __riscv_vfcvt_f_xu_v_f32m4(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f32m4_m(...) __riscv_vfcvt_f_xu_v_f32m4_tumu(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f32m8(...) __riscv_vfcvt_f_xu_v_f32m8(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f32m8_m(...) __riscv_vfcvt_f_xu_v_f32m8_tumu(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f32mf2(...) __riscv_vfcvt_f_xu_v_f32mf2(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f32mf2_m(...) __riscv_vfcvt_f_xu_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f64m1(...) __riscv_vfcvt_f_xu_v_f64m1(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f64m1_m(...) __riscv_vfcvt_f_xu_v_f64m1_tumu(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f64m2(...) __riscv_vfcvt_f_xu_v_f64m2(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f64m2_m(...) __riscv_vfcvt_f_xu_v_f64m2_tumu(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f64m4(...) __riscv_vfcvt_f_xu_v_f64m4(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f64m4_m(...) __riscv_vfcvt_f_xu_v_f64m4_tumu(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f64m8(...) __riscv_vfcvt_f_xu_v_f64m8(__VA_ARGS__) |
| #define | vfcvt_f_xu_v_f64m8_m(...) __riscv_vfcvt_f_xu_v_f64m8_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i16m1(...) __riscv_vfcvt_rtz_x_f_v_i16m1(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i16m1_m(...) __riscv_vfcvt_rtz_x_f_v_i16m1_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i16m2(...) __riscv_vfcvt_rtz_x_f_v_i16m2(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i16m2_m(...) __riscv_vfcvt_rtz_x_f_v_i16m2_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i16m4(...) __riscv_vfcvt_rtz_x_f_v_i16m4(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i16m4_m(...) __riscv_vfcvt_rtz_x_f_v_i16m4_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i16m8(...) __riscv_vfcvt_rtz_x_f_v_i16m8(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i16m8_m(...) __riscv_vfcvt_rtz_x_f_v_i16m8_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i16mf2(...) __riscv_vfcvt_rtz_x_f_v_i16mf2(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i16mf2_m(...) __riscv_vfcvt_rtz_x_f_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i16mf4(...) __riscv_vfcvt_rtz_x_f_v_i16mf4(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i16mf4_m(...) __riscv_vfcvt_rtz_x_f_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i32m1(...) __riscv_vfcvt_rtz_x_f_v_i32m1(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i32m1_m(...) __riscv_vfcvt_rtz_x_f_v_i32m1_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i32m2(...) __riscv_vfcvt_rtz_x_f_v_i32m2(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i32m2_m(...) __riscv_vfcvt_rtz_x_f_v_i32m2_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i32m4(...) __riscv_vfcvt_rtz_x_f_v_i32m4(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i32m4_m(...) __riscv_vfcvt_rtz_x_f_v_i32m4_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i32m8(...) __riscv_vfcvt_rtz_x_f_v_i32m8(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i32m8_m(...) __riscv_vfcvt_rtz_x_f_v_i32m8_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i32mf2(...) __riscv_vfcvt_rtz_x_f_v_i32mf2(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i32mf2_m(...) __riscv_vfcvt_rtz_x_f_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i64m1(...) __riscv_vfcvt_rtz_x_f_v_i64m1(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i64m1_m(...) __riscv_vfcvt_rtz_x_f_v_i64m1_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i64m2(...) __riscv_vfcvt_rtz_x_f_v_i64m2(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i64m2_m(...) __riscv_vfcvt_rtz_x_f_v_i64m2_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i64m4(...) __riscv_vfcvt_rtz_x_f_v_i64m4(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i64m4_m(...) __riscv_vfcvt_rtz_x_f_v_i64m4_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i64m8(...) __riscv_vfcvt_rtz_x_f_v_i64m8(__VA_ARGS__) |
| #define | vfcvt_rtz_x_f_v_i64m8_m(...) __riscv_vfcvt_rtz_x_f_v_i64m8_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u16m1(...) __riscv_vfcvt_rtz_xu_f_v_u16m1(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u16m1_m(...) __riscv_vfcvt_rtz_xu_f_v_u16m1_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u16m2(...) __riscv_vfcvt_rtz_xu_f_v_u16m2(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u16m2_m(...) __riscv_vfcvt_rtz_xu_f_v_u16m2_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u16m4(...) __riscv_vfcvt_rtz_xu_f_v_u16m4(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u16m4_m(...) __riscv_vfcvt_rtz_xu_f_v_u16m4_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u16m8(...) __riscv_vfcvt_rtz_xu_f_v_u16m8(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u16m8_m(...) __riscv_vfcvt_rtz_xu_f_v_u16m8_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u16mf2(...) __riscv_vfcvt_rtz_xu_f_v_u16mf2(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u16mf2_m(...) __riscv_vfcvt_rtz_xu_f_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u16mf4(...) __riscv_vfcvt_rtz_xu_f_v_u16mf4(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u16mf4_m(...) __riscv_vfcvt_rtz_xu_f_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u32m1(...) __riscv_vfcvt_rtz_xu_f_v_u32m1(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u32m1_m(...) __riscv_vfcvt_rtz_xu_f_v_u32m1_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u32m2(...) __riscv_vfcvt_rtz_xu_f_v_u32m2(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u32m2_m(...) __riscv_vfcvt_rtz_xu_f_v_u32m2_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u32m4(...) __riscv_vfcvt_rtz_xu_f_v_u32m4(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u32m4_m(...) __riscv_vfcvt_rtz_xu_f_v_u32m4_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u32m8(...) __riscv_vfcvt_rtz_xu_f_v_u32m8(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u32m8_m(...) __riscv_vfcvt_rtz_xu_f_v_u32m8_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u32mf2(...) __riscv_vfcvt_rtz_xu_f_v_u32mf2(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u32mf2_m(...) __riscv_vfcvt_rtz_xu_f_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u64m1(...) __riscv_vfcvt_rtz_xu_f_v_u64m1(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u64m1_m(...) __riscv_vfcvt_rtz_xu_f_v_u64m1_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u64m2(...) __riscv_vfcvt_rtz_xu_f_v_u64m2(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u64m2_m(...) __riscv_vfcvt_rtz_xu_f_v_u64m2_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u64m4(...) __riscv_vfcvt_rtz_xu_f_v_u64m4(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u64m4_m(...) __riscv_vfcvt_rtz_xu_f_v_u64m4_tumu(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u64m8(...) __riscv_vfcvt_rtz_xu_f_v_u64m8(__VA_ARGS__) |
| #define | vfcvt_rtz_xu_f_v_u64m8_m(...) __riscv_vfcvt_rtz_xu_f_v_u64m8_tumu(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i16m1(...) __riscv_vfcvt_x_f_v_i16m1(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i16m1_m(...) __riscv_vfcvt_x_f_v_i16m1_tumu(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i16m2(...) __riscv_vfcvt_x_f_v_i16m2(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i16m2_m(...) __riscv_vfcvt_x_f_v_i16m2_tumu(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i16m4(...) __riscv_vfcvt_x_f_v_i16m4(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i16m4_m(...) __riscv_vfcvt_x_f_v_i16m4_tumu(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i16m8(...) __riscv_vfcvt_x_f_v_i16m8(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i16m8_m(...) __riscv_vfcvt_x_f_v_i16m8_tumu(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i16mf2(...) __riscv_vfcvt_x_f_v_i16mf2(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i16mf2_m(...) __riscv_vfcvt_x_f_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i16mf4(...) __riscv_vfcvt_x_f_v_i16mf4(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i16mf4_m(...) __riscv_vfcvt_x_f_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i32m1(...) __riscv_vfcvt_x_f_v_i32m1(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i32m1_m(...) __riscv_vfcvt_x_f_v_i32m1_tumu(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i32m2(...) __riscv_vfcvt_x_f_v_i32m2(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i32m2_m(...) __riscv_vfcvt_x_f_v_i32m2_tumu(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i32m4(...) __riscv_vfcvt_x_f_v_i32m4(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i32m4_m(...) __riscv_vfcvt_x_f_v_i32m4_tumu(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i32m8(...) __riscv_vfcvt_x_f_v_i32m8(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i32m8_m(...) __riscv_vfcvt_x_f_v_i32m8_tumu(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i32mf2(...) __riscv_vfcvt_x_f_v_i32mf2(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i32mf2_m(...) __riscv_vfcvt_x_f_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i64m1(...) __riscv_vfcvt_x_f_v_i64m1(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i64m1_m(...) __riscv_vfcvt_x_f_v_i64m1_tumu(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i64m2(...) __riscv_vfcvt_x_f_v_i64m2(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i64m2_m(...) __riscv_vfcvt_x_f_v_i64m2_tumu(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i64m4(...) __riscv_vfcvt_x_f_v_i64m4(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i64m4_m(...) __riscv_vfcvt_x_f_v_i64m4_tumu(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i64m8(...) __riscv_vfcvt_x_f_v_i64m8(__VA_ARGS__) |
| #define | vfcvt_x_f_v_i64m8_m(...) __riscv_vfcvt_x_f_v_i64m8_tumu(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u16m1(...) __riscv_vfcvt_xu_f_v_u16m1(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u16m1_m(...) __riscv_vfcvt_xu_f_v_u16m1_tumu(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u16m2(...) __riscv_vfcvt_xu_f_v_u16m2(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u16m2_m(...) __riscv_vfcvt_xu_f_v_u16m2_tumu(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u16m4(...) __riscv_vfcvt_xu_f_v_u16m4(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u16m4_m(...) __riscv_vfcvt_xu_f_v_u16m4_tumu(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u16m8(...) __riscv_vfcvt_xu_f_v_u16m8(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u16m8_m(...) __riscv_vfcvt_xu_f_v_u16m8_tumu(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u16mf2(...) __riscv_vfcvt_xu_f_v_u16mf2(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u16mf2_m(...) __riscv_vfcvt_xu_f_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u16mf4(...) __riscv_vfcvt_xu_f_v_u16mf4(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u16mf4_m(...) __riscv_vfcvt_xu_f_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u32m1(...) __riscv_vfcvt_xu_f_v_u32m1(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u32m1_m(...) __riscv_vfcvt_xu_f_v_u32m1_tumu(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u32m2(...) __riscv_vfcvt_xu_f_v_u32m2(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u32m2_m(...) __riscv_vfcvt_xu_f_v_u32m2_tumu(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u32m4(...) __riscv_vfcvt_xu_f_v_u32m4(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u32m4_m(...) __riscv_vfcvt_xu_f_v_u32m4_tumu(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u32m8(...) __riscv_vfcvt_xu_f_v_u32m8(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u32m8_m(...) __riscv_vfcvt_xu_f_v_u32m8_tumu(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u32mf2(...) __riscv_vfcvt_xu_f_v_u32mf2(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u32mf2_m(...) __riscv_vfcvt_xu_f_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u64m1(...) __riscv_vfcvt_xu_f_v_u64m1(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u64m1_m(...) __riscv_vfcvt_xu_f_v_u64m1_tumu(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u64m2(...) __riscv_vfcvt_xu_f_v_u64m2(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u64m2_m(...) __riscv_vfcvt_xu_f_v_u64m2_tumu(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u64m4(...) __riscv_vfcvt_xu_f_v_u64m4(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u64m4_m(...) __riscv_vfcvt_xu_f_v_u64m4_tumu(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u64m8(...) __riscv_vfcvt_xu_f_v_u64m8(__VA_ARGS__) |
| #define | vfcvt_xu_f_v_u64m8_m(...) __riscv_vfcvt_xu_f_v_u64m8_tumu(__VA_ARGS__) |
| #define | vfdiv_vf_f16m1(...) __riscv_vfdiv_vf_f16m1(__VA_ARGS__) |
| #define | vfdiv_vf_f16m1_m(...) __riscv_vfdiv_vf_f16m1_tumu(__VA_ARGS__) |
| #define | vfdiv_vf_f16m2(...) __riscv_vfdiv_vf_f16m2(__VA_ARGS__) |
| #define | vfdiv_vf_f16m2_m(...) __riscv_vfdiv_vf_f16m2_tumu(__VA_ARGS__) |
| #define | vfdiv_vf_f16m4(...) __riscv_vfdiv_vf_f16m4(__VA_ARGS__) |
| #define | vfdiv_vf_f16m4_m(...) __riscv_vfdiv_vf_f16m4_tumu(__VA_ARGS__) |
| #define | vfdiv_vf_f16m8(...) __riscv_vfdiv_vf_f16m8(__VA_ARGS__) |
| #define | vfdiv_vf_f16m8_m(...) __riscv_vfdiv_vf_f16m8_tumu(__VA_ARGS__) |
| #define | vfdiv_vf_f16mf2(...) __riscv_vfdiv_vf_f16mf2(__VA_ARGS__) |
| #define | vfdiv_vf_f16mf2_m(...) __riscv_vfdiv_vf_f16mf2_tumu(__VA_ARGS__) |
| #define | vfdiv_vf_f16mf4(...) __riscv_vfdiv_vf_f16mf4(__VA_ARGS__) |
| #define | vfdiv_vf_f16mf4_m(...) __riscv_vfdiv_vf_f16mf4_tumu(__VA_ARGS__) |
| #define | vfdiv_vf_f32m1(...) __riscv_vfdiv_vf_f32m1(__VA_ARGS__) |
| #define | vfdiv_vf_f32m1_m(...) __riscv_vfdiv_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfdiv_vf_f32m2(...) __riscv_vfdiv_vf_f32m2(__VA_ARGS__) |
| #define | vfdiv_vf_f32m2_m(...) __riscv_vfdiv_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfdiv_vf_f32m4(...) __riscv_vfdiv_vf_f32m4(__VA_ARGS__) |
| #define | vfdiv_vf_f32m4_m(...) __riscv_vfdiv_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfdiv_vf_f32m8(...) __riscv_vfdiv_vf_f32m8(__VA_ARGS__) |
| #define | vfdiv_vf_f32m8_m(...) __riscv_vfdiv_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfdiv_vf_f32mf2(...) __riscv_vfdiv_vf_f32mf2(__VA_ARGS__) |
| #define | vfdiv_vf_f32mf2_m(...) __riscv_vfdiv_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfdiv_vf_f64m1(...) __riscv_vfdiv_vf_f64m1(__VA_ARGS__) |
| #define | vfdiv_vf_f64m1_m(...) __riscv_vfdiv_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfdiv_vf_f64m2(...) __riscv_vfdiv_vf_f64m2(__VA_ARGS__) |
| #define | vfdiv_vf_f64m2_m(...) __riscv_vfdiv_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfdiv_vf_f64m4(...) __riscv_vfdiv_vf_f64m4(__VA_ARGS__) |
| #define | vfdiv_vf_f64m4_m(...) __riscv_vfdiv_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfdiv_vf_f64m8(...) __riscv_vfdiv_vf_f64m8(__VA_ARGS__) |
| #define | vfdiv_vf_f64m8_m(...) __riscv_vfdiv_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfdiv_vv_f16m1(...) __riscv_vfdiv_vv_f16m1(__VA_ARGS__) |
| #define | vfdiv_vv_f16m1_m(...) __riscv_vfdiv_vv_f16m1_tumu(__VA_ARGS__) |
| #define | vfdiv_vv_f16m2(...) __riscv_vfdiv_vv_f16m2(__VA_ARGS__) |
| #define | vfdiv_vv_f16m2_m(...) __riscv_vfdiv_vv_f16m2_tumu(__VA_ARGS__) |
| #define | vfdiv_vv_f16m4(...) __riscv_vfdiv_vv_f16m4(__VA_ARGS__) |
| #define | vfdiv_vv_f16m4_m(...) __riscv_vfdiv_vv_f16m4_tumu(__VA_ARGS__) |
| #define | vfdiv_vv_f16m8(...) __riscv_vfdiv_vv_f16m8(__VA_ARGS__) |
| #define | vfdiv_vv_f16m8_m(...) __riscv_vfdiv_vv_f16m8_tumu(__VA_ARGS__) |
| #define | vfdiv_vv_f16mf2(...) __riscv_vfdiv_vv_f16mf2(__VA_ARGS__) |
| #define | vfdiv_vv_f16mf2_m(...) __riscv_vfdiv_vv_f16mf2_tumu(__VA_ARGS__) |
| #define | vfdiv_vv_f16mf4(...) __riscv_vfdiv_vv_f16mf4(__VA_ARGS__) |
| #define | vfdiv_vv_f16mf4_m(...) __riscv_vfdiv_vv_f16mf4_tumu(__VA_ARGS__) |
| #define | vfdiv_vv_f32m1(...) __riscv_vfdiv_vv_f32m1(__VA_ARGS__) |
| #define | vfdiv_vv_f32m1_m(...) __riscv_vfdiv_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfdiv_vv_f32m2(...) __riscv_vfdiv_vv_f32m2(__VA_ARGS__) |
| #define | vfdiv_vv_f32m2_m(...) __riscv_vfdiv_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfdiv_vv_f32m4(...) __riscv_vfdiv_vv_f32m4(__VA_ARGS__) |
| #define | vfdiv_vv_f32m4_m(...) __riscv_vfdiv_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfdiv_vv_f32m8(...) __riscv_vfdiv_vv_f32m8(__VA_ARGS__) |
| #define | vfdiv_vv_f32m8_m(...) __riscv_vfdiv_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfdiv_vv_f32mf2(...) __riscv_vfdiv_vv_f32mf2(__VA_ARGS__) |
| #define | vfdiv_vv_f32mf2_m(...) __riscv_vfdiv_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfdiv_vv_f64m1(...) __riscv_vfdiv_vv_f64m1(__VA_ARGS__) |
| #define | vfdiv_vv_f64m1_m(...) __riscv_vfdiv_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfdiv_vv_f64m2(...) __riscv_vfdiv_vv_f64m2(__VA_ARGS__) |
| #define | vfdiv_vv_f64m2_m(...) __riscv_vfdiv_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfdiv_vv_f64m4(...) __riscv_vfdiv_vv_f64m4(__VA_ARGS__) |
| #define | vfdiv_vv_f64m4_m(...) __riscv_vfdiv_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfdiv_vv_f64m8(...) __riscv_vfdiv_vv_f64m8(__VA_ARGS__) |
| #define | vfdiv_vv_f64m8_m(...) __riscv_vfdiv_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfirst_m_b1(...) __riscv_vfirst_m_b1(__VA_ARGS__) |
| #define | vfirst_m_b16(...) __riscv_vfirst_m_b16(__VA_ARGS__) |
| #define | vfirst_m_b16_m(...) __riscv_vfirst_m_b16_m(__VA_ARGS__) |
| #define | vfirst_m_b1_m(...) __riscv_vfirst_m_b1_m(__VA_ARGS__) |
| #define | vfirst_m_b2(...) __riscv_vfirst_m_b2(__VA_ARGS__) |
| #define | vfirst_m_b2_m(...) __riscv_vfirst_m_b2_m(__VA_ARGS__) |
| #define | vfirst_m_b32(...) __riscv_vfirst_m_b32(__VA_ARGS__) |
| #define | vfirst_m_b32_m(...) __riscv_vfirst_m_b32_m(__VA_ARGS__) |
| #define | vfirst_m_b4(...) __riscv_vfirst_m_b4(__VA_ARGS__) |
| #define | vfirst_m_b4_m(...) __riscv_vfirst_m_b4_m(__VA_ARGS__) |
| #define | vfirst_m_b64(...) __riscv_vfirst_m_b64(__VA_ARGS__) |
| #define | vfirst_m_b64_m(...) __riscv_vfirst_m_b64_m(__VA_ARGS__) |
| #define | vfirst_m_b8(...) __riscv_vfirst_m_b8(__VA_ARGS__) |
| #define | vfirst_m_b8_m(...) __riscv_vfirst_m_b8_m(__VA_ARGS__) |
| #define | vfmacc_vf_f16m1(...) __riscv_vfmacc_vf_f16m1_tu(__VA_ARGS__) |
| #define | vfmacc_vf_f16m1_m(...) __riscv_vfmacc_vf_f16m1_tumu(__VA_ARGS__) |
| #define | vfmacc_vf_f16m2(...) __riscv_vfmacc_vf_f16m2_tu(__VA_ARGS__) |
| #define | vfmacc_vf_f16m2_m(...) __riscv_vfmacc_vf_f16m2_tumu(__VA_ARGS__) |
| #define | vfmacc_vf_f16m4(...) __riscv_vfmacc_vf_f16m4_tu(__VA_ARGS__) |
| #define | vfmacc_vf_f16m4_m(...) __riscv_vfmacc_vf_f16m4_tumu(__VA_ARGS__) |
| #define | vfmacc_vf_f16m8(...) __riscv_vfmacc_vf_f16m8_tu(__VA_ARGS__) |
| #define | vfmacc_vf_f16m8_m(...) __riscv_vfmacc_vf_f16m8_tumu(__VA_ARGS__) |
| #define | vfmacc_vf_f16mf2(...) __riscv_vfmacc_vf_f16mf2_tu(__VA_ARGS__) |
| #define | vfmacc_vf_f16mf2_m(...) __riscv_vfmacc_vf_f16mf2_tumu(__VA_ARGS__) |
| #define | vfmacc_vf_f16mf4(...) __riscv_vfmacc_vf_f16mf4_tu(__VA_ARGS__) |
| #define | vfmacc_vf_f16mf4_m(...) __riscv_vfmacc_vf_f16mf4_tumu(__VA_ARGS__) |
| #define | vfmacc_vf_f32m1(...) __riscv_vfmacc_vf_f32m1_tu(__VA_ARGS__) |
| #define | vfmacc_vf_f32m1_m(...) __riscv_vfmacc_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfmacc_vf_f32m2(...) __riscv_vfmacc_vf_f32m2_tu(__VA_ARGS__) |
| #define | vfmacc_vf_f32m2_m(...) __riscv_vfmacc_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfmacc_vf_f32m4(...) __riscv_vfmacc_vf_f32m4_tu(__VA_ARGS__) |
| #define | vfmacc_vf_f32m4_m(...) __riscv_vfmacc_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfmacc_vf_f32m8(...) __riscv_vfmacc_vf_f32m8_tu(__VA_ARGS__) |
| #define | vfmacc_vf_f32m8_m(...) __riscv_vfmacc_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfmacc_vf_f32mf2(...) __riscv_vfmacc_vf_f32mf2_tu(__VA_ARGS__) |
| #define | vfmacc_vf_f32mf2_m(...) __riscv_vfmacc_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfmacc_vf_f64m1(...) __riscv_vfmacc_vf_f64m1_tu(__VA_ARGS__) |
| #define | vfmacc_vf_f64m1_m(...) __riscv_vfmacc_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfmacc_vf_f64m2(...) __riscv_vfmacc_vf_f64m2_tu(__VA_ARGS__) |
| #define | vfmacc_vf_f64m2_m(...) __riscv_vfmacc_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfmacc_vf_f64m4(...) __riscv_vfmacc_vf_f64m4_tu(__VA_ARGS__) |
| #define | vfmacc_vf_f64m4_m(...) __riscv_vfmacc_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfmacc_vf_f64m8(...) __riscv_vfmacc_vf_f64m8_tu(__VA_ARGS__) |
| #define | vfmacc_vf_f64m8_m(...) __riscv_vfmacc_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfmacc_vv_f16m1(...) __riscv_vfmacc_vv_f16m1_tu(__VA_ARGS__) |
| #define | vfmacc_vv_f16m1_m(...) __riscv_vfmacc_vv_f16m1_tumu(__VA_ARGS__) |
| #define | vfmacc_vv_f16m2(...) __riscv_vfmacc_vv_f16m2_tu(__VA_ARGS__) |
| #define | vfmacc_vv_f16m2_m(...) __riscv_vfmacc_vv_f16m2_tumu(__VA_ARGS__) |
| #define | vfmacc_vv_f16m4(...) __riscv_vfmacc_vv_f16m4_tu(__VA_ARGS__) |
| #define | vfmacc_vv_f16m4_m(...) __riscv_vfmacc_vv_f16m4_tumu(__VA_ARGS__) |
| #define | vfmacc_vv_f16m8(...) __riscv_vfmacc_vv_f16m8_tu(__VA_ARGS__) |
| #define | vfmacc_vv_f16m8_m(...) __riscv_vfmacc_vv_f16m8_tumu(__VA_ARGS__) |
| #define | vfmacc_vv_f16mf2(...) __riscv_vfmacc_vv_f16mf2_tu(__VA_ARGS__) |
| #define | vfmacc_vv_f16mf2_m(...) __riscv_vfmacc_vv_f16mf2_tumu(__VA_ARGS__) |
| #define | vfmacc_vv_f16mf4(...) __riscv_vfmacc_vv_f16mf4_tu(__VA_ARGS__) |
| #define | vfmacc_vv_f16mf4_m(...) __riscv_vfmacc_vv_f16mf4_tumu(__VA_ARGS__) |
| #define | vfmacc_vv_f32m1(...) __riscv_vfmacc_vv_f32m1_tu(__VA_ARGS__) |
| #define | vfmacc_vv_f32m1_m(...) __riscv_vfmacc_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfmacc_vv_f32m2(...) __riscv_vfmacc_vv_f32m2_tu(__VA_ARGS__) |
| #define | vfmacc_vv_f32m2_m(...) __riscv_vfmacc_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfmacc_vv_f32m4(...) __riscv_vfmacc_vv_f32m4_tu(__VA_ARGS__) |
| #define | vfmacc_vv_f32m4_m(...) __riscv_vfmacc_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfmacc_vv_f32m8(...) __riscv_vfmacc_vv_f32m8_tu(__VA_ARGS__) |
| #define | vfmacc_vv_f32m8_m(...) __riscv_vfmacc_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfmacc_vv_f32mf2(...) __riscv_vfmacc_vv_f32mf2_tu(__VA_ARGS__) |
| #define | vfmacc_vv_f32mf2_m(...) __riscv_vfmacc_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfmacc_vv_f64m1(...) __riscv_vfmacc_vv_f64m1_tu(__VA_ARGS__) |
| #define | vfmacc_vv_f64m1_m(...) __riscv_vfmacc_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfmacc_vv_f64m2(...) __riscv_vfmacc_vv_f64m2_tu(__VA_ARGS__) |
| #define | vfmacc_vv_f64m2_m(...) __riscv_vfmacc_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfmacc_vv_f64m4(...) __riscv_vfmacc_vv_f64m4_tu(__VA_ARGS__) |
| #define | vfmacc_vv_f64m4_m(...) __riscv_vfmacc_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfmacc_vv_f64m8(...) __riscv_vfmacc_vv_f64m8_tu(__VA_ARGS__) |
| #define | vfmacc_vv_f64m8_m(...) __riscv_vfmacc_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfmadd_vf_f16m1(...) __riscv_vfmadd_vf_f16m1_tu(__VA_ARGS__) |
| #define | vfmadd_vf_f16m1_m(...) __riscv_vfmadd_vf_f16m1_tumu(__VA_ARGS__) |
| #define | vfmadd_vf_f16m2(...) __riscv_vfmadd_vf_f16m2_tu(__VA_ARGS__) |
| #define | vfmadd_vf_f16m2_m(...) __riscv_vfmadd_vf_f16m2_tumu(__VA_ARGS__) |
| #define | vfmadd_vf_f16m4(...) __riscv_vfmadd_vf_f16m4_tu(__VA_ARGS__) |
| #define | vfmadd_vf_f16m4_m(...) __riscv_vfmadd_vf_f16m4_tumu(__VA_ARGS__) |
| #define | vfmadd_vf_f16m8(...) __riscv_vfmadd_vf_f16m8_tu(__VA_ARGS__) |
| #define | vfmadd_vf_f16m8_m(...) __riscv_vfmadd_vf_f16m8_tumu(__VA_ARGS__) |
| #define | vfmadd_vf_f16mf2(...) __riscv_vfmadd_vf_f16mf2_tu(__VA_ARGS__) |
| #define | vfmadd_vf_f16mf2_m(...) __riscv_vfmadd_vf_f16mf2_tumu(__VA_ARGS__) |
| #define | vfmadd_vf_f16mf4(...) __riscv_vfmadd_vf_f16mf4_tu(__VA_ARGS__) |
| #define | vfmadd_vf_f16mf4_m(...) __riscv_vfmadd_vf_f16mf4_tumu(__VA_ARGS__) |
| #define | vfmadd_vf_f32m1(...) __riscv_vfmadd_vf_f32m1_tu(__VA_ARGS__) |
| #define | vfmadd_vf_f32m1_m(...) __riscv_vfmadd_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfmadd_vf_f32m2(...) __riscv_vfmadd_vf_f32m2_tu(__VA_ARGS__) |
| #define | vfmadd_vf_f32m2_m(...) __riscv_vfmadd_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfmadd_vf_f32m4(...) __riscv_vfmadd_vf_f32m4_tu(__VA_ARGS__) |
| #define | vfmadd_vf_f32m4_m(...) __riscv_vfmadd_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfmadd_vf_f32m8(...) __riscv_vfmadd_vf_f32m8_tu(__VA_ARGS__) |
| #define | vfmadd_vf_f32m8_m(...) __riscv_vfmadd_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfmadd_vf_f32mf2(...) __riscv_vfmadd_vf_f32mf2_tu(__VA_ARGS__) |
| #define | vfmadd_vf_f32mf2_m(...) __riscv_vfmadd_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfmadd_vf_f64m1(...) __riscv_vfmadd_vf_f64m1_tu(__VA_ARGS__) |
| #define | vfmadd_vf_f64m1_m(...) __riscv_vfmadd_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfmadd_vf_f64m2(...) __riscv_vfmadd_vf_f64m2_tu(__VA_ARGS__) |
| #define | vfmadd_vf_f64m2_m(...) __riscv_vfmadd_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfmadd_vf_f64m4(...) __riscv_vfmadd_vf_f64m4_tu(__VA_ARGS__) |
| #define | vfmadd_vf_f64m4_m(...) __riscv_vfmadd_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfmadd_vf_f64m8(...) __riscv_vfmadd_vf_f64m8_tu(__VA_ARGS__) |
| #define | vfmadd_vf_f64m8_m(...) __riscv_vfmadd_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfmadd_vv_f16m1(...) __riscv_vfmadd_vv_f16m1_tu(__VA_ARGS__) |
| #define | vfmadd_vv_f16m1_m(...) __riscv_vfmadd_vv_f16m1_tumu(__VA_ARGS__) |
| #define | vfmadd_vv_f16m2(...) __riscv_vfmadd_vv_f16m2_tu(__VA_ARGS__) |
| #define | vfmadd_vv_f16m2_m(...) __riscv_vfmadd_vv_f16m2_tumu(__VA_ARGS__) |
| #define | vfmadd_vv_f16m4(...) __riscv_vfmadd_vv_f16m4_tu(__VA_ARGS__) |
| #define | vfmadd_vv_f16m4_m(...) __riscv_vfmadd_vv_f16m4_tumu(__VA_ARGS__) |
| #define | vfmadd_vv_f16m8(...) __riscv_vfmadd_vv_f16m8_tu(__VA_ARGS__) |
| #define | vfmadd_vv_f16m8_m(...) __riscv_vfmadd_vv_f16m8_tumu(__VA_ARGS__) |
| #define | vfmadd_vv_f16mf2(...) __riscv_vfmadd_vv_f16mf2_tu(__VA_ARGS__) |
| #define | vfmadd_vv_f16mf2_m(...) __riscv_vfmadd_vv_f16mf2_tumu(__VA_ARGS__) |
| #define | vfmadd_vv_f16mf4(...) __riscv_vfmadd_vv_f16mf4_tu(__VA_ARGS__) |
| #define | vfmadd_vv_f16mf4_m(...) __riscv_vfmadd_vv_f16mf4_tumu(__VA_ARGS__) |
| #define | vfmadd_vv_f32m1(...) __riscv_vfmadd_vv_f32m1_tu(__VA_ARGS__) |
| #define | vfmadd_vv_f32m1_m(...) __riscv_vfmadd_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfmadd_vv_f32m2(...) __riscv_vfmadd_vv_f32m2_tu(__VA_ARGS__) |
| #define | vfmadd_vv_f32m2_m(...) __riscv_vfmadd_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfmadd_vv_f32m4(...) __riscv_vfmadd_vv_f32m4_tu(__VA_ARGS__) |
| #define | vfmadd_vv_f32m4_m(...) __riscv_vfmadd_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfmadd_vv_f32m8(...) __riscv_vfmadd_vv_f32m8_tu(__VA_ARGS__) |
| #define | vfmadd_vv_f32m8_m(...) __riscv_vfmadd_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfmadd_vv_f32mf2(...) __riscv_vfmadd_vv_f32mf2_tu(__VA_ARGS__) |
| #define | vfmadd_vv_f32mf2_m(...) __riscv_vfmadd_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfmadd_vv_f64m1(...) __riscv_vfmadd_vv_f64m1_tu(__VA_ARGS__) |
| #define | vfmadd_vv_f64m1_m(...) __riscv_vfmadd_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfmadd_vv_f64m2(...) __riscv_vfmadd_vv_f64m2_tu(__VA_ARGS__) |
| #define | vfmadd_vv_f64m2_m(...) __riscv_vfmadd_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfmadd_vv_f64m4(...) __riscv_vfmadd_vv_f64m4_tu(__VA_ARGS__) |
| #define | vfmadd_vv_f64m4_m(...) __riscv_vfmadd_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfmadd_vv_f64m8(...) __riscv_vfmadd_vv_f64m8_tu(__VA_ARGS__) |
| #define | vfmadd_vv_f64m8_m(...) __riscv_vfmadd_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfmax_vf_f16m1(...) __riscv_vfmax_vf_f16m1(__VA_ARGS__) |
| #define | vfmax_vf_f16m1_m(...) __riscv_vfmax_vf_f16m1_tumu(__VA_ARGS__) |
| #define | vfmax_vf_f16m2(...) __riscv_vfmax_vf_f16m2(__VA_ARGS__) |
| #define | vfmax_vf_f16m2_m(...) __riscv_vfmax_vf_f16m2_tumu(__VA_ARGS__) |
| #define | vfmax_vf_f16m4(...) __riscv_vfmax_vf_f16m4(__VA_ARGS__) |
| #define | vfmax_vf_f16m4_m(...) __riscv_vfmax_vf_f16m4_tumu(__VA_ARGS__) |
| #define | vfmax_vf_f16m8(...) __riscv_vfmax_vf_f16m8(__VA_ARGS__) |
| #define | vfmax_vf_f16m8_m(...) __riscv_vfmax_vf_f16m8_tumu(__VA_ARGS__) |
| #define | vfmax_vf_f16mf2(...) __riscv_vfmax_vf_f16mf2(__VA_ARGS__) |
| #define | vfmax_vf_f16mf2_m(...) __riscv_vfmax_vf_f16mf2_tumu(__VA_ARGS__) |
| #define | vfmax_vf_f16mf4(...) __riscv_vfmax_vf_f16mf4(__VA_ARGS__) |
| #define | vfmax_vf_f16mf4_m(...) __riscv_vfmax_vf_f16mf4_tumu(__VA_ARGS__) |
| #define | vfmax_vf_f32m1(...) __riscv_vfmax_vf_f32m1(__VA_ARGS__) |
| #define | vfmax_vf_f32m1_m(...) __riscv_vfmax_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfmax_vf_f32m2(...) __riscv_vfmax_vf_f32m2(__VA_ARGS__) |
| #define | vfmax_vf_f32m2_m(...) __riscv_vfmax_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfmax_vf_f32m4(...) __riscv_vfmax_vf_f32m4(__VA_ARGS__) |
| #define | vfmax_vf_f32m4_m(...) __riscv_vfmax_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfmax_vf_f32m8(...) __riscv_vfmax_vf_f32m8(__VA_ARGS__) |
| #define | vfmax_vf_f32m8_m(...) __riscv_vfmax_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfmax_vf_f32mf2(...) __riscv_vfmax_vf_f32mf2(__VA_ARGS__) |
| #define | vfmax_vf_f32mf2_m(...) __riscv_vfmax_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfmax_vf_f64m1(...) __riscv_vfmax_vf_f64m1(__VA_ARGS__) |
| #define | vfmax_vf_f64m1_m(...) __riscv_vfmax_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfmax_vf_f64m2(...) __riscv_vfmax_vf_f64m2(__VA_ARGS__) |
| #define | vfmax_vf_f64m2_m(...) __riscv_vfmax_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfmax_vf_f64m4(...) __riscv_vfmax_vf_f64m4(__VA_ARGS__) |
| #define | vfmax_vf_f64m4_m(...) __riscv_vfmax_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfmax_vf_f64m8(...) __riscv_vfmax_vf_f64m8(__VA_ARGS__) |
| #define | vfmax_vf_f64m8_m(...) __riscv_vfmax_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfmax_vv_f16m1(...) __riscv_vfmax_vv_f16m1(__VA_ARGS__) |
| #define | vfmax_vv_f16m1_m(...) __riscv_vfmax_vv_f16m1_tumu(__VA_ARGS__) |
| #define | vfmax_vv_f16m2(...) __riscv_vfmax_vv_f16m2(__VA_ARGS__) |
| #define | vfmax_vv_f16m2_m(...) __riscv_vfmax_vv_f16m2_tumu(__VA_ARGS__) |
| #define | vfmax_vv_f16m4(...) __riscv_vfmax_vv_f16m4(__VA_ARGS__) |
| #define | vfmax_vv_f16m4_m(...) __riscv_vfmax_vv_f16m4_tumu(__VA_ARGS__) |
| #define | vfmax_vv_f16m8(...) __riscv_vfmax_vv_f16m8(__VA_ARGS__) |
| #define | vfmax_vv_f16m8_m(...) __riscv_vfmax_vv_f16m8_tumu(__VA_ARGS__) |
| #define | vfmax_vv_f16mf2(...) __riscv_vfmax_vv_f16mf2(__VA_ARGS__) |
| #define | vfmax_vv_f16mf2_m(...) __riscv_vfmax_vv_f16mf2_tumu(__VA_ARGS__) |
| #define | vfmax_vv_f16mf4(...) __riscv_vfmax_vv_f16mf4(__VA_ARGS__) |
| #define | vfmax_vv_f16mf4_m(...) __riscv_vfmax_vv_f16mf4_tumu(__VA_ARGS__) |
| #define | vfmax_vv_f32m1(...) __riscv_vfmax_vv_f32m1(__VA_ARGS__) |
| #define | vfmax_vv_f32m1_m(...) __riscv_vfmax_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfmax_vv_f32m2(...) __riscv_vfmax_vv_f32m2(__VA_ARGS__) |
| #define | vfmax_vv_f32m2_m(...) __riscv_vfmax_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfmax_vv_f32m4(...) __riscv_vfmax_vv_f32m4(__VA_ARGS__) |
| #define | vfmax_vv_f32m4_m(...) __riscv_vfmax_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfmax_vv_f32m8(...) __riscv_vfmax_vv_f32m8(__VA_ARGS__) |
| #define | vfmax_vv_f32m8_m(...) __riscv_vfmax_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfmax_vv_f32mf2(...) __riscv_vfmax_vv_f32mf2(__VA_ARGS__) |
| #define | vfmax_vv_f32mf2_m(...) __riscv_vfmax_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfmax_vv_f64m1(...) __riscv_vfmax_vv_f64m1(__VA_ARGS__) |
| #define | vfmax_vv_f64m1_m(...) __riscv_vfmax_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfmax_vv_f64m2(...) __riscv_vfmax_vv_f64m2(__VA_ARGS__) |
| #define | vfmax_vv_f64m2_m(...) __riscv_vfmax_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfmax_vv_f64m4(...) __riscv_vfmax_vv_f64m4(__VA_ARGS__) |
| #define | vfmax_vv_f64m4_m(...) __riscv_vfmax_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfmax_vv_f64m8(...) __riscv_vfmax_vv_f64m8(__VA_ARGS__) |
| #define | vfmax_vv_f64m8_m(...) __riscv_vfmax_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfmerge_vfm_f16m1(mask, op1, op2, vl) __riscv_vfmerge_vfm_f16m1((op1), (op2), (mask), (vl)) |
| #define | vfmerge_vfm_f16m2(mask, op1, op2, vl) __riscv_vfmerge_vfm_f16m2((op1), (op2), (mask), (vl)) |
| #define | vfmerge_vfm_f16m4(mask, op1, op2, vl) __riscv_vfmerge_vfm_f16m4((op1), (op2), (mask), (vl)) |
| #define | vfmerge_vfm_f16m8(mask, op1, op2, vl) __riscv_vfmerge_vfm_f16m8((op1), (op2), (mask), (vl)) |
| #define | vfmerge_vfm_f16mf2(mask, op1, op2, vl) __riscv_vfmerge_vfm_f16mf2((op1), (op2), (mask), (vl)) |
| #define | vfmerge_vfm_f16mf4(mask, op1, op2, vl) __riscv_vfmerge_vfm_f16mf4((op1), (op2), (mask), (vl)) |
| #define | vfmerge_vfm_f32m1(mask, op1, op2, vl) __riscv_vfmerge_vfm_f32m1((op1), (op2), (mask), (vl)) |
| #define | vfmerge_vfm_f32m2(mask, op1, op2, vl) __riscv_vfmerge_vfm_f32m2((op1), (op2), (mask), (vl)) |
| #define | vfmerge_vfm_f32m4(mask, op1, op2, vl) __riscv_vfmerge_vfm_f32m4((op1), (op2), (mask), (vl)) |
| #define | vfmerge_vfm_f32m8(mask, op1, op2, vl) __riscv_vfmerge_vfm_f32m8((op1), (op2), (mask), (vl)) |
| #define | vfmerge_vfm_f32mf2(mask, op1, op2, vl) __riscv_vfmerge_vfm_f32mf2((op1), (op2), (mask), (vl)) |
| #define | vfmerge_vfm_f64m1(mask, op1, op2, vl) __riscv_vfmerge_vfm_f64m1((op1), (op2), (mask), (vl)) |
| #define | vfmerge_vfm_f64m2(mask, op1, op2, vl) __riscv_vfmerge_vfm_f64m2((op1), (op2), (mask), (vl)) |
| #define | vfmerge_vfm_f64m4(mask, op1, op2, vl) __riscv_vfmerge_vfm_f64m4((op1), (op2), (mask), (vl)) |
| #define | vfmerge_vfm_f64m8(mask, op1, op2, vl) __riscv_vfmerge_vfm_f64m8((op1), (op2), (mask), (vl)) |
| #define | vfmin_vf_f16m1(...) __riscv_vfmin_vf_f16m1(__VA_ARGS__) |
| #define | vfmin_vf_f16m1_m(...) __riscv_vfmin_vf_f16m1_tumu(__VA_ARGS__) |
| #define | vfmin_vf_f16m2(...) __riscv_vfmin_vf_f16m2(__VA_ARGS__) |
| #define | vfmin_vf_f16m2_m(...) __riscv_vfmin_vf_f16m2_tumu(__VA_ARGS__) |
| #define | vfmin_vf_f16m4(...) __riscv_vfmin_vf_f16m4(__VA_ARGS__) |
| #define | vfmin_vf_f16m4_m(...) __riscv_vfmin_vf_f16m4_tumu(__VA_ARGS__) |
| #define | vfmin_vf_f16m8(...) __riscv_vfmin_vf_f16m8(__VA_ARGS__) |
| #define | vfmin_vf_f16m8_m(...) __riscv_vfmin_vf_f16m8_tumu(__VA_ARGS__) |
| #define | vfmin_vf_f16mf2(...) __riscv_vfmin_vf_f16mf2(__VA_ARGS__) |
| #define | vfmin_vf_f16mf2_m(...) __riscv_vfmin_vf_f16mf2_tumu(__VA_ARGS__) |
| #define | vfmin_vf_f16mf4(...) __riscv_vfmin_vf_f16mf4(__VA_ARGS__) |
| #define | vfmin_vf_f16mf4_m(...) __riscv_vfmin_vf_f16mf4_tumu(__VA_ARGS__) |
| #define | vfmin_vf_f32m1(...) __riscv_vfmin_vf_f32m1(__VA_ARGS__) |
| #define | vfmin_vf_f32m1_m(...) __riscv_vfmin_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfmin_vf_f32m2(...) __riscv_vfmin_vf_f32m2(__VA_ARGS__) |
| #define | vfmin_vf_f32m2_m(...) __riscv_vfmin_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfmin_vf_f32m4(...) __riscv_vfmin_vf_f32m4(__VA_ARGS__) |
| #define | vfmin_vf_f32m4_m(...) __riscv_vfmin_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfmin_vf_f32m8(...) __riscv_vfmin_vf_f32m8(__VA_ARGS__) |
| #define | vfmin_vf_f32m8_m(...) __riscv_vfmin_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfmin_vf_f32mf2(...) __riscv_vfmin_vf_f32mf2(__VA_ARGS__) |
| #define | vfmin_vf_f32mf2_m(...) __riscv_vfmin_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfmin_vf_f64m1(...) __riscv_vfmin_vf_f64m1(__VA_ARGS__) |
| #define | vfmin_vf_f64m1_m(...) __riscv_vfmin_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfmin_vf_f64m2(...) __riscv_vfmin_vf_f64m2(__VA_ARGS__) |
| #define | vfmin_vf_f64m2_m(...) __riscv_vfmin_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfmin_vf_f64m4(...) __riscv_vfmin_vf_f64m4(__VA_ARGS__) |
| #define | vfmin_vf_f64m4_m(...) __riscv_vfmin_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfmin_vf_f64m8(...) __riscv_vfmin_vf_f64m8(__VA_ARGS__) |
| #define | vfmin_vf_f64m8_m(...) __riscv_vfmin_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfmin_vv_f16m1(...) __riscv_vfmin_vv_f16m1(__VA_ARGS__) |
| #define | vfmin_vv_f16m1_m(...) __riscv_vfmin_vv_f16m1_tumu(__VA_ARGS__) |
| #define | vfmin_vv_f16m2(...) __riscv_vfmin_vv_f16m2(__VA_ARGS__) |
| #define | vfmin_vv_f16m2_m(...) __riscv_vfmin_vv_f16m2_tumu(__VA_ARGS__) |
| #define | vfmin_vv_f16m4(...) __riscv_vfmin_vv_f16m4(__VA_ARGS__) |
| #define | vfmin_vv_f16m4_m(...) __riscv_vfmin_vv_f16m4_tumu(__VA_ARGS__) |
| #define | vfmin_vv_f16m8(...) __riscv_vfmin_vv_f16m8(__VA_ARGS__) |
| #define | vfmin_vv_f16m8_m(...) __riscv_vfmin_vv_f16m8_tumu(__VA_ARGS__) |
| #define | vfmin_vv_f16mf2(...) __riscv_vfmin_vv_f16mf2(__VA_ARGS__) |
| #define | vfmin_vv_f16mf2_m(...) __riscv_vfmin_vv_f16mf2_tumu(__VA_ARGS__) |
| #define | vfmin_vv_f16mf4(...) __riscv_vfmin_vv_f16mf4(__VA_ARGS__) |
| #define | vfmin_vv_f16mf4_m(...) __riscv_vfmin_vv_f16mf4_tumu(__VA_ARGS__) |
| #define | vfmin_vv_f32m1(...) __riscv_vfmin_vv_f32m1(__VA_ARGS__) |
| #define | vfmin_vv_f32m1_m(...) __riscv_vfmin_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfmin_vv_f32m2(...) __riscv_vfmin_vv_f32m2(__VA_ARGS__) |
| #define | vfmin_vv_f32m2_m(...) __riscv_vfmin_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfmin_vv_f32m4(...) __riscv_vfmin_vv_f32m4(__VA_ARGS__) |
| #define | vfmin_vv_f32m4_m(...) __riscv_vfmin_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfmin_vv_f32m8(...) __riscv_vfmin_vv_f32m8(__VA_ARGS__) |
| #define | vfmin_vv_f32m8_m(...) __riscv_vfmin_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfmin_vv_f32mf2(...) __riscv_vfmin_vv_f32mf2(__VA_ARGS__) |
| #define | vfmin_vv_f32mf2_m(...) __riscv_vfmin_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfmin_vv_f64m1(...) __riscv_vfmin_vv_f64m1(__VA_ARGS__) |
| #define | vfmin_vv_f64m1_m(...) __riscv_vfmin_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfmin_vv_f64m2(...) __riscv_vfmin_vv_f64m2(__VA_ARGS__) |
| #define | vfmin_vv_f64m2_m(...) __riscv_vfmin_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfmin_vv_f64m4(...) __riscv_vfmin_vv_f64m4(__VA_ARGS__) |
| #define | vfmin_vv_f64m4_m(...) __riscv_vfmin_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfmin_vv_f64m8(...) __riscv_vfmin_vv_f64m8(__VA_ARGS__) |
| #define | vfmin_vv_f64m8_m(...) __riscv_vfmin_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfmsac_vf_f16m1(...) __riscv_vfmsac_vf_f16m1_tu(__VA_ARGS__) |
| #define | vfmsac_vf_f16m1_m(...) __riscv_vfmsac_vf_f16m1_tumu(__VA_ARGS__) |
| #define | vfmsac_vf_f16m2(...) __riscv_vfmsac_vf_f16m2_tu(__VA_ARGS__) |
| #define | vfmsac_vf_f16m2_m(...) __riscv_vfmsac_vf_f16m2_tumu(__VA_ARGS__) |
| #define | vfmsac_vf_f16m4(...) __riscv_vfmsac_vf_f16m4_tu(__VA_ARGS__) |
| #define | vfmsac_vf_f16m4_m(...) __riscv_vfmsac_vf_f16m4_tumu(__VA_ARGS__) |
| #define | vfmsac_vf_f16m8(...) __riscv_vfmsac_vf_f16m8_tu(__VA_ARGS__) |
| #define | vfmsac_vf_f16m8_m(...) __riscv_vfmsac_vf_f16m8_tumu(__VA_ARGS__) |
| #define | vfmsac_vf_f16mf2(...) __riscv_vfmsac_vf_f16mf2_tu(__VA_ARGS__) |
| #define | vfmsac_vf_f16mf2_m(...) __riscv_vfmsac_vf_f16mf2_tumu(__VA_ARGS__) |
| #define | vfmsac_vf_f16mf4(...) __riscv_vfmsac_vf_f16mf4_tu(__VA_ARGS__) |
| #define | vfmsac_vf_f16mf4_m(...) __riscv_vfmsac_vf_f16mf4_tumu(__VA_ARGS__) |
| #define | vfmsac_vf_f32m1(...) __riscv_vfmsac_vf_f32m1_tu(__VA_ARGS__) |
| #define | vfmsac_vf_f32m1_m(...) __riscv_vfmsac_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfmsac_vf_f32m2(...) __riscv_vfmsac_vf_f32m2_tu(__VA_ARGS__) |
| #define | vfmsac_vf_f32m2_m(...) __riscv_vfmsac_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfmsac_vf_f32m4(...) __riscv_vfmsac_vf_f32m4_tu(__VA_ARGS__) |
| #define | vfmsac_vf_f32m4_m(...) __riscv_vfmsac_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfmsac_vf_f32m8(...) __riscv_vfmsac_vf_f32m8_tu(__VA_ARGS__) |
| #define | vfmsac_vf_f32m8_m(...) __riscv_vfmsac_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfmsac_vf_f32mf2(...) __riscv_vfmsac_vf_f32mf2_tu(__VA_ARGS__) |
| #define | vfmsac_vf_f32mf2_m(...) __riscv_vfmsac_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfmsac_vf_f64m1(...) __riscv_vfmsac_vf_f64m1_tu(__VA_ARGS__) |
| #define | vfmsac_vf_f64m1_m(...) __riscv_vfmsac_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfmsac_vf_f64m2(...) __riscv_vfmsac_vf_f64m2_tu(__VA_ARGS__) |
| #define | vfmsac_vf_f64m2_m(...) __riscv_vfmsac_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfmsac_vf_f64m4(...) __riscv_vfmsac_vf_f64m4_tu(__VA_ARGS__) |
| #define | vfmsac_vf_f64m4_m(...) __riscv_vfmsac_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfmsac_vf_f64m8(...) __riscv_vfmsac_vf_f64m8_tu(__VA_ARGS__) |
| #define | vfmsac_vf_f64m8_m(...) __riscv_vfmsac_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfmsac_vv_f16m1(...) __riscv_vfmsac_vv_f16m1_tu(__VA_ARGS__) |
| #define | vfmsac_vv_f16m1_m(...) __riscv_vfmsac_vv_f16m1_tumu(__VA_ARGS__) |
| #define | vfmsac_vv_f16m2(...) __riscv_vfmsac_vv_f16m2_tu(__VA_ARGS__) |
| #define | vfmsac_vv_f16m2_m(...) __riscv_vfmsac_vv_f16m2_tumu(__VA_ARGS__) |
| #define | vfmsac_vv_f16m4(...) __riscv_vfmsac_vv_f16m4_tu(__VA_ARGS__) |
| #define | vfmsac_vv_f16m4_m(...) __riscv_vfmsac_vv_f16m4_tumu(__VA_ARGS__) |
| #define | vfmsac_vv_f16m8(...) __riscv_vfmsac_vv_f16m8_tu(__VA_ARGS__) |
| #define | vfmsac_vv_f16m8_m(...) __riscv_vfmsac_vv_f16m8_tumu(__VA_ARGS__) |
| #define | vfmsac_vv_f16mf2(...) __riscv_vfmsac_vv_f16mf2_tu(__VA_ARGS__) |
| #define | vfmsac_vv_f16mf2_m(...) __riscv_vfmsac_vv_f16mf2_tumu(__VA_ARGS__) |
| #define | vfmsac_vv_f16mf4(...) __riscv_vfmsac_vv_f16mf4_tu(__VA_ARGS__) |
| #define | vfmsac_vv_f16mf4_m(...) __riscv_vfmsac_vv_f16mf4_tumu(__VA_ARGS__) |
| #define | vfmsac_vv_f32m1(...) __riscv_vfmsac_vv_f32m1_tu(__VA_ARGS__) |
| #define | vfmsac_vv_f32m1_m(...) __riscv_vfmsac_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfmsac_vv_f32m2(...) __riscv_vfmsac_vv_f32m2_tu(__VA_ARGS__) |
| #define | vfmsac_vv_f32m2_m(...) __riscv_vfmsac_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfmsac_vv_f32m4(...) __riscv_vfmsac_vv_f32m4_tu(__VA_ARGS__) |
| #define | vfmsac_vv_f32m4_m(...) __riscv_vfmsac_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfmsac_vv_f32m8(...) __riscv_vfmsac_vv_f32m8_tu(__VA_ARGS__) |
| #define | vfmsac_vv_f32m8_m(...) __riscv_vfmsac_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfmsac_vv_f32mf2(...) __riscv_vfmsac_vv_f32mf2_tu(__VA_ARGS__) |
| #define | vfmsac_vv_f32mf2_m(...) __riscv_vfmsac_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfmsac_vv_f64m1(...) __riscv_vfmsac_vv_f64m1_tu(__VA_ARGS__) |
| #define | vfmsac_vv_f64m1_m(...) __riscv_vfmsac_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfmsac_vv_f64m2(...) __riscv_vfmsac_vv_f64m2_tu(__VA_ARGS__) |
| #define | vfmsac_vv_f64m2_m(...) __riscv_vfmsac_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfmsac_vv_f64m4(...) __riscv_vfmsac_vv_f64m4_tu(__VA_ARGS__) |
| #define | vfmsac_vv_f64m4_m(...) __riscv_vfmsac_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfmsac_vv_f64m8(...) __riscv_vfmsac_vv_f64m8_tu(__VA_ARGS__) |
| #define | vfmsac_vv_f64m8_m(...) __riscv_vfmsac_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfmsub_vf_f16m1(...) __riscv_vfmsub_vf_f16m1_tu(__VA_ARGS__) |
| #define | vfmsub_vf_f16m1_m(...) __riscv_vfmsub_vf_f16m1_tumu(__VA_ARGS__) |
| #define | vfmsub_vf_f16m2(...) __riscv_vfmsub_vf_f16m2_tu(__VA_ARGS__) |
| #define | vfmsub_vf_f16m2_m(...) __riscv_vfmsub_vf_f16m2_tumu(__VA_ARGS__) |
| #define | vfmsub_vf_f16m4(...) __riscv_vfmsub_vf_f16m4_tu(__VA_ARGS__) |
| #define | vfmsub_vf_f16m4_m(...) __riscv_vfmsub_vf_f16m4_tumu(__VA_ARGS__) |
| #define | vfmsub_vf_f16m8(...) __riscv_vfmsub_vf_f16m8_tu(__VA_ARGS__) |
| #define | vfmsub_vf_f16m8_m(...) __riscv_vfmsub_vf_f16m8_tumu(__VA_ARGS__) |
| #define | vfmsub_vf_f16mf2(...) __riscv_vfmsub_vf_f16mf2_tu(__VA_ARGS__) |
| #define | vfmsub_vf_f16mf2_m(...) __riscv_vfmsub_vf_f16mf2_tumu(__VA_ARGS__) |
| #define | vfmsub_vf_f16mf4(...) __riscv_vfmsub_vf_f16mf4_tu(__VA_ARGS__) |
| #define | vfmsub_vf_f16mf4_m(...) __riscv_vfmsub_vf_f16mf4_tumu(__VA_ARGS__) |
| #define | vfmsub_vf_f32m1(...) __riscv_vfmsub_vf_f32m1_tu(__VA_ARGS__) |
| #define | vfmsub_vf_f32m1_m(...) __riscv_vfmsub_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfmsub_vf_f32m2(...) __riscv_vfmsub_vf_f32m2_tu(__VA_ARGS__) |
| #define | vfmsub_vf_f32m2_m(...) __riscv_vfmsub_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfmsub_vf_f32m4(...) __riscv_vfmsub_vf_f32m4_tu(__VA_ARGS__) |
| #define | vfmsub_vf_f32m4_m(...) __riscv_vfmsub_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfmsub_vf_f32m8(...) __riscv_vfmsub_vf_f32m8_tu(__VA_ARGS__) |
| #define | vfmsub_vf_f32m8_m(...) __riscv_vfmsub_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfmsub_vf_f32mf2(...) __riscv_vfmsub_vf_f32mf2_tu(__VA_ARGS__) |
| #define | vfmsub_vf_f32mf2_m(...) __riscv_vfmsub_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfmsub_vf_f64m1(...) __riscv_vfmsub_vf_f64m1_tu(__VA_ARGS__) |
| #define | vfmsub_vf_f64m1_m(...) __riscv_vfmsub_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfmsub_vf_f64m2(...) __riscv_vfmsub_vf_f64m2_tu(__VA_ARGS__) |
| #define | vfmsub_vf_f64m2_m(...) __riscv_vfmsub_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfmsub_vf_f64m4(...) __riscv_vfmsub_vf_f64m4_tu(__VA_ARGS__) |
| #define | vfmsub_vf_f64m4_m(...) __riscv_vfmsub_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfmsub_vf_f64m8(...) __riscv_vfmsub_vf_f64m8_tu(__VA_ARGS__) |
| #define | vfmsub_vf_f64m8_m(...) __riscv_vfmsub_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfmsub_vv_f16m1(...) __riscv_vfmsub_vv_f16m1_tu(__VA_ARGS__) |
| #define | vfmsub_vv_f16m1_m(...) __riscv_vfmsub_vv_f16m1_tumu(__VA_ARGS__) |
| #define | vfmsub_vv_f16m2(...) __riscv_vfmsub_vv_f16m2_tu(__VA_ARGS__) |
| #define | vfmsub_vv_f16m2_m(...) __riscv_vfmsub_vv_f16m2_tumu(__VA_ARGS__) |
| #define | vfmsub_vv_f16m4(...) __riscv_vfmsub_vv_f16m4_tu(__VA_ARGS__) |
| #define | vfmsub_vv_f16m4_m(...) __riscv_vfmsub_vv_f16m4_tumu(__VA_ARGS__) |
| #define | vfmsub_vv_f16m8(...) __riscv_vfmsub_vv_f16m8_tu(__VA_ARGS__) |
| #define | vfmsub_vv_f16m8_m(...) __riscv_vfmsub_vv_f16m8_tumu(__VA_ARGS__) |
| #define | vfmsub_vv_f16mf2(...) __riscv_vfmsub_vv_f16mf2_tu(__VA_ARGS__) |
| #define | vfmsub_vv_f16mf2_m(...) __riscv_vfmsub_vv_f16mf2_tumu(__VA_ARGS__) |
| #define | vfmsub_vv_f16mf4(...) __riscv_vfmsub_vv_f16mf4_tu(__VA_ARGS__) |
| #define | vfmsub_vv_f16mf4_m(...) __riscv_vfmsub_vv_f16mf4_tumu(__VA_ARGS__) |
| #define | vfmsub_vv_f32m1(...) __riscv_vfmsub_vv_f32m1_tu(__VA_ARGS__) |
| #define | vfmsub_vv_f32m1_m(...) __riscv_vfmsub_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfmsub_vv_f32m2(...) __riscv_vfmsub_vv_f32m2_tu(__VA_ARGS__) |
| #define | vfmsub_vv_f32m2_m(...) __riscv_vfmsub_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfmsub_vv_f32m4(...) __riscv_vfmsub_vv_f32m4_tu(__VA_ARGS__) |
| #define | vfmsub_vv_f32m4_m(...) __riscv_vfmsub_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfmsub_vv_f32m8(...) __riscv_vfmsub_vv_f32m8_tu(__VA_ARGS__) |
| #define | vfmsub_vv_f32m8_m(...) __riscv_vfmsub_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfmsub_vv_f32mf2(...) __riscv_vfmsub_vv_f32mf2_tu(__VA_ARGS__) |
| #define | vfmsub_vv_f32mf2_m(...) __riscv_vfmsub_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfmsub_vv_f64m1(...) __riscv_vfmsub_vv_f64m1_tu(__VA_ARGS__) |
| #define | vfmsub_vv_f64m1_m(...) __riscv_vfmsub_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfmsub_vv_f64m2(...) __riscv_vfmsub_vv_f64m2_tu(__VA_ARGS__) |
| #define | vfmsub_vv_f64m2_m(...) __riscv_vfmsub_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfmsub_vv_f64m4(...) __riscv_vfmsub_vv_f64m4_tu(__VA_ARGS__) |
| #define | vfmsub_vv_f64m4_m(...) __riscv_vfmsub_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfmsub_vv_f64m8(...) __riscv_vfmsub_vv_f64m8_tu(__VA_ARGS__) |
| #define | vfmsub_vv_f64m8_m(...) __riscv_vfmsub_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfmul_vf_f16m1(...) __riscv_vfmul_vf_f16m1(__VA_ARGS__) |
| #define | vfmul_vf_f16m1_m(...) __riscv_vfmul_vf_f16m1_tumu(__VA_ARGS__) |
| #define | vfmul_vf_f16m2(...) __riscv_vfmul_vf_f16m2(__VA_ARGS__) |
| #define | vfmul_vf_f16m2_m(...) __riscv_vfmul_vf_f16m2_tumu(__VA_ARGS__) |
| #define | vfmul_vf_f16m4(...) __riscv_vfmul_vf_f16m4(__VA_ARGS__) |
| #define | vfmul_vf_f16m4_m(...) __riscv_vfmul_vf_f16m4_tumu(__VA_ARGS__) |
| #define | vfmul_vf_f16m8(...) __riscv_vfmul_vf_f16m8(__VA_ARGS__) |
| #define | vfmul_vf_f16m8_m(...) __riscv_vfmul_vf_f16m8_tumu(__VA_ARGS__) |
| #define | vfmul_vf_f16mf2(...) __riscv_vfmul_vf_f16mf2(__VA_ARGS__) |
| #define | vfmul_vf_f16mf2_m(...) __riscv_vfmul_vf_f16mf2_tumu(__VA_ARGS__) |
| #define | vfmul_vf_f16mf4(...) __riscv_vfmul_vf_f16mf4(__VA_ARGS__) |
| #define | vfmul_vf_f16mf4_m(...) __riscv_vfmul_vf_f16mf4_tumu(__VA_ARGS__) |
| #define | vfmul_vf_f32m1(...) __riscv_vfmul_vf_f32m1(__VA_ARGS__) |
| #define | vfmul_vf_f32m1_m(...) __riscv_vfmul_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfmul_vf_f32m2(...) __riscv_vfmul_vf_f32m2(__VA_ARGS__) |
| #define | vfmul_vf_f32m2_m(...) __riscv_vfmul_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfmul_vf_f32m4(...) __riscv_vfmul_vf_f32m4(__VA_ARGS__) |
| #define | vfmul_vf_f32m4_m(...) __riscv_vfmul_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfmul_vf_f32m8(...) __riscv_vfmul_vf_f32m8(__VA_ARGS__) |
| #define | vfmul_vf_f32m8_m(...) __riscv_vfmul_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfmul_vf_f32mf2(...) __riscv_vfmul_vf_f32mf2(__VA_ARGS__) |
| #define | vfmul_vf_f32mf2_m(...) __riscv_vfmul_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfmul_vf_f64m1(...) __riscv_vfmul_vf_f64m1(__VA_ARGS__) |
| #define | vfmul_vf_f64m1_m(...) __riscv_vfmul_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfmul_vf_f64m2(...) __riscv_vfmul_vf_f64m2(__VA_ARGS__) |
| #define | vfmul_vf_f64m2_m(...) __riscv_vfmul_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfmul_vf_f64m4(...) __riscv_vfmul_vf_f64m4(__VA_ARGS__) |
| #define | vfmul_vf_f64m4_m(...) __riscv_vfmul_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfmul_vf_f64m8(...) __riscv_vfmul_vf_f64m8(__VA_ARGS__) |
| #define | vfmul_vf_f64m8_m(...) __riscv_vfmul_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfmul_vv_f16m1(...) __riscv_vfmul_vv_f16m1(__VA_ARGS__) |
| #define | vfmul_vv_f16m1_m(...) __riscv_vfmul_vv_f16m1_tumu(__VA_ARGS__) |
| #define | vfmul_vv_f16m2(...) __riscv_vfmul_vv_f16m2(__VA_ARGS__) |
| #define | vfmul_vv_f16m2_m(...) __riscv_vfmul_vv_f16m2_tumu(__VA_ARGS__) |
| #define | vfmul_vv_f16m4(...) __riscv_vfmul_vv_f16m4(__VA_ARGS__) |
| #define | vfmul_vv_f16m4_m(...) __riscv_vfmul_vv_f16m4_tumu(__VA_ARGS__) |
| #define | vfmul_vv_f16m8(...) __riscv_vfmul_vv_f16m8(__VA_ARGS__) |
| #define | vfmul_vv_f16m8_m(...) __riscv_vfmul_vv_f16m8_tumu(__VA_ARGS__) |
| #define | vfmul_vv_f16mf2(...) __riscv_vfmul_vv_f16mf2(__VA_ARGS__) |
| #define | vfmul_vv_f16mf2_m(...) __riscv_vfmul_vv_f16mf2_tumu(__VA_ARGS__) |
| #define | vfmul_vv_f16mf4(...) __riscv_vfmul_vv_f16mf4(__VA_ARGS__) |
| #define | vfmul_vv_f16mf4_m(...) __riscv_vfmul_vv_f16mf4_tumu(__VA_ARGS__) |
| #define | vfmul_vv_f32m1(...) __riscv_vfmul_vv_f32m1(__VA_ARGS__) |
| #define | vfmul_vv_f32m1_m(...) __riscv_vfmul_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfmul_vv_f32m2(...) __riscv_vfmul_vv_f32m2(__VA_ARGS__) |
| #define | vfmul_vv_f32m2_m(...) __riscv_vfmul_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfmul_vv_f32m4(...) __riscv_vfmul_vv_f32m4(__VA_ARGS__) |
| #define | vfmul_vv_f32m4_m(...) __riscv_vfmul_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfmul_vv_f32m8(...) __riscv_vfmul_vv_f32m8(__VA_ARGS__) |
| #define | vfmul_vv_f32m8_m(...) __riscv_vfmul_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfmul_vv_f32mf2(...) __riscv_vfmul_vv_f32mf2(__VA_ARGS__) |
| #define | vfmul_vv_f32mf2_m(...) __riscv_vfmul_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfmul_vv_f64m1(...) __riscv_vfmul_vv_f64m1(__VA_ARGS__) |
| #define | vfmul_vv_f64m1_m(...) __riscv_vfmul_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfmul_vv_f64m2(...) __riscv_vfmul_vv_f64m2(__VA_ARGS__) |
| #define | vfmul_vv_f64m2_m(...) __riscv_vfmul_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfmul_vv_f64m4(...) __riscv_vfmul_vv_f64m4(__VA_ARGS__) |
| #define | vfmul_vv_f64m4_m(...) __riscv_vfmul_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfmul_vv_f64m8(...) __riscv_vfmul_vv_f64m8(__VA_ARGS__) |
| #define | vfmul_vv_f64m8_m(...) __riscv_vfmul_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfmv_f_s_f16m1_f16(...) __riscv_vfmv_f_s_f16m1_f16(__VA_ARGS__) |
| #define | vfmv_f_s_f16m2_f16(...) __riscv_vfmv_f_s_f16m2_f16(__VA_ARGS__) |
| #define | vfmv_f_s_f16m4_f16(...) __riscv_vfmv_f_s_f16m4_f16(__VA_ARGS__) |
| #define | vfmv_f_s_f16m8_f16(...) __riscv_vfmv_f_s_f16m8_f16(__VA_ARGS__) |
| #define | vfmv_f_s_f16mf2_f16(...) __riscv_vfmv_f_s_f16mf2_f16(__VA_ARGS__) |
| #define | vfmv_f_s_f16mf4_f16(...) __riscv_vfmv_f_s_f16mf4_f16(__VA_ARGS__) |
| #define | vfmv_f_s_f32m1_f32(...) __riscv_vfmv_f_s_f32m1_f32(__VA_ARGS__) |
| #define | vfmv_f_s_f32m2_f32(...) __riscv_vfmv_f_s_f32m2_f32(__VA_ARGS__) |
| #define | vfmv_f_s_f32m4_f32(...) __riscv_vfmv_f_s_f32m4_f32(__VA_ARGS__) |
| #define | vfmv_f_s_f32m8_f32(...) __riscv_vfmv_f_s_f32m8_f32(__VA_ARGS__) |
| #define | vfmv_f_s_f32mf2_f32(...) __riscv_vfmv_f_s_f32mf2_f32(__VA_ARGS__) |
| #define | vfmv_f_s_f64m1_f64(...) __riscv_vfmv_f_s_f64m1_f64(__VA_ARGS__) |
| #define | vfmv_f_s_f64m2_f64(...) __riscv_vfmv_f_s_f64m2_f64(__VA_ARGS__) |
| #define | vfmv_f_s_f64m4_f64(...) __riscv_vfmv_f_s_f64m4_f64(__VA_ARGS__) |
| #define | vfmv_f_s_f64m8_f64(...) __riscv_vfmv_f_s_f64m8_f64(__VA_ARGS__) |
| #define | vfmv_s_f_f16m1(...) __riscv_vfmv_s_f_f16m1_tu(__VA_ARGS__) |
| #define | vfmv_s_f_f16m2(...) __riscv_vfmv_s_f_f16m2_tu(__VA_ARGS__) |
| #define | vfmv_s_f_f16m4(...) __riscv_vfmv_s_f_f16m4_tu(__VA_ARGS__) |
| #define | vfmv_s_f_f16m8(...) __riscv_vfmv_s_f_f16m8_tu(__VA_ARGS__) |
| #define | vfmv_s_f_f16mf2(...) __riscv_vfmv_s_f_f16mf2_tu(__VA_ARGS__) |
| #define | vfmv_s_f_f16mf4(...) __riscv_vfmv_s_f_f16mf4_tu(__VA_ARGS__) |
| #define | vfmv_s_f_f32m1(...) __riscv_vfmv_s_f_f32m1_tu(__VA_ARGS__) |
| #define | vfmv_s_f_f32m2(...) __riscv_vfmv_s_f_f32m2_tu(__VA_ARGS__) |
| #define | vfmv_s_f_f32m4(...) __riscv_vfmv_s_f_f32m4_tu(__VA_ARGS__) |
| #define | vfmv_s_f_f32m8(...) __riscv_vfmv_s_f_f32m8_tu(__VA_ARGS__) |
| #define | vfmv_s_f_f32mf2(...) __riscv_vfmv_s_f_f32mf2_tu(__VA_ARGS__) |
| #define | vfmv_s_f_f64m1(...) __riscv_vfmv_s_f_f64m1_tu(__VA_ARGS__) |
| #define | vfmv_s_f_f64m2(...) __riscv_vfmv_s_f_f64m2_tu(__VA_ARGS__) |
| #define | vfmv_s_f_f64m4(...) __riscv_vfmv_s_f_f64m4_tu(__VA_ARGS__) |
| #define | vfmv_s_f_f64m8(...) __riscv_vfmv_s_f_f64m8_tu(__VA_ARGS__) |
| #define | vfmv_v_f_f16m1(...) __riscv_vfmv_v_f_f16m1(__VA_ARGS__) |
| #define | vfmv_v_f_f16m2(...) __riscv_vfmv_v_f_f16m2(__VA_ARGS__) |
| #define | vfmv_v_f_f16m4(...) __riscv_vfmv_v_f_f16m4(__VA_ARGS__) |
| #define | vfmv_v_f_f16m8(...) __riscv_vfmv_v_f_f16m8(__VA_ARGS__) |
| #define | vfmv_v_f_f16mf2(...) __riscv_vfmv_v_f_f16mf2(__VA_ARGS__) |
| #define | vfmv_v_f_f16mf4(...) __riscv_vfmv_v_f_f16mf4(__VA_ARGS__) |
| #define | vfmv_v_f_f32m1(...) __riscv_vfmv_v_f_f32m1(__VA_ARGS__) |
| #define | vfmv_v_f_f32m2(...) __riscv_vfmv_v_f_f32m2(__VA_ARGS__) |
| #define | vfmv_v_f_f32m4(...) __riscv_vfmv_v_f_f32m4(__VA_ARGS__) |
| #define | vfmv_v_f_f32m8(...) __riscv_vfmv_v_f_f32m8(__VA_ARGS__) |
| #define | vfmv_v_f_f32mf2(...) __riscv_vfmv_v_f_f32mf2(__VA_ARGS__) |
| #define | vfmv_v_f_f64m1(...) __riscv_vfmv_v_f_f64m1(__VA_ARGS__) |
| #define | vfmv_v_f_f64m2(...) __riscv_vfmv_v_f_f64m2(__VA_ARGS__) |
| #define | vfmv_v_f_f64m4(...) __riscv_vfmv_v_f_f64m4(__VA_ARGS__) |
| #define | vfmv_v_f_f64m8(...) __riscv_vfmv_v_f_f64m8(__VA_ARGS__) |
| #define | vfncvt_f_f_w_f16m1(...) __riscv_vfncvt_f_f_w_f16m1(__VA_ARGS__) |
| #define | vfncvt_f_f_w_f16m1_m(...) __riscv_vfncvt_f_f_w_f16m1_tumu(__VA_ARGS__) |
| #define | vfncvt_f_f_w_f16m2(...) __riscv_vfncvt_f_f_w_f16m2(__VA_ARGS__) |
| #define | vfncvt_f_f_w_f16m2_m(...) __riscv_vfncvt_f_f_w_f16m2_tumu(__VA_ARGS__) |
| #define | vfncvt_f_f_w_f16m4(...) __riscv_vfncvt_f_f_w_f16m4(__VA_ARGS__) |
| #define | vfncvt_f_f_w_f16m4_m(...) __riscv_vfncvt_f_f_w_f16m4_tumu(__VA_ARGS__) |
| #define | vfncvt_f_f_w_f16mf2(...) __riscv_vfncvt_f_f_w_f16mf2(__VA_ARGS__) |
| #define | vfncvt_f_f_w_f16mf2_m(...) __riscv_vfncvt_f_f_w_f16mf2_tumu(__VA_ARGS__) |
| #define | vfncvt_f_f_w_f16mf4(...) __riscv_vfncvt_f_f_w_f16mf4(__VA_ARGS__) |
| #define | vfncvt_f_f_w_f16mf4_m(...) __riscv_vfncvt_f_f_w_f16mf4_tumu(__VA_ARGS__) |
| #define | vfncvt_f_f_w_f32m1(...) __riscv_vfncvt_f_f_w_f32m1(__VA_ARGS__) |
| #define | vfncvt_f_f_w_f32m1_m(...) __riscv_vfncvt_f_f_w_f32m1_tumu(__VA_ARGS__) |
| #define | vfncvt_f_f_w_f32m2(...) __riscv_vfncvt_f_f_w_f32m2(__VA_ARGS__) |
| #define | vfncvt_f_f_w_f32m2_m(...) __riscv_vfncvt_f_f_w_f32m2_tumu(__VA_ARGS__) |
| #define | vfncvt_f_f_w_f32m4(...) __riscv_vfncvt_f_f_w_f32m4(__VA_ARGS__) |
| #define | vfncvt_f_f_w_f32m4_m(...) __riscv_vfncvt_f_f_w_f32m4_tumu(__VA_ARGS__) |
| #define | vfncvt_f_f_w_f32mf2(...) __riscv_vfncvt_f_f_w_f32mf2(__VA_ARGS__) |
| #define | vfncvt_f_f_w_f32mf2_m(...) __riscv_vfncvt_f_f_w_f32mf2_tumu(__VA_ARGS__) |
| #define | vfncvt_f_x_w_f16m1(...) __riscv_vfncvt_f_x_w_f16m1(__VA_ARGS__) |
| #define | vfncvt_f_x_w_f16m1_m(...) __riscv_vfncvt_f_x_w_f16m1_tumu(__VA_ARGS__) |
| #define | vfncvt_f_x_w_f16m2(...) __riscv_vfncvt_f_x_w_f16m2(__VA_ARGS__) |
| #define | vfncvt_f_x_w_f16m2_m(...) __riscv_vfncvt_f_x_w_f16m2_tumu(__VA_ARGS__) |
| #define | vfncvt_f_x_w_f16m4(...) __riscv_vfncvt_f_x_w_f16m4(__VA_ARGS__) |
| #define | vfncvt_f_x_w_f16m4_m(...) __riscv_vfncvt_f_x_w_f16m4_tumu(__VA_ARGS__) |
| #define | vfncvt_f_x_w_f16mf2(...) __riscv_vfncvt_f_x_w_f16mf2(__VA_ARGS__) |
| #define | vfncvt_f_x_w_f16mf2_m(...) __riscv_vfncvt_f_x_w_f16mf2_tumu(__VA_ARGS__) |
| #define | vfncvt_f_x_w_f16mf4(...) __riscv_vfncvt_f_x_w_f16mf4(__VA_ARGS__) |
| #define | vfncvt_f_x_w_f16mf4_m(...) __riscv_vfncvt_f_x_w_f16mf4_tumu(__VA_ARGS__) |
| #define | vfncvt_f_x_w_f32m1(...) __riscv_vfncvt_f_x_w_f32m1(__VA_ARGS__) |
| #define | vfncvt_f_x_w_f32m1_m(...) __riscv_vfncvt_f_x_w_f32m1_tumu(__VA_ARGS__) |
| #define | vfncvt_f_x_w_f32m2(...) __riscv_vfncvt_f_x_w_f32m2(__VA_ARGS__) |
| #define | vfncvt_f_x_w_f32m2_m(...) __riscv_vfncvt_f_x_w_f32m2_tumu(__VA_ARGS__) |
| #define | vfncvt_f_x_w_f32m4(...) __riscv_vfncvt_f_x_w_f32m4(__VA_ARGS__) |
| #define | vfncvt_f_x_w_f32m4_m(...) __riscv_vfncvt_f_x_w_f32m4_tumu(__VA_ARGS__) |
| #define | vfncvt_f_x_w_f32mf2(...) __riscv_vfncvt_f_x_w_f32mf2(__VA_ARGS__) |
| #define | vfncvt_f_x_w_f32mf2_m(...) __riscv_vfncvt_f_x_w_f32mf2_tumu(__VA_ARGS__) |
| #define | vfncvt_f_xu_w_f16m1(...) __riscv_vfncvt_f_xu_w_f16m1(__VA_ARGS__) |
| #define | vfncvt_f_xu_w_f16m1_m(...) __riscv_vfncvt_f_xu_w_f16m1_tumu(__VA_ARGS__) |
| #define | vfncvt_f_xu_w_f16m2(...) __riscv_vfncvt_f_xu_w_f16m2(__VA_ARGS__) |
| #define | vfncvt_f_xu_w_f16m2_m(...) __riscv_vfncvt_f_xu_w_f16m2_tumu(__VA_ARGS__) |
| #define | vfncvt_f_xu_w_f16m4(...) __riscv_vfncvt_f_xu_w_f16m4(__VA_ARGS__) |
| #define | vfncvt_f_xu_w_f16m4_m(...) __riscv_vfncvt_f_xu_w_f16m4_tumu(__VA_ARGS__) |
| #define | vfncvt_f_xu_w_f16mf2(...) __riscv_vfncvt_f_xu_w_f16mf2(__VA_ARGS__) |
| #define | vfncvt_f_xu_w_f16mf2_m(...) __riscv_vfncvt_f_xu_w_f16mf2_tumu(__VA_ARGS__) |
| #define | vfncvt_f_xu_w_f16mf4(...) __riscv_vfncvt_f_xu_w_f16mf4(__VA_ARGS__) |
| #define | vfncvt_f_xu_w_f16mf4_m(...) __riscv_vfncvt_f_xu_w_f16mf4_tumu(__VA_ARGS__) |
| #define | vfncvt_f_xu_w_f32m1(...) __riscv_vfncvt_f_xu_w_f32m1(__VA_ARGS__) |
| #define | vfncvt_f_xu_w_f32m1_m(...) __riscv_vfncvt_f_xu_w_f32m1_tumu(__VA_ARGS__) |
| #define | vfncvt_f_xu_w_f32m2(...) __riscv_vfncvt_f_xu_w_f32m2(__VA_ARGS__) |
| #define | vfncvt_f_xu_w_f32m2_m(...) __riscv_vfncvt_f_xu_w_f32m2_tumu(__VA_ARGS__) |
| #define | vfncvt_f_xu_w_f32m4(...) __riscv_vfncvt_f_xu_w_f32m4(__VA_ARGS__) |
| #define | vfncvt_f_xu_w_f32m4_m(...) __riscv_vfncvt_f_xu_w_f32m4_tumu(__VA_ARGS__) |
| #define | vfncvt_f_xu_w_f32mf2(...) __riscv_vfncvt_f_xu_w_f32mf2(__VA_ARGS__) |
| #define | vfncvt_f_xu_w_f32mf2_m(...) __riscv_vfncvt_f_xu_w_f32mf2_tumu(__VA_ARGS__) |
| #define | vfncvt_rod_f_f_w_f16m1(...) __riscv_vfncvt_rod_f_f_w_f16m1(__VA_ARGS__) |
| #define | vfncvt_rod_f_f_w_f16m1_m(...) __riscv_vfncvt_rod_f_f_w_f16m1_tumu(__VA_ARGS__) |
| #define | vfncvt_rod_f_f_w_f16m2(...) __riscv_vfncvt_rod_f_f_w_f16m2(__VA_ARGS__) |
| #define | vfncvt_rod_f_f_w_f16m2_m(...) __riscv_vfncvt_rod_f_f_w_f16m2_tumu(__VA_ARGS__) |
| #define | vfncvt_rod_f_f_w_f16m4(...) __riscv_vfncvt_rod_f_f_w_f16m4(__VA_ARGS__) |
| #define | vfncvt_rod_f_f_w_f16m4_m(...) __riscv_vfncvt_rod_f_f_w_f16m4_tumu(__VA_ARGS__) |
| #define | vfncvt_rod_f_f_w_f16mf2(...) __riscv_vfncvt_rod_f_f_w_f16mf2(__VA_ARGS__) |
| #define | vfncvt_rod_f_f_w_f16mf2_m(...) __riscv_vfncvt_rod_f_f_w_f16mf2_tumu(__VA_ARGS__) |
| #define | vfncvt_rod_f_f_w_f16mf4(...) __riscv_vfncvt_rod_f_f_w_f16mf4(__VA_ARGS__) |
| #define | vfncvt_rod_f_f_w_f16mf4_m(...) __riscv_vfncvt_rod_f_f_w_f16mf4_tumu(__VA_ARGS__) |
| #define | vfncvt_rod_f_f_w_f32m1(...) __riscv_vfncvt_rod_f_f_w_f32m1(__VA_ARGS__) |
| #define | vfncvt_rod_f_f_w_f32m1_m(...) __riscv_vfncvt_rod_f_f_w_f32m1_tumu(__VA_ARGS__) |
| #define | vfncvt_rod_f_f_w_f32m2(...) __riscv_vfncvt_rod_f_f_w_f32m2(__VA_ARGS__) |
| #define | vfncvt_rod_f_f_w_f32m2_m(...) __riscv_vfncvt_rod_f_f_w_f32m2_tumu(__VA_ARGS__) |
| #define | vfncvt_rod_f_f_w_f32m4(...) __riscv_vfncvt_rod_f_f_w_f32m4(__VA_ARGS__) |
| #define | vfncvt_rod_f_f_w_f32m4_m(...) __riscv_vfncvt_rod_f_f_w_f32m4_tumu(__VA_ARGS__) |
| #define | vfncvt_rod_f_f_w_f32mf2(...) __riscv_vfncvt_rod_f_f_w_f32mf2(__VA_ARGS__) |
| #define | vfncvt_rod_f_f_w_f32mf2_m(...) __riscv_vfncvt_rod_f_f_w_f32mf2_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i16m1(...) __riscv_vfncvt_rtz_x_f_w_i16m1(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i16m1_m(...) __riscv_vfncvt_rtz_x_f_w_i16m1_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i16m2(...) __riscv_vfncvt_rtz_x_f_w_i16m2(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i16m2_m(...) __riscv_vfncvt_rtz_x_f_w_i16m2_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i16m4(...) __riscv_vfncvt_rtz_x_f_w_i16m4(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i16m4_m(...) __riscv_vfncvt_rtz_x_f_w_i16m4_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i16mf2(...) __riscv_vfncvt_rtz_x_f_w_i16mf2(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i16mf2_m(...) __riscv_vfncvt_rtz_x_f_w_i16mf2_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i16mf4(...) __riscv_vfncvt_rtz_x_f_w_i16mf4(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i16mf4_m(...) __riscv_vfncvt_rtz_x_f_w_i16mf4_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i32m1(...) __riscv_vfncvt_rtz_x_f_w_i32m1(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i32m1_m(...) __riscv_vfncvt_rtz_x_f_w_i32m1_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i32m2(...) __riscv_vfncvt_rtz_x_f_w_i32m2(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i32m2_m(...) __riscv_vfncvt_rtz_x_f_w_i32m2_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i32m4(...) __riscv_vfncvt_rtz_x_f_w_i32m4(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i32m4_m(...) __riscv_vfncvt_rtz_x_f_w_i32m4_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i32mf2(...) __riscv_vfncvt_rtz_x_f_w_i32mf2(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i32mf2_m(...) __riscv_vfncvt_rtz_x_f_w_i32mf2_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i8m1(...) __riscv_vfncvt_rtz_x_f_w_i8m1(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i8m1_m(...) __riscv_vfncvt_rtz_x_f_w_i8m1_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i8m2(...) __riscv_vfncvt_rtz_x_f_w_i8m2(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i8m2_m(...) __riscv_vfncvt_rtz_x_f_w_i8m2_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i8m4(...) __riscv_vfncvt_rtz_x_f_w_i8m4(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i8m4_m(...) __riscv_vfncvt_rtz_x_f_w_i8m4_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i8mf2(...) __riscv_vfncvt_rtz_x_f_w_i8mf2(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i8mf2_m(...) __riscv_vfncvt_rtz_x_f_w_i8mf2_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i8mf4(...) __riscv_vfncvt_rtz_x_f_w_i8mf4(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i8mf4_m(...) __riscv_vfncvt_rtz_x_f_w_i8mf4_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i8mf8(...) __riscv_vfncvt_rtz_x_f_w_i8mf8(__VA_ARGS__) |
| #define | vfncvt_rtz_x_f_w_i8mf8_m(...) __riscv_vfncvt_rtz_x_f_w_i8mf8_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u16m1(...) __riscv_vfncvt_rtz_xu_f_w_u16m1(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u16m1_m(...) __riscv_vfncvt_rtz_xu_f_w_u16m1_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u16m2(...) __riscv_vfncvt_rtz_xu_f_w_u16m2(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u16m2_m(...) __riscv_vfncvt_rtz_xu_f_w_u16m2_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u16m4(...) __riscv_vfncvt_rtz_xu_f_w_u16m4(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u16m4_m(...) __riscv_vfncvt_rtz_xu_f_w_u16m4_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u16mf2(...) __riscv_vfncvt_rtz_xu_f_w_u16mf2(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u16mf2_m(...) __riscv_vfncvt_rtz_xu_f_w_u16mf2_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u16mf4(...) __riscv_vfncvt_rtz_xu_f_w_u16mf4(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u16mf4_m(...) __riscv_vfncvt_rtz_xu_f_w_u16mf4_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u32m1(...) __riscv_vfncvt_rtz_xu_f_w_u32m1(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u32m1_m(...) __riscv_vfncvt_rtz_xu_f_w_u32m1_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u32m2(...) __riscv_vfncvt_rtz_xu_f_w_u32m2(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u32m2_m(...) __riscv_vfncvt_rtz_xu_f_w_u32m2_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u32m4(...) __riscv_vfncvt_rtz_xu_f_w_u32m4(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u32m4_m(...) __riscv_vfncvt_rtz_xu_f_w_u32m4_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u32mf2(...) __riscv_vfncvt_rtz_xu_f_w_u32mf2(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u32mf2_m(...) __riscv_vfncvt_rtz_xu_f_w_u32mf2_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u8m1(...) __riscv_vfncvt_rtz_xu_f_w_u8m1(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u8m1_m(...) __riscv_vfncvt_rtz_xu_f_w_u8m1_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u8m2(...) __riscv_vfncvt_rtz_xu_f_w_u8m2(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u8m2_m(...) __riscv_vfncvt_rtz_xu_f_w_u8m2_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u8m4(...) __riscv_vfncvt_rtz_xu_f_w_u8m4(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u8m4_m(...) __riscv_vfncvt_rtz_xu_f_w_u8m4_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u8mf2(...) __riscv_vfncvt_rtz_xu_f_w_u8mf2(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u8mf2_m(...) __riscv_vfncvt_rtz_xu_f_w_u8mf2_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u8mf4(...) __riscv_vfncvt_rtz_xu_f_w_u8mf4(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u8mf4_m(...) __riscv_vfncvt_rtz_xu_f_w_u8mf4_tumu(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u8mf8(...) __riscv_vfncvt_rtz_xu_f_w_u8mf8(__VA_ARGS__) |
| #define | vfncvt_rtz_xu_f_w_u8mf8_m(...) __riscv_vfncvt_rtz_xu_f_w_u8mf8_tumu(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i16m1(...) __riscv_vfncvt_x_f_w_i16m1(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i16m1_m(...) __riscv_vfncvt_x_f_w_i16m1_tumu(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i16m2(...) __riscv_vfncvt_x_f_w_i16m2(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i16m2_m(...) __riscv_vfncvt_x_f_w_i16m2_tumu(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i16m4(...) __riscv_vfncvt_x_f_w_i16m4(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i16m4_m(...) __riscv_vfncvt_x_f_w_i16m4_tumu(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i16mf2(...) __riscv_vfncvt_x_f_w_i16mf2(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i16mf2_m(...) __riscv_vfncvt_x_f_w_i16mf2_tumu(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i16mf4(...) __riscv_vfncvt_x_f_w_i16mf4(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i16mf4_m(...) __riscv_vfncvt_x_f_w_i16mf4_tumu(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i32m1(...) __riscv_vfncvt_x_f_w_i32m1(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i32m1_m(...) __riscv_vfncvt_x_f_w_i32m1_tumu(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i32m2(...) __riscv_vfncvt_x_f_w_i32m2(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i32m2_m(...) __riscv_vfncvt_x_f_w_i32m2_tumu(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i32m4(...) __riscv_vfncvt_x_f_w_i32m4(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i32m4_m(...) __riscv_vfncvt_x_f_w_i32m4_tumu(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i32mf2(...) __riscv_vfncvt_x_f_w_i32mf2(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i32mf2_m(...) __riscv_vfncvt_x_f_w_i32mf2_tumu(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i8m1(...) __riscv_vfncvt_x_f_w_i8m1(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i8m1_m(...) __riscv_vfncvt_x_f_w_i8m1_tumu(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i8m2(...) __riscv_vfncvt_x_f_w_i8m2(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i8m2_m(...) __riscv_vfncvt_x_f_w_i8m2_tumu(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i8m4(...) __riscv_vfncvt_x_f_w_i8m4(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i8m4_m(...) __riscv_vfncvt_x_f_w_i8m4_tumu(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i8mf2(...) __riscv_vfncvt_x_f_w_i8mf2(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i8mf2_m(...) __riscv_vfncvt_x_f_w_i8mf2_tumu(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i8mf4(...) __riscv_vfncvt_x_f_w_i8mf4(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i8mf4_m(...) __riscv_vfncvt_x_f_w_i8mf4_tumu(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i8mf8(...) __riscv_vfncvt_x_f_w_i8mf8(__VA_ARGS__) |
| #define | vfncvt_x_f_w_i8mf8_m(...) __riscv_vfncvt_x_f_w_i8mf8_tumu(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u16m1(...) __riscv_vfncvt_xu_f_w_u16m1(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u16m1_m(...) __riscv_vfncvt_xu_f_w_u16m1_tumu(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u16m2(...) __riscv_vfncvt_xu_f_w_u16m2(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u16m2_m(...) __riscv_vfncvt_xu_f_w_u16m2_tumu(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u16m4(...) __riscv_vfncvt_xu_f_w_u16m4(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u16m4_m(...) __riscv_vfncvt_xu_f_w_u16m4_tumu(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u16mf2(...) __riscv_vfncvt_xu_f_w_u16mf2(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u16mf2_m(...) __riscv_vfncvt_xu_f_w_u16mf2_tumu(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u16mf4(...) __riscv_vfncvt_xu_f_w_u16mf4(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u16mf4_m(...) __riscv_vfncvt_xu_f_w_u16mf4_tumu(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u32m1(...) __riscv_vfncvt_xu_f_w_u32m1(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u32m1_m(...) __riscv_vfncvt_xu_f_w_u32m1_tumu(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u32m2(...) __riscv_vfncvt_xu_f_w_u32m2(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u32m2_m(...) __riscv_vfncvt_xu_f_w_u32m2_tumu(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u32m4(...) __riscv_vfncvt_xu_f_w_u32m4(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u32m4_m(...) __riscv_vfncvt_xu_f_w_u32m4_tumu(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u32mf2(...) __riscv_vfncvt_xu_f_w_u32mf2(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u32mf2_m(...) __riscv_vfncvt_xu_f_w_u32mf2_tumu(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u8m1(...) __riscv_vfncvt_xu_f_w_u8m1(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u8m1_m(...) __riscv_vfncvt_xu_f_w_u8m1_tumu(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u8m2(...) __riscv_vfncvt_xu_f_w_u8m2(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u8m2_m(...) __riscv_vfncvt_xu_f_w_u8m2_tumu(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u8m4(...) __riscv_vfncvt_xu_f_w_u8m4(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u8m4_m(...) __riscv_vfncvt_xu_f_w_u8m4_tumu(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u8mf2(...) __riscv_vfncvt_xu_f_w_u8mf2(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u8mf2_m(...) __riscv_vfncvt_xu_f_w_u8mf2_tumu(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u8mf4(...) __riscv_vfncvt_xu_f_w_u8mf4(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u8mf4_m(...) __riscv_vfncvt_xu_f_w_u8mf4_tumu(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u8mf8(...) __riscv_vfncvt_xu_f_w_u8mf8(__VA_ARGS__) |
| #define | vfncvt_xu_f_w_u8mf8_m(...) __riscv_vfncvt_xu_f_w_u8mf8_tumu(__VA_ARGS__) |
| #define | vfneg_v_f16m1(...) __riscv_vfneg_v_f16m1(__VA_ARGS__) |
| #define | vfneg_v_f16m1_m(...) __riscv_vfneg_v_f16m1_tumu(__VA_ARGS__) |
| #define | vfneg_v_f16m2(...) __riscv_vfneg_v_f16m2(__VA_ARGS__) |
| #define | vfneg_v_f16m2_m(...) __riscv_vfneg_v_f16m2_tumu(__VA_ARGS__) |
| #define | vfneg_v_f16m4(...) __riscv_vfneg_v_f16m4(__VA_ARGS__) |
| #define | vfneg_v_f16m4_m(...) __riscv_vfneg_v_f16m4_tumu(__VA_ARGS__) |
| #define | vfneg_v_f16m8(...) __riscv_vfneg_v_f16m8(__VA_ARGS__) |
| #define | vfneg_v_f16m8_m(...) __riscv_vfneg_v_f16m8_tumu(__VA_ARGS__) |
| #define | vfneg_v_f16mf2(...) __riscv_vfneg_v_f16mf2(__VA_ARGS__) |
| #define | vfneg_v_f16mf2_m(...) __riscv_vfneg_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vfneg_v_f16mf4(...) __riscv_vfneg_v_f16mf4(__VA_ARGS__) |
| #define | vfneg_v_f16mf4_m(...) __riscv_vfneg_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vfneg_v_f32m1(...) __riscv_vfneg_v_f32m1(__VA_ARGS__) |
| #define | vfneg_v_f32m1_m(...) __riscv_vfneg_v_f32m1_tumu(__VA_ARGS__) |
| #define | vfneg_v_f32m2(...) __riscv_vfneg_v_f32m2(__VA_ARGS__) |
| #define | vfneg_v_f32m2_m(...) __riscv_vfneg_v_f32m2_tumu(__VA_ARGS__) |
| #define | vfneg_v_f32m4(...) __riscv_vfneg_v_f32m4(__VA_ARGS__) |
| #define | vfneg_v_f32m4_m(...) __riscv_vfneg_v_f32m4_tumu(__VA_ARGS__) |
| #define | vfneg_v_f32m8(...) __riscv_vfneg_v_f32m8(__VA_ARGS__) |
| #define | vfneg_v_f32m8_m(...) __riscv_vfneg_v_f32m8_tumu(__VA_ARGS__) |
| #define | vfneg_v_f32mf2(...) __riscv_vfneg_v_f32mf2(__VA_ARGS__) |
| #define | vfneg_v_f32mf2_m(...) __riscv_vfneg_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vfneg_v_f64m1(...) __riscv_vfneg_v_f64m1(__VA_ARGS__) |
| #define | vfneg_v_f64m1_m(...) __riscv_vfneg_v_f64m1_tumu(__VA_ARGS__) |
| #define | vfneg_v_f64m2(...) __riscv_vfneg_v_f64m2(__VA_ARGS__) |
| #define | vfneg_v_f64m2_m(...) __riscv_vfneg_v_f64m2_tumu(__VA_ARGS__) |
| #define | vfneg_v_f64m4(...) __riscv_vfneg_v_f64m4(__VA_ARGS__) |
| #define | vfneg_v_f64m4_m(...) __riscv_vfneg_v_f64m4_tumu(__VA_ARGS__) |
| #define | vfneg_v_f64m8(...) __riscv_vfneg_v_f64m8(__VA_ARGS__) |
| #define | vfneg_v_f64m8_m(...) __riscv_vfneg_v_f64m8_tumu(__VA_ARGS__) |
| #define | vfnmacc_vf_f16m1(...) __riscv_vfnmacc_vf_f16m1_tu(__VA_ARGS__) |
| #define | vfnmacc_vf_f16m1_m(...) __riscv_vfnmacc_vf_f16m1_tumu(__VA_ARGS__) |
| #define | vfnmacc_vf_f16m2(...) __riscv_vfnmacc_vf_f16m2_tu(__VA_ARGS__) |
| #define | vfnmacc_vf_f16m2_m(...) __riscv_vfnmacc_vf_f16m2_tumu(__VA_ARGS__) |
| #define | vfnmacc_vf_f16m4(...) __riscv_vfnmacc_vf_f16m4_tu(__VA_ARGS__) |
| #define | vfnmacc_vf_f16m4_m(...) __riscv_vfnmacc_vf_f16m4_tumu(__VA_ARGS__) |
| #define | vfnmacc_vf_f16m8(...) __riscv_vfnmacc_vf_f16m8_tu(__VA_ARGS__) |
| #define | vfnmacc_vf_f16m8_m(...) __riscv_vfnmacc_vf_f16m8_tumu(__VA_ARGS__) |
| #define | vfnmacc_vf_f16mf2(...) __riscv_vfnmacc_vf_f16mf2_tu(__VA_ARGS__) |
| #define | vfnmacc_vf_f16mf2_m(...) __riscv_vfnmacc_vf_f16mf2_tumu(__VA_ARGS__) |
| #define | vfnmacc_vf_f16mf4(...) __riscv_vfnmacc_vf_f16mf4_tu(__VA_ARGS__) |
| #define | vfnmacc_vf_f16mf4_m(...) __riscv_vfnmacc_vf_f16mf4_tumu(__VA_ARGS__) |
| #define | vfnmacc_vf_f32m1(...) __riscv_vfnmacc_vf_f32m1_tu(__VA_ARGS__) |
| #define | vfnmacc_vf_f32m1_m(...) __riscv_vfnmacc_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfnmacc_vf_f32m2(...) __riscv_vfnmacc_vf_f32m2_tu(__VA_ARGS__) |
| #define | vfnmacc_vf_f32m2_m(...) __riscv_vfnmacc_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfnmacc_vf_f32m4(...) __riscv_vfnmacc_vf_f32m4_tu(__VA_ARGS__) |
| #define | vfnmacc_vf_f32m4_m(...) __riscv_vfnmacc_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfnmacc_vf_f32m8(...) __riscv_vfnmacc_vf_f32m8_tu(__VA_ARGS__) |
| #define | vfnmacc_vf_f32m8_m(...) __riscv_vfnmacc_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfnmacc_vf_f32mf2(...) __riscv_vfnmacc_vf_f32mf2_tu(__VA_ARGS__) |
| #define | vfnmacc_vf_f32mf2_m(...) __riscv_vfnmacc_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfnmacc_vf_f64m1(...) __riscv_vfnmacc_vf_f64m1_tu(__VA_ARGS__) |
| #define | vfnmacc_vf_f64m1_m(...) __riscv_vfnmacc_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfnmacc_vf_f64m2(...) __riscv_vfnmacc_vf_f64m2_tu(__VA_ARGS__) |
| #define | vfnmacc_vf_f64m2_m(...) __riscv_vfnmacc_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfnmacc_vf_f64m4(...) __riscv_vfnmacc_vf_f64m4_tu(__VA_ARGS__) |
| #define | vfnmacc_vf_f64m4_m(...) __riscv_vfnmacc_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfnmacc_vf_f64m8(...) __riscv_vfnmacc_vf_f64m8_tu(__VA_ARGS__) |
| #define | vfnmacc_vf_f64m8_m(...) __riscv_vfnmacc_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfnmacc_vv_f16m1(...) __riscv_vfnmacc_vv_f16m1_tu(__VA_ARGS__) |
| #define | vfnmacc_vv_f16m1_m(...) __riscv_vfnmacc_vv_f16m1_tumu(__VA_ARGS__) |
| #define | vfnmacc_vv_f16m2(...) __riscv_vfnmacc_vv_f16m2_tu(__VA_ARGS__) |
| #define | vfnmacc_vv_f16m2_m(...) __riscv_vfnmacc_vv_f16m2_tumu(__VA_ARGS__) |
| #define | vfnmacc_vv_f16m4(...) __riscv_vfnmacc_vv_f16m4_tu(__VA_ARGS__) |
| #define | vfnmacc_vv_f16m4_m(...) __riscv_vfnmacc_vv_f16m4_tumu(__VA_ARGS__) |
| #define | vfnmacc_vv_f16m8(...) __riscv_vfnmacc_vv_f16m8_tu(__VA_ARGS__) |
| #define | vfnmacc_vv_f16m8_m(...) __riscv_vfnmacc_vv_f16m8_tumu(__VA_ARGS__) |
| #define | vfnmacc_vv_f16mf2(...) __riscv_vfnmacc_vv_f16mf2_tu(__VA_ARGS__) |
| #define | vfnmacc_vv_f16mf2_m(...) __riscv_vfnmacc_vv_f16mf2_tumu(__VA_ARGS__) |
| #define | vfnmacc_vv_f16mf4(...) __riscv_vfnmacc_vv_f16mf4_tu(__VA_ARGS__) |
| #define | vfnmacc_vv_f16mf4_m(...) __riscv_vfnmacc_vv_f16mf4_tumu(__VA_ARGS__) |
| #define | vfnmacc_vv_f32m1(...) __riscv_vfnmacc_vv_f32m1_tu(__VA_ARGS__) |
| #define | vfnmacc_vv_f32m1_m(...) __riscv_vfnmacc_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfnmacc_vv_f32m2(...) __riscv_vfnmacc_vv_f32m2_tu(__VA_ARGS__) |
| #define | vfnmacc_vv_f32m2_m(...) __riscv_vfnmacc_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfnmacc_vv_f32m4(...) __riscv_vfnmacc_vv_f32m4_tu(__VA_ARGS__) |
| #define | vfnmacc_vv_f32m4_m(...) __riscv_vfnmacc_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfnmacc_vv_f32m8(...) __riscv_vfnmacc_vv_f32m8_tu(__VA_ARGS__) |
| #define | vfnmacc_vv_f32m8_m(...) __riscv_vfnmacc_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfnmacc_vv_f32mf2(...) __riscv_vfnmacc_vv_f32mf2_tu(__VA_ARGS__) |
| #define | vfnmacc_vv_f32mf2_m(...) __riscv_vfnmacc_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfnmacc_vv_f64m1(...) __riscv_vfnmacc_vv_f64m1_tu(__VA_ARGS__) |
| #define | vfnmacc_vv_f64m1_m(...) __riscv_vfnmacc_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfnmacc_vv_f64m2(...) __riscv_vfnmacc_vv_f64m2_tu(__VA_ARGS__) |
| #define | vfnmacc_vv_f64m2_m(...) __riscv_vfnmacc_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfnmacc_vv_f64m4(...) __riscv_vfnmacc_vv_f64m4_tu(__VA_ARGS__) |
| #define | vfnmacc_vv_f64m4_m(...) __riscv_vfnmacc_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfnmacc_vv_f64m8(...) __riscv_vfnmacc_vv_f64m8_tu(__VA_ARGS__) |
| #define | vfnmacc_vv_f64m8_m(...) __riscv_vfnmacc_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfnmadd_vf_f16m1(...) __riscv_vfnmadd_vf_f16m1_tu(__VA_ARGS__) |
| #define | vfnmadd_vf_f16m1_m(...) __riscv_vfnmadd_vf_f16m1_tumu(__VA_ARGS__) |
| #define | vfnmadd_vf_f16m2(...) __riscv_vfnmadd_vf_f16m2_tu(__VA_ARGS__) |
| #define | vfnmadd_vf_f16m2_m(...) __riscv_vfnmadd_vf_f16m2_tumu(__VA_ARGS__) |
| #define | vfnmadd_vf_f16m4(...) __riscv_vfnmadd_vf_f16m4_tu(__VA_ARGS__) |
| #define | vfnmadd_vf_f16m4_m(...) __riscv_vfnmadd_vf_f16m4_tumu(__VA_ARGS__) |
| #define | vfnmadd_vf_f16m8(...) __riscv_vfnmadd_vf_f16m8_tu(__VA_ARGS__) |
| #define | vfnmadd_vf_f16m8_m(...) __riscv_vfnmadd_vf_f16m8_tumu(__VA_ARGS__) |
| #define | vfnmadd_vf_f16mf2(...) __riscv_vfnmadd_vf_f16mf2_tu(__VA_ARGS__) |
| #define | vfnmadd_vf_f16mf2_m(...) __riscv_vfnmadd_vf_f16mf2_tumu(__VA_ARGS__) |
| #define | vfnmadd_vf_f16mf4(...) __riscv_vfnmadd_vf_f16mf4_tu(__VA_ARGS__) |
| #define | vfnmadd_vf_f16mf4_m(...) __riscv_vfnmadd_vf_f16mf4_tumu(__VA_ARGS__) |
| #define | vfnmadd_vf_f32m1(...) __riscv_vfnmadd_vf_f32m1_tu(__VA_ARGS__) |
| #define | vfnmadd_vf_f32m1_m(...) __riscv_vfnmadd_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfnmadd_vf_f32m2(...) __riscv_vfnmadd_vf_f32m2_tu(__VA_ARGS__) |
| #define | vfnmadd_vf_f32m2_m(...) __riscv_vfnmadd_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfnmadd_vf_f32m4(...) __riscv_vfnmadd_vf_f32m4_tu(__VA_ARGS__) |
| #define | vfnmadd_vf_f32m4_m(...) __riscv_vfnmadd_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfnmadd_vf_f32m8(...) __riscv_vfnmadd_vf_f32m8_tu(__VA_ARGS__) |
| #define | vfnmadd_vf_f32m8_m(...) __riscv_vfnmadd_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfnmadd_vf_f32mf2(...) __riscv_vfnmadd_vf_f32mf2_tu(__VA_ARGS__) |
| #define | vfnmadd_vf_f32mf2_m(...) __riscv_vfnmadd_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfnmadd_vf_f64m1(...) __riscv_vfnmadd_vf_f64m1_tu(__VA_ARGS__) |
| #define | vfnmadd_vf_f64m1_m(...) __riscv_vfnmadd_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfnmadd_vf_f64m2(...) __riscv_vfnmadd_vf_f64m2_tu(__VA_ARGS__) |
| #define | vfnmadd_vf_f64m2_m(...) __riscv_vfnmadd_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfnmadd_vf_f64m4(...) __riscv_vfnmadd_vf_f64m4_tu(__VA_ARGS__) |
| #define | vfnmadd_vf_f64m4_m(...) __riscv_vfnmadd_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfnmadd_vf_f64m8(...) __riscv_vfnmadd_vf_f64m8_tu(__VA_ARGS__) |
| #define | vfnmadd_vf_f64m8_m(...) __riscv_vfnmadd_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfnmadd_vv_f16m1(...) __riscv_vfnmadd_vv_f16m1_tu(__VA_ARGS__) |
| #define | vfnmadd_vv_f16m1_m(...) __riscv_vfnmadd_vv_f16m1_tumu(__VA_ARGS__) |
| #define | vfnmadd_vv_f16m2(...) __riscv_vfnmadd_vv_f16m2_tu(__VA_ARGS__) |
| #define | vfnmadd_vv_f16m2_m(...) __riscv_vfnmadd_vv_f16m2_tumu(__VA_ARGS__) |
| #define | vfnmadd_vv_f16m4(...) __riscv_vfnmadd_vv_f16m4_tu(__VA_ARGS__) |
| #define | vfnmadd_vv_f16m4_m(...) __riscv_vfnmadd_vv_f16m4_tumu(__VA_ARGS__) |
| #define | vfnmadd_vv_f16m8(...) __riscv_vfnmadd_vv_f16m8_tu(__VA_ARGS__) |
| #define | vfnmadd_vv_f16m8_m(...) __riscv_vfnmadd_vv_f16m8_tumu(__VA_ARGS__) |
| #define | vfnmadd_vv_f16mf2(...) __riscv_vfnmadd_vv_f16mf2_tu(__VA_ARGS__) |
| #define | vfnmadd_vv_f16mf2_m(...) __riscv_vfnmadd_vv_f16mf2_tumu(__VA_ARGS__) |
| #define | vfnmadd_vv_f16mf4(...) __riscv_vfnmadd_vv_f16mf4_tu(__VA_ARGS__) |
| #define | vfnmadd_vv_f16mf4_m(...) __riscv_vfnmadd_vv_f16mf4_tumu(__VA_ARGS__) |
| #define | vfnmadd_vv_f32m1(...) __riscv_vfnmadd_vv_f32m1_tu(__VA_ARGS__) |
| #define | vfnmadd_vv_f32m1_m(...) __riscv_vfnmadd_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfnmadd_vv_f32m2(...) __riscv_vfnmadd_vv_f32m2_tu(__VA_ARGS__) |
| #define | vfnmadd_vv_f32m2_m(...) __riscv_vfnmadd_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfnmadd_vv_f32m4(...) __riscv_vfnmadd_vv_f32m4_tu(__VA_ARGS__) |
| #define | vfnmadd_vv_f32m4_m(...) __riscv_vfnmadd_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfnmadd_vv_f32m8(...) __riscv_vfnmadd_vv_f32m8_tu(__VA_ARGS__) |
| #define | vfnmadd_vv_f32m8_m(...) __riscv_vfnmadd_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfnmadd_vv_f32mf2(...) __riscv_vfnmadd_vv_f32mf2_tu(__VA_ARGS__) |
| #define | vfnmadd_vv_f32mf2_m(...) __riscv_vfnmadd_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfnmadd_vv_f64m1(...) __riscv_vfnmadd_vv_f64m1_tu(__VA_ARGS__) |
| #define | vfnmadd_vv_f64m1_m(...) __riscv_vfnmadd_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfnmadd_vv_f64m2(...) __riscv_vfnmadd_vv_f64m2_tu(__VA_ARGS__) |
| #define | vfnmadd_vv_f64m2_m(...) __riscv_vfnmadd_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfnmadd_vv_f64m4(...) __riscv_vfnmadd_vv_f64m4_tu(__VA_ARGS__) |
| #define | vfnmadd_vv_f64m4_m(...) __riscv_vfnmadd_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfnmadd_vv_f64m8(...) __riscv_vfnmadd_vv_f64m8_tu(__VA_ARGS__) |
| #define | vfnmadd_vv_f64m8_m(...) __riscv_vfnmadd_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfnmsac_vf_f16m1(...) __riscv_vfnmsac_vf_f16m1_tu(__VA_ARGS__) |
| #define | vfnmsac_vf_f16m1_m(...) __riscv_vfnmsac_vf_f16m1_tumu(__VA_ARGS__) |
| #define | vfnmsac_vf_f16m2(...) __riscv_vfnmsac_vf_f16m2_tu(__VA_ARGS__) |
| #define | vfnmsac_vf_f16m2_m(...) __riscv_vfnmsac_vf_f16m2_tumu(__VA_ARGS__) |
| #define | vfnmsac_vf_f16m4(...) __riscv_vfnmsac_vf_f16m4_tu(__VA_ARGS__) |
| #define | vfnmsac_vf_f16m4_m(...) __riscv_vfnmsac_vf_f16m4_tumu(__VA_ARGS__) |
| #define | vfnmsac_vf_f16m8(...) __riscv_vfnmsac_vf_f16m8_tu(__VA_ARGS__) |
| #define | vfnmsac_vf_f16m8_m(...) __riscv_vfnmsac_vf_f16m8_tumu(__VA_ARGS__) |
| #define | vfnmsac_vf_f16mf2(...) __riscv_vfnmsac_vf_f16mf2_tu(__VA_ARGS__) |
| #define | vfnmsac_vf_f16mf2_m(...) __riscv_vfnmsac_vf_f16mf2_tumu(__VA_ARGS__) |
| #define | vfnmsac_vf_f16mf4(...) __riscv_vfnmsac_vf_f16mf4_tu(__VA_ARGS__) |
| #define | vfnmsac_vf_f16mf4_m(...) __riscv_vfnmsac_vf_f16mf4_tumu(__VA_ARGS__) |
| #define | vfnmsac_vf_f32m1(...) __riscv_vfnmsac_vf_f32m1_tu(__VA_ARGS__) |
| #define | vfnmsac_vf_f32m1_m(...) __riscv_vfnmsac_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfnmsac_vf_f32m2(...) __riscv_vfnmsac_vf_f32m2_tu(__VA_ARGS__) |
| #define | vfnmsac_vf_f32m2_m(...) __riscv_vfnmsac_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfnmsac_vf_f32m4(...) __riscv_vfnmsac_vf_f32m4_tu(__VA_ARGS__) |
| #define | vfnmsac_vf_f32m4_m(...) __riscv_vfnmsac_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfnmsac_vf_f32m8(...) __riscv_vfnmsac_vf_f32m8_tu(__VA_ARGS__) |
| #define | vfnmsac_vf_f32m8_m(...) __riscv_vfnmsac_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfnmsac_vf_f32mf2(...) __riscv_vfnmsac_vf_f32mf2_tu(__VA_ARGS__) |
| #define | vfnmsac_vf_f32mf2_m(...) __riscv_vfnmsac_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfnmsac_vf_f64m1(...) __riscv_vfnmsac_vf_f64m1_tu(__VA_ARGS__) |
| #define | vfnmsac_vf_f64m1_m(...) __riscv_vfnmsac_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfnmsac_vf_f64m2(...) __riscv_vfnmsac_vf_f64m2_tu(__VA_ARGS__) |
| #define | vfnmsac_vf_f64m2_m(...) __riscv_vfnmsac_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfnmsac_vf_f64m4(...) __riscv_vfnmsac_vf_f64m4_tu(__VA_ARGS__) |
| #define | vfnmsac_vf_f64m4_m(...) __riscv_vfnmsac_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfnmsac_vf_f64m8(...) __riscv_vfnmsac_vf_f64m8_tu(__VA_ARGS__) |
| #define | vfnmsac_vf_f64m8_m(...) __riscv_vfnmsac_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfnmsac_vv_f16m1(...) __riscv_vfnmsac_vv_f16m1_tu(__VA_ARGS__) |
| #define | vfnmsac_vv_f16m1_m(...) __riscv_vfnmsac_vv_f16m1_tumu(__VA_ARGS__) |
| #define | vfnmsac_vv_f16m2(...) __riscv_vfnmsac_vv_f16m2_tu(__VA_ARGS__) |
| #define | vfnmsac_vv_f16m2_m(...) __riscv_vfnmsac_vv_f16m2_tumu(__VA_ARGS__) |
| #define | vfnmsac_vv_f16m4(...) __riscv_vfnmsac_vv_f16m4_tu(__VA_ARGS__) |
| #define | vfnmsac_vv_f16m4_m(...) __riscv_vfnmsac_vv_f16m4_tumu(__VA_ARGS__) |
| #define | vfnmsac_vv_f16m8(...) __riscv_vfnmsac_vv_f16m8_tu(__VA_ARGS__) |
| #define | vfnmsac_vv_f16m8_m(...) __riscv_vfnmsac_vv_f16m8_tumu(__VA_ARGS__) |
| #define | vfnmsac_vv_f16mf2(...) __riscv_vfnmsac_vv_f16mf2_tu(__VA_ARGS__) |
| #define | vfnmsac_vv_f16mf2_m(...) __riscv_vfnmsac_vv_f16mf2_tumu(__VA_ARGS__) |
| #define | vfnmsac_vv_f16mf4(...) __riscv_vfnmsac_vv_f16mf4_tu(__VA_ARGS__) |
| #define | vfnmsac_vv_f16mf4_m(...) __riscv_vfnmsac_vv_f16mf4_tumu(__VA_ARGS__) |
| #define | vfnmsac_vv_f32m1(...) __riscv_vfnmsac_vv_f32m1_tu(__VA_ARGS__) |
| #define | vfnmsac_vv_f32m1_m(...) __riscv_vfnmsac_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfnmsac_vv_f32m2(...) __riscv_vfnmsac_vv_f32m2_tu(__VA_ARGS__) |
| #define | vfnmsac_vv_f32m2_m(...) __riscv_vfnmsac_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfnmsac_vv_f32m4(...) __riscv_vfnmsac_vv_f32m4_tu(__VA_ARGS__) |
| #define | vfnmsac_vv_f32m4_m(...) __riscv_vfnmsac_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfnmsac_vv_f32m8(...) __riscv_vfnmsac_vv_f32m8_tu(__VA_ARGS__) |
| #define | vfnmsac_vv_f32m8_m(...) __riscv_vfnmsac_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfnmsac_vv_f32mf2(...) __riscv_vfnmsac_vv_f32mf2_tu(__VA_ARGS__) |
| #define | vfnmsac_vv_f32mf2_m(...) __riscv_vfnmsac_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfnmsac_vv_f64m1(...) __riscv_vfnmsac_vv_f64m1_tu(__VA_ARGS__) |
| #define | vfnmsac_vv_f64m1_m(...) __riscv_vfnmsac_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfnmsac_vv_f64m2(...) __riscv_vfnmsac_vv_f64m2_tu(__VA_ARGS__) |
| #define | vfnmsac_vv_f64m2_m(...) __riscv_vfnmsac_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfnmsac_vv_f64m4(...) __riscv_vfnmsac_vv_f64m4_tu(__VA_ARGS__) |
| #define | vfnmsac_vv_f64m4_m(...) __riscv_vfnmsac_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfnmsac_vv_f64m8(...) __riscv_vfnmsac_vv_f64m8_tu(__VA_ARGS__) |
| #define | vfnmsac_vv_f64m8_m(...) __riscv_vfnmsac_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfnmsub_vf_f16m1(...) __riscv_vfnmsub_vf_f16m1_tu(__VA_ARGS__) |
| #define | vfnmsub_vf_f16m1_m(...) __riscv_vfnmsub_vf_f16m1_tumu(__VA_ARGS__) |
| #define | vfnmsub_vf_f16m2(...) __riscv_vfnmsub_vf_f16m2_tu(__VA_ARGS__) |
| #define | vfnmsub_vf_f16m2_m(...) __riscv_vfnmsub_vf_f16m2_tumu(__VA_ARGS__) |
| #define | vfnmsub_vf_f16m4(...) __riscv_vfnmsub_vf_f16m4_tu(__VA_ARGS__) |
| #define | vfnmsub_vf_f16m4_m(...) __riscv_vfnmsub_vf_f16m4_tumu(__VA_ARGS__) |
| #define | vfnmsub_vf_f16m8(...) __riscv_vfnmsub_vf_f16m8_tu(__VA_ARGS__) |
| #define | vfnmsub_vf_f16m8_m(...) __riscv_vfnmsub_vf_f16m8_tumu(__VA_ARGS__) |
| #define | vfnmsub_vf_f16mf2(...) __riscv_vfnmsub_vf_f16mf2_tu(__VA_ARGS__) |
| #define | vfnmsub_vf_f16mf2_m(...) __riscv_vfnmsub_vf_f16mf2_tumu(__VA_ARGS__) |
| #define | vfnmsub_vf_f16mf4(...) __riscv_vfnmsub_vf_f16mf4_tu(__VA_ARGS__) |
| #define | vfnmsub_vf_f16mf4_m(...) __riscv_vfnmsub_vf_f16mf4_tumu(__VA_ARGS__) |
| #define | vfnmsub_vf_f32m1(...) __riscv_vfnmsub_vf_f32m1_tu(__VA_ARGS__) |
| #define | vfnmsub_vf_f32m1_m(...) __riscv_vfnmsub_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfnmsub_vf_f32m2(...) __riscv_vfnmsub_vf_f32m2_tu(__VA_ARGS__) |
| #define | vfnmsub_vf_f32m2_m(...) __riscv_vfnmsub_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfnmsub_vf_f32m4(...) __riscv_vfnmsub_vf_f32m4_tu(__VA_ARGS__) |
| #define | vfnmsub_vf_f32m4_m(...) __riscv_vfnmsub_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfnmsub_vf_f32m8(...) __riscv_vfnmsub_vf_f32m8_tu(__VA_ARGS__) |
| #define | vfnmsub_vf_f32m8_m(...) __riscv_vfnmsub_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfnmsub_vf_f32mf2(...) __riscv_vfnmsub_vf_f32mf2_tu(__VA_ARGS__) |
| #define | vfnmsub_vf_f32mf2_m(...) __riscv_vfnmsub_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfnmsub_vf_f64m1(...) __riscv_vfnmsub_vf_f64m1_tu(__VA_ARGS__) |
| #define | vfnmsub_vf_f64m1_m(...) __riscv_vfnmsub_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfnmsub_vf_f64m2(...) __riscv_vfnmsub_vf_f64m2_tu(__VA_ARGS__) |
| #define | vfnmsub_vf_f64m2_m(...) __riscv_vfnmsub_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfnmsub_vf_f64m4(...) __riscv_vfnmsub_vf_f64m4_tu(__VA_ARGS__) |
| #define | vfnmsub_vf_f64m4_m(...) __riscv_vfnmsub_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfnmsub_vf_f64m8(...) __riscv_vfnmsub_vf_f64m8_tu(__VA_ARGS__) |
| #define | vfnmsub_vf_f64m8_m(...) __riscv_vfnmsub_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfnmsub_vv_f16m1(...) __riscv_vfnmsub_vv_f16m1_tu(__VA_ARGS__) |
| #define | vfnmsub_vv_f16m1_m(...) __riscv_vfnmsub_vv_f16m1_tumu(__VA_ARGS__) |
| #define | vfnmsub_vv_f16m2(...) __riscv_vfnmsub_vv_f16m2_tu(__VA_ARGS__) |
| #define | vfnmsub_vv_f16m2_m(...) __riscv_vfnmsub_vv_f16m2_tumu(__VA_ARGS__) |
| #define | vfnmsub_vv_f16m4(...) __riscv_vfnmsub_vv_f16m4_tu(__VA_ARGS__) |
| #define | vfnmsub_vv_f16m4_m(...) __riscv_vfnmsub_vv_f16m4_tumu(__VA_ARGS__) |
| #define | vfnmsub_vv_f16m8(...) __riscv_vfnmsub_vv_f16m8_tu(__VA_ARGS__) |
| #define | vfnmsub_vv_f16m8_m(...) __riscv_vfnmsub_vv_f16m8_tumu(__VA_ARGS__) |
| #define | vfnmsub_vv_f16mf2(...) __riscv_vfnmsub_vv_f16mf2_tu(__VA_ARGS__) |
| #define | vfnmsub_vv_f16mf2_m(...) __riscv_vfnmsub_vv_f16mf2_tumu(__VA_ARGS__) |
| #define | vfnmsub_vv_f16mf4(...) __riscv_vfnmsub_vv_f16mf4_tu(__VA_ARGS__) |
| #define | vfnmsub_vv_f16mf4_m(...) __riscv_vfnmsub_vv_f16mf4_tumu(__VA_ARGS__) |
| #define | vfnmsub_vv_f32m1(...) __riscv_vfnmsub_vv_f32m1_tu(__VA_ARGS__) |
| #define | vfnmsub_vv_f32m1_m(...) __riscv_vfnmsub_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfnmsub_vv_f32m2(...) __riscv_vfnmsub_vv_f32m2_tu(__VA_ARGS__) |
| #define | vfnmsub_vv_f32m2_m(...) __riscv_vfnmsub_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfnmsub_vv_f32m4(...) __riscv_vfnmsub_vv_f32m4_tu(__VA_ARGS__) |
| #define | vfnmsub_vv_f32m4_m(...) __riscv_vfnmsub_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfnmsub_vv_f32m8(...) __riscv_vfnmsub_vv_f32m8_tu(__VA_ARGS__) |
| #define | vfnmsub_vv_f32m8_m(...) __riscv_vfnmsub_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfnmsub_vv_f32mf2(...) __riscv_vfnmsub_vv_f32mf2_tu(__VA_ARGS__) |
| #define | vfnmsub_vv_f32mf2_m(...) __riscv_vfnmsub_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfnmsub_vv_f64m1(...) __riscv_vfnmsub_vv_f64m1_tu(__VA_ARGS__) |
| #define | vfnmsub_vv_f64m1_m(...) __riscv_vfnmsub_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfnmsub_vv_f64m2(...) __riscv_vfnmsub_vv_f64m2_tu(__VA_ARGS__) |
| #define | vfnmsub_vv_f64m2_m(...) __riscv_vfnmsub_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfnmsub_vv_f64m4(...) __riscv_vfnmsub_vv_f64m4_tu(__VA_ARGS__) |
| #define | vfnmsub_vv_f64m4_m(...) __riscv_vfnmsub_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfnmsub_vv_f64m8(...) __riscv_vfnmsub_vv_f64m8_tu(__VA_ARGS__) |
| #define | vfnmsub_vv_f64m8_m(...) __riscv_vfnmsub_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfrdiv_vf_f16m1(...) __riscv_vfrdiv_vf_f16m1(__VA_ARGS__) |
| #define | vfrdiv_vf_f16m1_m(...) __riscv_vfrdiv_vf_f16m1_tumu(__VA_ARGS__) |
| #define | vfrdiv_vf_f16m2(...) __riscv_vfrdiv_vf_f16m2(__VA_ARGS__) |
| #define | vfrdiv_vf_f16m2_m(...) __riscv_vfrdiv_vf_f16m2_tumu(__VA_ARGS__) |
| #define | vfrdiv_vf_f16m4(...) __riscv_vfrdiv_vf_f16m4(__VA_ARGS__) |
| #define | vfrdiv_vf_f16m4_m(...) __riscv_vfrdiv_vf_f16m4_tumu(__VA_ARGS__) |
| #define | vfrdiv_vf_f16m8(...) __riscv_vfrdiv_vf_f16m8(__VA_ARGS__) |
| #define | vfrdiv_vf_f16m8_m(...) __riscv_vfrdiv_vf_f16m8_tumu(__VA_ARGS__) |
| #define | vfrdiv_vf_f16mf2(...) __riscv_vfrdiv_vf_f16mf2(__VA_ARGS__) |
| #define | vfrdiv_vf_f16mf2_m(...) __riscv_vfrdiv_vf_f16mf2_tumu(__VA_ARGS__) |
| #define | vfrdiv_vf_f16mf4(...) __riscv_vfrdiv_vf_f16mf4(__VA_ARGS__) |
| #define | vfrdiv_vf_f16mf4_m(...) __riscv_vfrdiv_vf_f16mf4_tumu(__VA_ARGS__) |
| #define | vfrdiv_vf_f32m1(...) __riscv_vfrdiv_vf_f32m1(__VA_ARGS__) |
| #define | vfrdiv_vf_f32m1_m(...) __riscv_vfrdiv_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfrdiv_vf_f32m2(...) __riscv_vfrdiv_vf_f32m2(__VA_ARGS__) |
| #define | vfrdiv_vf_f32m2_m(...) __riscv_vfrdiv_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfrdiv_vf_f32m4(...) __riscv_vfrdiv_vf_f32m4(__VA_ARGS__) |
| #define | vfrdiv_vf_f32m4_m(...) __riscv_vfrdiv_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfrdiv_vf_f32m8(...) __riscv_vfrdiv_vf_f32m8(__VA_ARGS__) |
| #define | vfrdiv_vf_f32m8_m(...) __riscv_vfrdiv_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfrdiv_vf_f32mf2(...) __riscv_vfrdiv_vf_f32mf2(__VA_ARGS__) |
| #define | vfrdiv_vf_f32mf2_m(...) __riscv_vfrdiv_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfrdiv_vf_f64m1(...) __riscv_vfrdiv_vf_f64m1(__VA_ARGS__) |
| #define | vfrdiv_vf_f64m1_m(...) __riscv_vfrdiv_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfrdiv_vf_f64m2(...) __riscv_vfrdiv_vf_f64m2(__VA_ARGS__) |
| #define | vfrdiv_vf_f64m2_m(...) __riscv_vfrdiv_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfrdiv_vf_f64m4(...) __riscv_vfrdiv_vf_f64m4(__VA_ARGS__) |
| #define | vfrdiv_vf_f64m4_m(...) __riscv_vfrdiv_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfrdiv_vf_f64m8(...) __riscv_vfrdiv_vf_f64m8(__VA_ARGS__) |
| #define | vfrdiv_vf_f64m8_m(...) __riscv_vfrdiv_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfrec7_v_f16m1(...) __riscv_vfrec7_v_f16m1(__VA_ARGS__) |
| #define | vfrec7_v_f16m1_m(...) __riscv_vfrec7_v_f16m1_tumu(__VA_ARGS__) |
| #define | vfrec7_v_f16m2(...) __riscv_vfrec7_v_f16m2(__VA_ARGS__) |
| #define | vfrec7_v_f16m2_m(...) __riscv_vfrec7_v_f16m2_tumu(__VA_ARGS__) |
| #define | vfrec7_v_f16m4(...) __riscv_vfrec7_v_f16m4(__VA_ARGS__) |
| #define | vfrec7_v_f16m4_m(...) __riscv_vfrec7_v_f16m4_tumu(__VA_ARGS__) |
| #define | vfrec7_v_f16m8(...) __riscv_vfrec7_v_f16m8(__VA_ARGS__) |
| #define | vfrec7_v_f16m8_m(...) __riscv_vfrec7_v_f16m8_tumu(__VA_ARGS__) |
| #define | vfrec7_v_f16mf2(...) __riscv_vfrec7_v_f16mf2(__VA_ARGS__) |
| #define | vfrec7_v_f16mf2_m(...) __riscv_vfrec7_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vfrec7_v_f16mf4(...) __riscv_vfrec7_v_f16mf4(__VA_ARGS__) |
| #define | vfrec7_v_f16mf4_m(...) __riscv_vfrec7_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vfrec7_v_f32m1(...) __riscv_vfrec7_v_f32m1(__VA_ARGS__) |
| #define | vfrec7_v_f32m1_m(...) __riscv_vfrec7_v_f32m1_tumu(__VA_ARGS__) |
| #define | vfrec7_v_f32m2(...) __riscv_vfrec7_v_f32m2(__VA_ARGS__) |
| #define | vfrec7_v_f32m2_m(...) __riscv_vfrec7_v_f32m2_tumu(__VA_ARGS__) |
| #define | vfrec7_v_f32m4(...) __riscv_vfrec7_v_f32m4(__VA_ARGS__) |
| #define | vfrec7_v_f32m4_m(...) __riscv_vfrec7_v_f32m4_tumu(__VA_ARGS__) |
| #define | vfrec7_v_f32m8(...) __riscv_vfrec7_v_f32m8(__VA_ARGS__) |
| #define | vfrec7_v_f32m8_m(...) __riscv_vfrec7_v_f32m8_tumu(__VA_ARGS__) |
| #define | vfrec7_v_f32mf2(...) __riscv_vfrec7_v_f32mf2(__VA_ARGS__) |
| #define | vfrec7_v_f32mf2_m(...) __riscv_vfrec7_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vfrec7_v_f64m1(...) __riscv_vfrec7_v_f64m1(__VA_ARGS__) |
| #define | vfrec7_v_f64m1_m(...) __riscv_vfrec7_v_f64m1_tumu(__VA_ARGS__) |
| #define | vfrec7_v_f64m2(...) __riscv_vfrec7_v_f64m2(__VA_ARGS__) |
| #define | vfrec7_v_f64m2_m(...) __riscv_vfrec7_v_f64m2_tumu(__VA_ARGS__) |
| #define | vfrec7_v_f64m4(...) __riscv_vfrec7_v_f64m4(__VA_ARGS__) |
| #define | vfrec7_v_f64m4_m(...) __riscv_vfrec7_v_f64m4_tumu(__VA_ARGS__) |
| #define | vfrec7_v_f64m8(...) __riscv_vfrec7_v_f64m8(__VA_ARGS__) |
| #define | vfrec7_v_f64m8_m(...) __riscv_vfrec7_v_f64m8_tumu(__VA_ARGS__) |
| #define | vfredmax_vs_f16m1_f16m1(...) __riscv_vfredmax_vs_f16m1_f16m1_tu(__VA_ARGS__) |
| #define | vfredmax_vs_f16m1_f16m1_m(...) __riscv_vfredmax_vs_f16m1_f16m1_tum(__VA_ARGS__) |
| #define | vfredmax_vs_f16m2_f16m1(...) __riscv_vfredmax_vs_f16m2_f16m1_tu(__VA_ARGS__) |
| #define | vfredmax_vs_f16m2_f16m1_m(...) __riscv_vfredmax_vs_f16m2_f16m1_tum(__VA_ARGS__) |
| #define | vfredmax_vs_f16m4_f16m1(...) __riscv_vfredmax_vs_f16m4_f16m1_tu(__VA_ARGS__) |
| #define | vfredmax_vs_f16m4_f16m1_m(...) __riscv_vfredmax_vs_f16m4_f16m1_tum(__VA_ARGS__) |
| #define | vfredmax_vs_f16m8_f16m1(...) __riscv_vfredmax_vs_f16m8_f16m1_tu(__VA_ARGS__) |
| #define | vfredmax_vs_f16m8_f16m1_m(...) __riscv_vfredmax_vs_f16m8_f16m1_tum(__VA_ARGS__) |
| #define | vfredmax_vs_f16mf2_f16m1(...) __riscv_vfredmax_vs_f16mf2_f16m1_tu(__VA_ARGS__) |
| #define | vfredmax_vs_f16mf2_f16m1_m(...) __riscv_vfredmax_vs_f16mf2_f16m1_tum(__VA_ARGS__) |
| #define | vfredmax_vs_f16mf4_f16m1(...) __riscv_vfredmax_vs_f16mf4_f16m1_tu(__VA_ARGS__) |
| #define | vfredmax_vs_f16mf4_f16m1_m(...) __riscv_vfredmax_vs_f16mf4_f16m1_tum(__VA_ARGS__) |
| #define | vfredmax_vs_f32m1_f32m1(...) __riscv_vfredmax_vs_f32m1_f32m1_tu(__VA_ARGS__) |
| #define | vfredmax_vs_f32m1_f32m1_m(...) __riscv_vfredmax_vs_f32m1_f32m1_tum(__VA_ARGS__) |
| #define | vfredmax_vs_f32m2_f32m1(...) __riscv_vfredmax_vs_f32m2_f32m1_tu(__VA_ARGS__) |
| #define | vfredmax_vs_f32m2_f32m1_m(...) __riscv_vfredmax_vs_f32m2_f32m1_tum(__VA_ARGS__) |
| #define | vfredmax_vs_f32m4_f32m1(...) __riscv_vfredmax_vs_f32m4_f32m1_tu(__VA_ARGS__) |
| #define | vfredmax_vs_f32m4_f32m1_m(...) __riscv_vfredmax_vs_f32m4_f32m1_tum(__VA_ARGS__) |
| #define | vfredmax_vs_f32m8_f32m1(...) __riscv_vfredmax_vs_f32m8_f32m1_tu(__VA_ARGS__) |
| #define | vfredmax_vs_f32m8_f32m1_m(...) __riscv_vfredmax_vs_f32m8_f32m1_tum(__VA_ARGS__) |
| #define | vfredmax_vs_f32mf2_f32m1(...) __riscv_vfredmax_vs_f32mf2_f32m1_tu(__VA_ARGS__) |
| #define | vfredmax_vs_f32mf2_f32m1_m(...) __riscv_vfredmax_vs_f32mf2_f32m1_tum(__VA_ARGS__) |
| #define | vfredmax_vs_f64m1_f64m1(...) __riscv_vfredmax_vs_f64m1_f64m1_tu(__VA_ARGS__) |
| #define | vfredmax_vs_f64m1_f64m1_m(...) __riscv_vfredmax_vs_f64m1_f64m1_tum(__VA_ARGS__) |
| #define | vfredmax_vs_f64m2_f64m1(...) __riscv_vfredmax_vs_f64m2_f64m1_tu(__VA_ARGS__) |
| #define | vfredmax_vs_f64m2_f64m1_m(...) __riscv_vfredmax_vs_f64m2_f64m1_tum(__VA_ARGS__) |
| #define | vfredmax_vs_f64m4_f64m1(...) __riscv_vfredmax_vs_f64m4_f64m1_tu(__VA_ARGS__) |
| #define | vfredmax_vs_f64m4_f64m1_m(...) __riscv_vfredmax_vs_f64m4_f64m1_tum(__VA_ARGS__) |
| #define | vfredmax_vs_f64m8_f64m1(...) __riscv_vfredmax_vs_f64m8_f64m1_tu(__VA_ARGS__) |
| #define | vfredmax_vs_f64m8_f64m1_m(...) __riscv_vfredmax_vs_f64m8_f64m1_tum(__VA_ARGS__) |
| #define | vfredmin_vs_f16m1_f16m1(...) __riscv_vfredmin_vs_f16m1_f16m1_tu(__VA_ARGS__) |
| #define | vfredmin_vs_f16m1_f16m1_m(...) __riscv_vfredmin_vs_f16m1_f16m1_tum(__VA_ARGS__) |
| #define | vfredmin_vs_f16m2_f16m1(...) __riscv_vfredmin_vs_f16m2_f16m1_tu(__VA_ARGS__) |
| #define | vfredmin_vs_f16m2_f16m1_m(...) __riscv_vfredmin_vs_f16m2_f16m1_tum(__VA_ARGS__) |
| #define | vfredmin_vs_f16m4_f16m1(...) __riscv_vfredmin_vs_f16m4_f16m1_tu(__VA_ARGS__) |
| #define | vfredmin_vs_f16m4_f16m1_m(...) __riscv_vfredmin_vs_f16m4_f16m1_tum(__VA_ARGS__) |
| #define | vfredmin_vs_f16m8_f16m1(...) __riscv_vfredmin_vs_f16m8_f16m1_tu(__VA_ARGS__) |
| #define | vfredmin_vs_f16m8_f16m1_m(...) __riscv_vfredmin_vs_f16m8_f16m1_tum(__VA_ARGS__) |
| #define | vfredmin_vs_f16mf2_f16m1(...) __riscv_vfredmin_vs_f16mf2_f16m1_tu(__VA_ARGS__) |
| #define | vfredmin_vs_f16mf2_f16m1_m(...) __riscv_vfredmin_vs_f16mf2_f16m1_tum(__VA_ARGS__) |
| #define | vfredmin_vs_f16mf4_f16m1(...) __riscv_vfredmin_vs_f16mf4_f16m1_tu(__VA_ARGS__) |
| #define | vfredmin_vs_f16mf4_f16m1_m(...) __riscv_vfredmin_vs_f16mf4_f16m1_tum(__VA_ARGS__) |
| #define | vfredmin_vs_f32m1_f32m1(...) __riscv_vfredmin_vs_f32m1_f32m1_tu(__VA_ARGS__) |
| #define | vfredmin_vs_f32m1_f32m1_m(...) __riscv_vfredmin_vs_f32m1_f32m1_tum(__VA_ARGS__) |
| #define | vfredmin_vs_f32m2_f32m1(...) __riscv_vfredmin_vs_f32m2_f32m1_tu(__VA_ARGS__) |
| #define | vfredmin_vs_f32m2_f32m1_m(...) __riscv_vfredmin_vs_f32m2_f32m1_tum(__VA_ARGS__) |
| #define | vfredmin_vs_f32m4_f32m1(...) __riscv_vfredmin_vs_f32m4_f32m1_tu(__VA_ARGS__) |
| #define | vfredmin_vs_f32m4_f32m1_m(...) __riscv_vfredmin_vs_f32m4_f32m1_tum(__VA_ARGS__) |
| #define | vfredmin_vs_f32m8_f32m1(...) __riscv_vfredmin_vs_f32m8_f32m1_tu(__VA_ARGS__) |
| #define | vfredmin_vs_f32m8_f32m1_m(...) __riscv_vfredmin_vs_f32m8_f32m1_tum(__VA_ARGS__) |
| #define | vfredmin_vs_f32mf2_f32m1(...) __riscv_vfredmin_vs_f32mf2_f32m1_tu(__VA_ARGS__) |
| #define | vfredmin_vs_f32mf2_f32m1_m(...) __riscv_vfredmin_vs_f32mf2_f32m1_tum(__VA_ARGS__) |
| #define | vfredmin_vs_f64m1_f64m1(...) __riscv_vfredmin_vs_f64m1_f64m1_tu(__VA_ARGS__) |
| #define | vfredmin_vs_f64m1_f64m1_m(...) __riscv_vfredmin_vs_f64m1_f64m1_tum(__VA_ARGS__) |
| #define | vfredmin_vs_f64m2_f64m1(...) __riscv_vfredmin_vs_f64m2_f64m1_tu(__VA_ARGS__) |
| #define | vfredmin_vs_f64m2_f64m1_m(...) __riscv_vfredmin_vs_f64m2_f64m1_tum(__VA_ARGS__) |
| #define | vfredmin_vs_f64m4_f64m1(...) __riscv_vfredmin_vs_f64m4_f64m1_tu(__VA_ARGS__) |
| #define | vfredmin_vs_f64m4_f64m1_m(...) __riscv_vfredmin_vs_f64m4_f64m1_tum(__VA_ARGS__) |
| #define | vfredmin_vs_f64m8_f64m1(...) __riscv_vfredmin_vs_f64m8_f64m1_tu(__VA_ARGS__) |
| #define | vfredmin_vs_f64m8_f64m1_m(...) __riscv_vfredmin_vs_f64m8_f64m1_tum(__VA_ARGS__) |
| #define | vfredosum_vs_f16m1_f16m1(...) __riscv_vfredosum_vs_f16m1_f16m1_tu(__VA_ARGS__) |
| #define | vfredosum_vs_f16m1_f16m1_m(...) __riscv_vfredosum_vs_f16m1_f16m1_tum(__VA_ARGS__) |
| #define | vfredosum_vs_f16m2_f16m1(...) __riscv_vfredosum_vs_f16m2_f16m1_tu(__VA_ARGS__) |
| #define | vfredosum_vs_f16m2_f16m1_m(...) __riscv_vfredosum_vs_f16m2_f16m1_tum(__VA_ARGS__) |
| #define | vfredosum_vs_f16m4_f16m1(...) __riscv_vfredosum_vs_f16m4_f16m1_tu(__VA_ARGS__) |
| #define | vfredosum_vs_f16m4_f16m1_m(...) __riscv_vfredosum_vs_f16m4_f16m1_tum(__VA_ARGS__) |
| #define | vfredosum_vs_f16m8_f16m1(...) __riscv_vfredosum_vs_f16m8_f16m1_tu(__VA_ARGS__) |
| #define | vfredosum_vs_f16m8_f16m1_m(...) __riscv_vfredosum_vs_f16m8_f16m1_tum(__VA_ARGS__) |
| #define | vfredosum_vs_f16mf2_f16m1(...) __riscv_vfredosum_vs_f16mf2_f16m1_tu(__VA_ARGS__) |
| #define | vfredosum_vs_f16mf2_f16m1_m(...) __riscv_vfredosum_vs_f16mf2_f16m1_tum(__VA_ARGS__) |
| #define | vfredosum_vs_f16mf4_f16m1(...) __riscv_vfredosum_vs_f16mf4_f16m1_tu(__VA_ARGS__) |
| #define | vfredosum_vs_f16mf4_f16m1_m(...) __riscv_vfredosum_vs_f16mf4_f16m1_tum(__VA_ARGS__) |
| #define | vfredosum_vs_f32m1_f32m1(...) __riscv_vfredosum_vs_f32m1_f32m1_tu(__VA_ARGS__) |
| #define | vfredosum_vs_f32m1_f32m1_m(...) __riscv_vfredosum_vs_f32m1_f32m1_tum(__VA_ARGS__) |
| #define | vfredosum_vs_f32m2_f32m1(...) __riscv_vfredosum_vs_f32m2_f32m1_tu(__VA_ARGS__) |
| #define | vfredosum_vs_f32m2_f32m1_m(...) __riscv_vfredosum_vs_f32m2_f32m1_tum(__VA_ARGS__) |
| #define | vfredosum_vs_f32m4_f32m1(...) __riscv_vfredosum_vs_f32m4_f32m1_tu(__VA_ARGS__) |
| #define | vfredosum_vs_f32m4_f32m1_m(...) __riscv_vfredosum_vs_f32m4_f32m1_tum(__VA_ARGS__) |
| #define | vfredosum_vs_f32m8_f32m1(...) __riscv_vfredosum_vs_f32m8_f32m1_tu(__VA_ARGS__) |
| #define | vfredosum_vs_f32m8_f32m1_m(...) __riscv_vfredosum_vs_f32m8_f32m1_tum(__VA_ARGS__) |
| #define | vfredosum_vs_f32mf2_f32m1(...) __riscv_vfredosum_vs_f32mf2_f32m1_tu(__VA_ARGS__) |
| #define | vfredosum_vs_f32mf2_f32m1_m(...) __riscv_vfredosum_vs_f32mf2_f32m1_tum(__VA_ARGS__) |
| #define | vfredosum_vs_f64m1_f64m1(...) __riscv_vfredosum_vs_f64m1_f64m1_tu(__VA_ARGS__) |
| #define | vfredosum_vs_f64m1_f64m1_m(...) __riscv_vfredosum_vs_f64m1_f64m1_tum(__VA_ARGS__) |
| #define | vfredosum_vs_f64m2_f64m1(...) __riscv_vfredosum_vs_f64m2_f64m1_tu(__VA_ARGS__) |
| #define | vfredosum_vs_f64m2_f64m1_m(...) __riscv_vfredosum_vs_f64m2_f64m1_tum(__VA_ARGS__) |
| #define | vfredosum_vs_f64m4_f64m1(...) __riscv_vfredosum_vs_f64m4_f64m1_tu(__VA_ARGS__) |
| #define | vfredosum_vs_f64m4_f64m1_m(...) __riscv_vfredosum_vs_f64m4_f64m1_tum(__VA_ARGS__) |
| #define | vfredosum_vs_f64m8_f64m1(...) __riscv_vfredosum_vs_f64m8_f64m1_tu(__VA_ARGS__) |
| #define | vfredosum_vs_f64m8_f64m1_m(...) __riscv_vfredosum_vs_f64m8_f64m1_tum(__VA_ARGS__) |
| #define | vfredusum_vs_f16m1_f16m1(...) __riscv_vfredusum_vs_f16m1_f16m1_tu(__VA_ARGS__) |
| #define | vfredusum_vs_f16m1_f16m1_m(...) __riscv_vfredusum_vs_f16m1_f16m1_tum(__VA_ARGS__) |
| #define | vfredusum_vs_f16m2_f16m1(...) __riscv_vfredusum_vs_f16m2_f16m1_tu(__VA_ARGS__) |
| #define | vfredusum_vs_f16m2_f16m1_m(...) __riscv_vfredusum_vs_f16m2_f16m1_tum(__VA_ARGS__) |
| #define | vfredusum_vs_f16m4_f16m1(...) __riscv_vfredusum_vs_f16m4_f16m1_tu(__VA_ARGS__) |
| #define | vfredusum_vs_f16m4_f16m1_m(...) __riscv_vfredusum_vs_f16m4_f16m1_tum(__VA_ARGS__) |
| #define | vfredusum_vs_f16m8_f16m1(...) __riscv_vfredusum_vs_f16m8_f16m1_tu(__VA_ARGS__) |
| #define | vfredusum_vs_f16m8_f16m1_m(...) __riscv_vfredusum_vs_f16m8_f16m1_tum(__VA_ARGS__) |
| #define | vfredusum_vs_f16mf2_f16m1(...) __riscv_vfredusum_vs_f16mf2_f16m1_tu(__VA_ARGS__) |
| #define | vfredusum_vs_f16mf2_f16m1_m(...) __riscv_vfredusum_vs_f16mf2_f16m1_tum(__VA_ARGS__) |
| #define | vfredusum_vs_f16mf4_f16m1(...) __riscv_vfredusum_vs_f16mf4_f16m1_tu(__VA_ARGS__) |
| #define | vfredusum_vs_f16mf4_f16m1_m(...) __riscv_vfredusum_vs_f16mf4_f16m1_tum(__VA_ARGS__) |
| #define | vfredusum_vs_f32m1_f32m1(...) __riscv_vfredusum_vs_f32m1_f32m1_tu(__VA_ARGS__) |
| #define | vfredusum_vs_f32m1_f32m1_m(...) __riscv_vfredusum_vs_f32m1_f32m1_tum(__VA_ARGS__) |
| #define | vfredusum_vs_f32m2_f32m1(...) __riscv_vfredusum_vs_f32m2_f32m1_tu(__VA_ARGS__) |
| #define | vfredusum_vs_f32m2_f32m1_m(...) __riscv_vfredusum_vs_f32m2_f32m1_tum(__VA_ARGS__) |
| #define | vfredusum_vs_f32m4_f32m1(...) __riscv_vfredusum_vs_f32m4_f32m1_tu(__VA_ARGS__) |
| #define | vfredusum_vs_f32m4_f32m1_m(...) __riscv_vfredusum_vs_f32m4_f32m1_tum(__VA_ARGS__) |
| #define | vfredusum_vs_f32m8_f32m1(...) __riscv_vfredusum_vs_f32m8_f32m1_tu(__VA_ARGS__) |
| #define | vfredusum_vs_f32m8_f32m1_m(...) __riscv_vfredusum_vs_f32m8_f32m1_tum(__VA_ARGS__) |
| #define | vfredusum_vs_f32mf2_f32m1(...) __riscv_vfredusum_vs_f32mf2_f32m1_tu(__VA_ARGS__) |
| #define | vfredusum_vs_f32mf2_f32m1_m(...) __riscv_vfredusum_vs_f32mf2_f32m1_tum(__VA_ARGS__) |
| #define | vfredusum_vs_f64m1_f64m1(...) __riscv_vfredusum_vs_f64m1_f64m1_tu(__VA_ARGS__) |
| #define | vfredusum_vs_f64m1_f64m1_m(...) __riscv_vfredusum_vs_f64m1_f64m1_tum(__VA_ARGS__) |
| #define | vfredusum_vs_f64m2_f64m1(...) __riscv_vfredusum_vs_f64m2_f64m1_tu(__VA_ARGS__) |
| #define | vfredusum_vs_f64m2_f64m1_m(...) __riscv_vfredusum_vs_f64m2_f64m1_tum(__VA_ARGS__) |
| #define | vfredusum_vs_f64m4_f64m1(...) __riscv_vfredusum_vs_f64m4_f64m1_tu(__VA_ARGS__) |
| #define | vfredusum_vs_f64m4_f64m1_m(...) __riscv_vfredusum_vs_f64m4_f64m1_tum(__VA_ARGS__) |
| #define | vfredusum_vs_f64m8_f64m1(...) __riscv_vfredusum_vs_f64m8_f64m1_tu(__VA_ARGS__) |
| #define | vfredusum_vs_f64m8_f64m1_m(...) __riscv_vfredusum_vs_f64m8_f64m1_tum(__VA_ARGS__) |
| #define | vfrsqrt7_v_f16m1(...) __riscv_vfrsqrt7_v_f16m1(__VA_ARGS__) |
| #define | vfrsqrt7_v_f16m1_m(...) __riscv_vfrsqrt7_v_f16m1_tumu(__VA_ARGS__) |
| #define | vfrsqrt7_v_f16m2(...) __riscv_vfrsqrt7_v_f16m2(__VA_ARGS__) |
| #define | vfrsqrt7_v_f16m2_m(...) __riscv_vfrsqrt7_v_f16m2_tumu(__VA_ARGS__) |
| #define | vfrsqrt7_v_f16m4(...) __riscv_vfrsqrt7_v_f16m4(__VA_ARGS__) |
| #define | vfrsqrt7_v_f16m4_m(...) __riscv_vfrsqrt7_v_f16m4_tumu(__VA_ARGS__) |
| #define | vfrsqrt7_v_f16m8(...) __riscv_vfrsqrt7_v_f16m8(__VA_ARGS__) |
| #define | vfrsqrt7_v_f16m8_m(...) __riscv_vfrsqrt7_v_f16m8_tumu(__VA_ARGS__) |
| #define | vfrsqrt7_v_f16mf2(...) __riscv_vfrsqrt7_v_f16mf2(__VA_ARGS__) |
| #define | vfrsqrt7_v_f16mf2_m(...) __riscv_vfrsqrt7_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vfrsqrt7_v_f16mf4(...) __riscv_vfrsqrt7_v_f16mf4(__VA_ARGS__) |
| #define | vfrsqrt7_v_f16mf4_m(...) __riscv_vfrsqrt7_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vfrsqrt7_v_f32m1(...) __riscv_vfrsqrt7_v_f32m1(__VA_ARGS__) |
| #define | vfrsqrt7_v_f32m1_m(...) __riscv_vfrsqrt7_v_f32m1_tumu(__VA_ARGS__) |
| #define | vfrsqrt7_v_f32m2(...) __riscv_vfrsqrt7_v_f32m2(__VA_ARGS__) |
| #define | vfrsqrt7_v_f32m2_m(...) __riscv_vfrsqrt7_v_f32m2_tumu(__VA_ARGS__) |
| #define | vfrsqrt7_v_f32m4(...) __riscv_vfrsqrt7_v_f32m4(__VA_ARGS__) |
| #define | vfrsqrt7_v_f32m4_m(...) __riscv_vfrsqrt7_v_f32m4_tumu(__VA_ARGS__) |
| #define | vfrsqrt7_v_f32m8(...) __riscv_vfrsqrt7_v_f32m8(__VA_ARGS__) |
| #define | vfrsqrt7_v_f32m8_m(...) __riscv_vfrsqrt7_v_f32m8_tumu(__VA_ARGS__) |
| #define | vfrsqrt7_v_f32mf2(...) __riscv_vfrsqrt7_v_f32mf2(__VA_ARGS__) |
| #define | vfrsqrt7_v_f32mf2_m(...) __riscv_vfrsqrt7_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vfrsqrt7_v_f64m1(...) __riscv_vfrsqrt7_v_f64m1(__VA_ARGS__) |
| #define | vfrsqrt7_v_f64m1_m(...) __riscv_vfrsqrt7_v_f64m1_tumu(__VA_ARGS__) |
| #define | vfrsqrt7_v_f64m2(...) __riscv_vfrsqrt7_v_f64m2(__VA_ARGS__) |
| #define | vfrsqrt7_v_f64m2_m(...) __riscv_vfrsqrt7_v_f64m2_tumu(__VA_ARGS__) |
| #define | vfrsqrt7_v_f64m4(...) __riscv_vfrsqrt7_v_f64m4(__VA_ARGS__) |
| #define | vfrsqrt7_v_f64m4_m(...) __riscv_vfrsqrt7_v_f64m4_tumu(__VA_ARGS__) |
| #define | vfrsqrt7_v_f64m8(...) __riscv_vfrsqrt7_v_f64m8(__VA_ARGS__) |
| #define | vfrsqrt7_v_f64m8_m(...) __riscv_vfrsqrt7_v_f64m8_tumu(__VA_ARGS__) |
| #define | vfrsub_vf_f16m1(...) __riscv_vfrsub_vf_f16m1(__VA_ARGS__) |
| #define | vfrsub_vf_f16m1_m(...) __riscv_vfrsub_vf_f16m1_tumu(__VA_ARGS__) |
| #define | vfrsub_vf_f16m2(...) __riscv_vfrsub_vf_f16m2(__VA_ARGS__) |
| #define | vfrsub_vf_f16m2_m(...) __riscv_vfrsub_vf_f16m2_tumu(__VA_ARGS__) |
| #define | vfrsub_vf_f16m4(...) __riscv_vfrsub_vf_f16m4(__VA_ARGS__) |
| #define | vfrsub_vf_f16m4_m(...) __riscv_vfrsub_vf_f16m4_tumu(__VA_ARGS__) |
| #define | vfrsub_vf_f16m8(...) __riscv_vfrsub_vf_f16m8(__VA_ARGS__) |
| #define | vfrsub_vf_f16m8_m(...) __riscv_vfrsub_vf_f16m8_tumu(__VA_ARGS__) |
| #define | vfrsub_vf_f16mf2(...) __riscv_vfrsub_vf_f16mf2(__VA_ARGS__) |
| #define | vfrsub_vf_f16mf2_m(...) __riscv_vfrsub_vf_f16mf2_tumu(__VA_ARGS__) |
| #define | vfrsub_vf_f16mf4(...) __riscv_vfrsub_vf_f16mf4(__VA_ARGS__) |
| #define | vfrsub_vf_f16mf4_m(...) __riscv_vfrsub_vf_f16mf4_tumu(__VA_ARGS__) |
| #define | vfrsub_vf_f32m1(...) __riscv_vfrsub_vf_f32m1(__VA_ARGS__) |
| #define | vfrsub_vf_f32m1_m(...) __riscv_vfrsub_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfrsub_vf_f32m2(...) __riscv_vfrsub_vf_f32m2(__VA_ARGS__) |
| #define | vfrsub_vf_f32m2_m(...) __riscv_vfrsub_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfrsub_vf_f32m4(...) __riscv_vfrsub_vf_f32m4(__VA_ARGS__) |
| #define | vfrsub_vf_f32m4_m(...) __riscv_vfrsub_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfrsub_vf_f32m8(...) __riscv_vfrsub_vf_f32m8(__VA_ARGS__) |
| #define | vfrsub_vf_f32m8_m(...) __riscv_vfrsub_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfrsub_vf_f32mf2(...) __riscv_vfrsub_vf_f32mf2(__VA_ARGS__) |
| #define | vfrsub_vf_f32mf2_m(...) __riscv_vfrsub_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfrsub_vf_f64m1(...) __riscv_vfrsub_vf_f64m1(__VA_ARGS__) |
| #define | vfrsub_vf_f64m1_m(...) __riscv_vfrsub_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfrsub_vf_f64m2(...) __riscv_vfrsub_vf_f64m2(__VA_ARGS__) |
| #define | vfrsub_vf_f64m2_m(...) __riscv_vfrsub_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfrsub_vf_f64m4(...) __riscv_vfrsub_vf_f64m4(__VA_ARGS__) |
| #define | vfrsub_vf_f64m4_m(...) __riscv_vfrsub_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfrsub_vf_f64m8(...) __riscv_vfrsub_vf_f64m8(__VA_ARGS__) |
| #define | vfrsub_vf_f64m8_m(...) __riscv_vfrsub_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfsgnj_vf_f16m1(...) __riscv_vfsgnj_vf_f16m1(__VA_ARGS__) |
| #define | vfsgnj_vf_f16m1_m(...) __riscv_vfsgnj_vf_f16m1_tumu(__VA_ARGS__) |
| #define | vfsgnj_vf_f16m2(...) __riscv_vfsgnj_vf_f16m2(__VA_ARGS__) |
| #define | vfsgnj_vf_f16m2_m(...) __riscv_vfsgnj_vf_f16m2_tumu(__VA_ARGS__) |
| #define | vfsgnj_vf_f16m4(...) __riscv_vfsgnj_vf_f16m4(__VA_ARGS__) |
| #define | vfsgnj_vf_f16m4_m(...) __riscv_vfsgnj_vf_f16m4_tumu(__VA_ARGS__) |
| #define | vfsgnj_vf_f16m8(...) __riscv_vfsgnj_vf_f16m8(__VA_ARGS__) |
| #define | vfsgnj_vf_f16m8_m(...) __riscv_vfsgnj_vf_f16m8_tumu(__VA_ARGS__) |
| #define | vfsgnj_vf_f16mf2(...) __riscv_vfsgnj_vf_f16mf2(__VA_ARGS__) |
| #define | vfsgnj_vf_f16mf2_m(...) __riscv_vfsgnj_vf_f16mf2_tumu(__VA_ARGS__) |
| #define | vfsgnj_vf_f16mf4(...) __riscv_vfsgnj_vf_f16mf4(__VA_ARGS__) |
| #define | vfsgnj_vf_f16mf4_m(...) __riscv_vfsgnj_vf_f16mf4_tumu(__VA_ARGS__) |
| #define | vfsgnj_vf_f32m1(...) __riscv_vfsgnj_vf_f32m1(__VA_ARGS__) |
| #define | vfsgnj_vf_f32m1_m(...) __riscv_vfsgnj_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfsgnj_vf_f32m2(...) __riscv_vfsgnj_vf_f32m2(__VA_ARGS__) |
| #define | vfsgnj_vf_f32m2_m(...) __riscv_vfsgnj_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfsgnj_vf_f32m4(...) __riscv_vfsgnj_vf_f32m4(__VA_ARGS__) |
| #define | vfsgnj_vf_f32m4_m(...) __riscv_vfsgnj_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfsgnj_vf_f32m8(...) __riscv_vfsgnj_vf_f32m8(__VA_ARGS__) |
| #define | vfsgnj_vf_f32m8_m(...) __riscv_vfsgnj_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfsgnj_vf_f32mf2(...) __riscv_vfsgnj_vf_f32mf2(__VA_ARGS__) |
| #define | vfsgnj_vf_f32mf2_m(...) __riscv_vfsgnj_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfsgnj_vf_f64m1(...) __riscv_vfsgnj_vf_f64m1(__VA_ARGS__) |
| #define | vfsgnj_vf_f64m1_m(...) __riscv_vfsgnj_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfsgnj_vf_f64m2(...) __riscv_vfsgnj_vf_f64m2(__VA_ARGS__) |
| #define | vfsgnj_vf_f64m2_m(...) __riscv_vfsgnj_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfsgnj_vf_f64m4(...) __riscv_vfsgnj_vf_f64m4(__VA_ARGS__) |
| #define | vfsgnj_vf_f64m4_m(...) __riscv_vfsgnj_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfsgnj_vf_f64m8(...) __riscv_vfsgnj_vf_f64m8(__VA_ARGS__) |
| #define | vfsgnj_vf_f64m8_m(...) __riscv_vfsgnj_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfsgnj_vv_f16m1(...) __riscv_vfsgnj_vv_f16m1(__VA_ARGS__) |
| #define | vfsgnj_vv_f16m1_m(...) __riscv_vfsgnj_vv_f16m1_tumu(__VA_ARGS__) |
| #define | vfsgnj_vv_f16m2(...) __riscv_vfsgnj_vv_f16m2(__VA_ARGS__) |
| #define | vfsgnj_vv_f16m2_m(...) __riscv_vfsgnj_vv_f16m2_tumu(__VA_ARGS__) |
| #define | vfsgnj_vv_f16m4(...) __riscv_vfsgnj_vv_f16m4(__VA_ARGS__) |
| #define | vfsgnj_vv_f16m4_m(...) __riscv_vfsgnj_vv_f16m4_tumu(__VA_ARGS__) |
| #define | vfsgnj_vv_f16m8(...) __riscv_vfsgnj_vv_f16m8(__VA_ARGS__) |
| #define | vfsgnj_vv_f16m8_m(...) __riscv_vfsgnj_vv_f16m8_tumu(__VA_ARGS__) |
| #define | vfsgnj_vv_f16mf2(...) __riscv_vfsgnj_vv_f16mf2(__VA_ARGS__) |
| #define | vfsgnj_vv_f16mf2_m(...) __riscv_vfsgnj_vv_f16mf2_tumu(__VA_ARGS__) |
| #define | vfsgnj_vv_f16mf4(...) __riscv_vfsgnj_vv_f16mf4(__VA_ARGS__) |
| #define | vfsgnj_vv_f16mf4_m(...) __riscv_vfsgnj_vv_f16mf4_tumu(__VA_ARGS__) |
| #define | vfsgnj_vv_f32m1(...) __riscv_vfsgnj_vv_f32m1(__VA_ARGS__) |
| #define | vfsgnj_vv_f32m1_m(...) __riscv_vfsgnj_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfsgnj_vv_f32m2(...) __riscv_vfsgnj_vv_f32m2(__VA_ARGS__) |
| #define | vfsgnj_vv_f32m2_m(...) __riscv_vfsgnj_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfsgnj_vv_f32m4(...) __riscv_vfsgnj_vv_f32m4(__VA_ARGS__) |
| #define | vfsgnj_vv_f32m4_m(...) __riscv_vfsgnj_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfsgnj_vv_f32m8(...) __riscv_vfsgnj_vv_f32m8(__VA_ARGS__) |
| #define | vfsgnj_vv_f32m8_m(...) __riscv_vfsgnj_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfsgnj_vv_f32mf2(...) __riscv_vfsgnj_vv_f32mf2(__VA_ARGS__) |
| #define | vfsgnj_vv_f32mf2_m(...) __riscv_vfsgnj_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfsgnj_vv_f64m1(...) __riscv_vfsgnj_vv_f64m1(__VA_ARGS__) |
| #define | vfsgnj_vv_f64m1_m(...) __riscv_vfsgnj_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfsgnj_vv_f64m2(...) __riscv_vfsgnj_vv_f64m2(__VA_ARGS__) |
| #define | vfsgnj_vv_f64m2_m(...) __riscv_vfsgnj_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfsgnj_vv_f64m4(...) __riscv_vfsgnj_vv_f64m4(__VA_ARGS__) |
| #define | vfsgnj_vv_f64m4_m(...) __riscv_vfsgnj_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfsgnj_vv_f64m8(...) __riscv_vfsgnj_vv_f64m8(__VA_ARGS__) |
| #define | vfsgnj_vv_f64m8_m(...) __riscv_vfsgnj_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vf_f16m1(...) __riscv_vfsgnjn_vf_f16m1(__VA_ARGS__) |
| #define | vfsgnjn_vf_f16m1_m(...) __riscv_vfsgnjn_vf_f16m1_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vf_f16m2(...) __riscv_vfsgnjn_vf_f16m2(__VA_ARGS__) |
| #define | vfsgnjn_vf_f16m2_m(...) __riscv_vfsgnjn_vf_f16m2_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vf_f16m4(...) __riscv_vfsgnjn_vf_f16m4(__VA_ARGS__) |
| #define | vfsgnjn_vf_f16m4_m(...) __riscv_vfsgnjn_vf_f16m4_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vf_f16m8(...) __riscv_vfsgnjn_vf_f16m8(__VA_ARGS__) |
| #define | vfsgnjn_vf_f16m8_m(...) __riscv_vfsgnjn_vf_f16m8_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vf_f16mf2(...) __riscv_vfsgnjn_vf_f16mf2(__VA_ARGS__) |
| #define | vfsgnjn_vf_f16mf2_m(...) __riscv_vfsgnjn_vf_f16mf2_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vf_f16mf4(...) __riscv_vfsgnjn_vf_f16mf4(__VA_ARGS__) |
| #define | vfsgnjn_vf_f16mf4_m(...) __riscv_vfsgnjn_vf_f16mf4_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vf_f32m1(...) __riscv_vfsgnjn_vf_f32m1(__VA_ARGS__) |
| #define | vfsgnjn_vf_f32m1_m(...) __riscv_vfsgnjn_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vf_f32m2(...) __riscv_vfsgnjn_vf_f32m2(__VA_ARGS__) |
| #define | vfsgnjn_vf_f32m2_m(...) __riscv_vfsgnjn_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vf_f32m4(...) __riscv_vfsgnjn_vf_f32m4(__VA_ARGS__) |
| #define | vfsgnjn_vf_f32m4_m(...) __riscv_vfsgnjn_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vf_f32m8(...) __riscv_vfsgnjn_vf_f32m8(__VA_ARGS__) |
| #define | vfsgnjn_vf_f32m8_m(...) __riscv_vfsgnjn_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vf_f32mf2(...) __riscv_vfsgnjn_vf_f32mf2(__VA_ARGS__) |
| #define | vfsgnjn_vf_f32mf2_m(...) __riscv_vfsgnjn_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vf_f64m1(...) __riscv_vfsgnjn_vf_f64m1(__VA_ARGS__) |
| #define | vfsgnjn_vf_f64m1_m(...) __riscv_vfsgnjn_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vf_f64m2(...) __riscv_vfsgnjn_vf_f64m2(__VA_ARGS__) |
| #define | vfsgnjn_vf_f64m2_m(...) __riscv_vfsgnjn_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vf_f64m4(...) __riscv_vfsgnjn_vf_f64m4(__VA_ARGS__) |
| #define | vfsgnjn_vf_f64m4_m(...) __riscv_vfsgnjn_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vf_f64m8(...) __riscv_vfsgnjn_vf_f64m8(__VA_ARGS__) |
| #define | vfsgnjn_vf_f64m8_m(...) __riscv_vfsgnjn_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vv_f16m1(...) __riscv_vfsgnjn_vv_f16m1(__VA_ARGS__) |
| #define | vfsgnjn_vv_f16m1_m(...) __riscv_vfsgnjn_vv_f16m1_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vv_f16m2(...) __riscv_vfsgnjn_vv_f16m2(__VA_ARGS__) |
| #define | vfsgnjn_vv_f16m2_m(...) __riscv_vfsgnjn_vv_f16m2_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vv_f16m4(...) __riscv_vfsgnjn_vv_f16m4(__VA_ARGS__) |
| #define | vfsgnjn_vv_f16m4_m(...) __riscv_vfsgnjn_vv_f16m4_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vv_f16m8(...) __riscv_vfsgnjn_vv_f16m8(__VA_ARGS__) |
| #define | vfsgnjn_vv_f16m8_m(...) __riscv_vfsgnjn_vv_f16m8_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vv_f16mf2(...) __riscv_vfsgnjn_vv_f16mf2(__VA_ARGS__) |
| #define | vfsgnjn_vv_f16mf2_m(...) __riscv_vfsgnjn_vv_f16mf2_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vv_f16mf4(...) __riscv_vfsgnjn_vv_f16mf4(__VA_ARGS__) |
| #define | vfsgnjn_vv_f16mf4_m(...) __riscv_vfsgnjn_vv_f16mf4_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vv_f32m1(...) __riscv_vfsgnjn_vv_f32m1(__VA_ARGS__) |
| #define | vfsgnjn_vv_f32m1_m(...) __riscv_vfsgnjn_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vv_f32m2(...) __riscv_vfsgnjn_vv_f32m2(__VA_ARGS__) |
| #define | vfsgnjn_vv_f32m2_m(...) __riscv_vfsgnjn_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vv_f32m4(...) __riscv_vfsgnjn_vv_f32m4(__VA_ARGS__) |
| #define | vfsgnjn_vv_f32m4_m(...) __riscv_vfsgnjn_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vv_f32m8(...) __riscv_vfsgnjn_vv_f32m8(__VA_ARGS__) |
| #define | vfsgnjn_vv_f32m8_m(...) __riscv_vfsgnjn_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vv_f32mf2(...) __riscv_vfsgnjn_vv_f32mf2(__VA_ARGS__) |
| #define | vfsgnjn_vv_f32mf2_m(...) __riscv_vfsgnjn_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vv_f64m1(...) __riscv_vfsgnjn_vv_f64m1(__VA_ARGS__) |
| #define | vfsgnjn_vv_f64m1_m(...) __riscv_vfsgnjn_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vv_f64m2(...) __riscv_vfsgnjn_vv_f64m2(__VA_ARGS__) |
| #define | vfsgnjn_vv_f64m2_m(...) __riscv_vfsgnjn_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vv_f64m4(...) __riscv_vfsgnjn_vv_f64m4(__VA_ARGS__) |
| #define | vfsgnjn_vv_f64m4_m(...) __riscv_vfsgnjn_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfsgnjn_vv_f64m8(...) __riscv_vfsgnjn_vv_f64m8(__VA_ARGS__) |
| #define | vfsgnjn_vv_f64m8_m(...) __riscv_vfsgnjn_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vf_f16m1(...) __riscv_vfsgnjx_vf_f16m1(__VA_ARGS__) |
| #define | vfsgnjx_vf_f16m1_m(...) __riscv_vfsgnjx_vf_f16m1_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vf_f16m2(...) __riscv_vfsgnjx_vf_f16m2(__VA_ARGS__) |
| #define | vfsgnjx_vf_f16m2_m(...) __riscv_vfsgnjx_vf_f16m2_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vf_f16m4(...) __riscv_vfsgnjx_vf_f16m4(__VA_ARGS__) |
| #define | vfsgnjx_vf_f16m4_m(...) __riscv_vfsgnjx_vf_f16m4_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vf_f16m8(...) __riscv_vfsgnjx_vf_f16m8(__VA_ARGS__) |
| #define | vfsgnjx_vf_f16m8_m(...) __riscv_vfsgnjx_vf_f16m8_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vf_f16mf2(...) __riscv_vfsgnjx_vf_f16mf2(__VA_ARGS__) |
| #define | vfsgnjx_vf_f16mf2_m(...) __riscv_vfsgnjx_vf_f16mf2_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vf_f16mf4(...) __riscv_vfsgnjx_vf_f16mf4(__VA_ARGS__) |
| #define | vfsgnjx_vf_f16mf4_m(...) __riscv_vfsgnjx_vf_f16mf4_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vf_f32m1(...) __riscv_vfsgnjx_vf_f32m1(__VA_ARGS__) |
| #define | vfsgnjx_vf_f32m1_m(...) __riscv_vfsgnjx_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vf_f32m2(...) __riscv_vfsgnjx_vf_f32m2(__VA_ARGS__) |
| #define | vfsgnjx_vf_f32m2_m(...) __riscv_vfsgnjx_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vf_f32m4(...) __riscv_vfsgnjx_vf_f32m4(__VA_ARGS__) |
| #define | vfsgnjx_vf_f32m4_m(...) __riscv_vfsgnjx_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vf_f32m8(...) __riscv_vfsgnjx_vf_f32m8(__VA_ARGS__) |
| #define | vfsgnjx_vf_f32m8_m(...) __riscv_vfsgnjx_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vf_f32mf2(...) __riscv_vfsgnjx_vf_f32mf2(__VA_ARGS__) |
| #define | vfsgnjx_vf_f32mf2_m(...) __riscv_vfsgnjx_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vf_f64m1(...) __riscv_vfsgnjx_vf_f64m1(__VA_ARGS__) |
| #define | vfsgnjx_vf_f64m1_m(...) __riscv_vfsgnjx_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vf_f64m2(...) __riscv_vfsgnjx_vf_f64m2(__VA_ARGS__) |
| #define | vfsgnjx_vf_f64m2_m(...) __riscv_vfsgnjx_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vf_f64m4(...) __riscv_vfsgnjx_vf_f64m4(__VA_ARGS__) |
| #define | vfsgnjx_vf_f64m4_m(...) __riscv_vfsgnjx_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vf_f64m8(...) __riscv_vfsgnjx_vf_f64m8(__VA_ARGS__) |
| #define | vfsgnjx_vf_f64m8_m(...) __riscv_vfsgnjx_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vv_f16m1(...) __riscv_vfsgnjx_vv_f16m1(__VA_ARGS__) |
| #define | vfsgnjx_vv_f16m1_m(...) __riscv_vfsgnjx_vv_f16m1_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vv_f16m2(...) __riscv_vfsgnjx_vv_f16m2(__VA_ARGS__) |
| #define | vfsgnjx_vv_f16m2_m(...) __riscv_vfsgnjx_vv_f16m2_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vv_f16m4(...) __riscv_vfsgnjx_vv_f16m4(__VA_ARGS__) |
| #define | vfsgnjx_vv_f16m4_m(...) __riscv_vfsgnjx_vv_f16m4_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vv_f16m8(...) __riscv_vfsgnjx_vv_f16m8(__VA_ARGS__) |
| #define | vfsgnjx_vv_f16m8_m(...) __riscv_vfsgnjx_vv_f16m8_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vv_f16mf2(...) __riscv_vfsgnjx_vv_f16mf2(__VA_ARGS__) |
| #define | vfsgnjx_vv_f16mf2_m(...) __riscv_vfsgnjx_vv_f16mf2_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vv_f16mf4(...) __riscv_vfsgnjx_vv_f16mf4(__VA_ARGS__) |
| #define | vfsgnjx_vv_f16mf4_m(...) __riscv_vfsgnjx_vv_f16mf4_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vv_f32m1(...) __riscv_vfsgnjx_vv_f32m1(__VA_ARGS__) |
| #define | vfsgnjx_vv_f32m1_m(...) __riscv_vfsgnjx_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vv_f32m2(...) __riscv_vfsgnjx_vv_f32m2(__VA_ARGS__) |
| #define | vfsgnjx_vv_f32m2_m(...) __riscv_vfsgnjx_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vv_f32m4(...) __riscv_vfsgnjx_vv_f32m4(__VA_ARGS__) |
| #define | vfsgnjx_vv_f32m4_m(...) __riscv_vfsgnjx_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vv_f32m8(...) __riscv_vfsgnjx_vv_f32m8(__VA_ARGS__) |
| #define | vfsgnjx_vv_f32m8_m(...) __riscv_vfsgnjx_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vv_f32mf2(...) __riscv_vfsgnjx_vv_f32mf2(__VA_ARGS__) |
| #define | vfsgnjx_vv_f32mf2_m(...) __riscv_vfsgnjx_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vv_f64m1(...) __riscv_vfsgnjx_vv_f64m1(__VA_ARGS__) |
| #define | vfsgnjx_vv_f64m1_m(...) __riscv_vfsgnjx_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vv_f64m2(...) __riscv_vfsgnjx_vv_f64m2(__VA_ARGS__) |
| #define | vfsgnjx_vv_f64m2_m(...) __riscv_vfsgnjx_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vv_f64m4(...) __riscv_vfsgnjx_vv_f64m4(__VA_ARGS__) |
| #define | vfsgnjx_vv_f64m4_m(...) __riscv_vfsgnjx_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfsgnjx_vv_f64m8(...) __riscv_vfsgnjx_vv_f64m8(__VA_ARGS__) |
| #define | vfsgnjx_vv_f64m8_m(...) __riscv_vfsgnjx_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfslide1down_vf_f16m1(...) __riscv_vfslide1down_vf_f16m1(__VA_ARGS__) |
| #define | vfslide1down_vf_f16m1_m(...) __riscv_vfslide1down_vf_f16m1_tumu(__VA_ARGS__) |
| #define | vfslide1down_vf_f16m2(...) __riscv_vfslide1down_vf_f16m2(__VA_ARGS__) |
| #define | vfslide1down_vf_f16m2_m(...) __riscv_vfslide1down_vf_f16m2_tumu(__VA_ARGS__) |
| #define | vfslide1down_vf_f16m4(...) __riscv_vfslide1down_vf_f16m4(__VA_ARGS__) |
| #define | vfslide1down_vf_f16m4_m(...) __riscv_vfslide1down_vf_f16m4_tumu(__VA_ARGS__) |
| #define | vfslide1down_vf_f16m8(...) __riscv_vfslide1down_vf_f16m8(__VA_ARGS__) |
| #define | vfslide1down_vf_f16m8_m(...) __riscv_vfslide1down_vf_f16m8_tumu(__VA_ARGS__) |
| #define | vfslide1down_vf_f16mf2(...) __riscv_vfslide1down_vf_f16mf2(__VA_ARGS__) |
| #define | vfslide1down_vf_f16mf2_m(...) __riscv_vfslide1down_vf_f16mf2_tumu(__VA_ARGS__) |
| #define | vfslide1down_vf_f16mf4(...) __riscv_vfslide1down_vf_f16mf4(__VA_ARGS__) |
| #define | vfslide1down_vf_f16mf4_m(...) __riscv_vfslide1down_vf_f16mf4_tumu(__VA_ARGS__) |
| #define | vfslide1down_vf_f32m1(...) __riscv_vfslide1down_vf_f32m1(__VA_ARGS__) |
| #define | vfslide1down_vf_f32m1_m(...) __riscv_vfslide1down_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfslide1down_vf_f32m2(...) __riscv_vfslide1down_vf_f32m2(__VA_ARGS__) |
| #define | vfslide1down_vf_f32m2_m(...) __riscv_vfslide1down_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfslide1down_vf_f32m4(...) __riscv_vfslide1down_vf_f32m4(__VA_ARGS__) |
| #define | vfslide1down_vf_f32m4_m(...) __riscv_vfslide1down_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfslide1down_vf_f32m8(...) __riscv_vfslide1down_vf_f32m8(__VA_ARGS__) |
| #define | vfslide1down_vf_f32m8_m(...) __riscv_vfslide1down_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfslide1down_vf_f32mf2(...) __riscv_vfslide1down_vf_f32mf2(__VA_ARGS__) |
| #define | vfslide1down_vf_f32mf2_m(...) __riscv_vfslide1down_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfslide1down_vf_f64m1(...) __riscv_vfslide1down_vf_f64m1(__VA_ARGS__) |
| #define | vfslide1down_vf_f64m1_m(...) __riscv_vfslide1down_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfslide1down_vf_f64m2(...) __riscv_vfslide1down_vf_f64m2(__VA_ARGS__) |
| #define | vfslide1down_vf_f64m2_m(...) __riscv_vfslide1down_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfslide1down_vf_f64m4(...) __riscv_vfslide1down_vf_f64m4(__VA_ARGS__) |
| #define | vfslide1down_vf_f64m4_m(...) __riscv_vfslide1down_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfslide1down_vf_f64m8(...) __riscv_vfslide1down_vf_f64m8(__VA_ARGS__) |
| #define | vfslide1down_vf_f64m8_m(...) __riscv_vfslide1down_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfslide1up_vf_f16m1(...) __riscv_vfslide1up_vf_f16m1(__VA_ARGS__) |
| #define | vfslide1up_vf_f16m1_m(...) __riscv_vfslide1up_vf_f16m1_tumu(__VA_ARGS__) |
| #define | vfslide1up_vf_f16m2(...) __riscv_vfslide1up_vf_f16m2(__VA_ARGS__) |
| #define | vfslide1up_vf_f16m2_m(...) __riscv_vfslide1up_vf_f16m2_tumu(__VA_ARGS__) |
| #define | vfslide1up_vf_f16m4(...) __riscv_vfslide1up_vf_f16m4(__VA_ARGS__) |
| #define | vfslide1up_vf_f16m4_m(...) __riscv_vfslide1up_vf_f16m4_tumu(__VA_ARGS__) |
| #define | vfslide1up_vf_f16m8(...) __riscv_vfslide1up_vf_f16m8(__VA_ARGS__) |
| #define | vfslide1up_vf_f16m8_m(...) __riscv_vfslide1up_vf_f16m8_tumu(__VA_ARGS__) |
| #define | vfslide1up_vf_f16mf2(...) __riscv_vfslide1up_vf_f16mf2(__VA_ARGS__) |
| #define | vfslide1up_vf_f16mf2_m(...) __riscv_vfslide1up_vf_f16mf2_tumu(__VA_ARGS__) |
| #define | vfslide1up_vf_f16mf4(...) __riscv_vfslide1up_vf_f16mf4(__VA_ARGS__) |
| #define | vfslide1up_vf_f16mf4_m(...) __riscv_vfslide1up_vf_f16mf4_tumu(__VA_ARGS__) |
| #define | vfslide1up_vf_f32m1(...) __riscv_vfslide1up_vf_f32m1(__VA_ARGS__) |
| #define | vfslide1up_vf_f32m1_m(...) __riscv_vfslide1up_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfslide1up_vf_f32m2(...) __riscv_vfslide1up_vf_f32m2(__VA_ARGS__) |
| #define | vfslide1up_vf_f32m2_m(...) __riscv_vfslide1up_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfslide1up_vf_f32m4(...) __riscv_vfslide1up_vf_f32m4(__VA_ARGS__) |
| #define | vfslide1up_vf_f32m4_m(...) __riscv_vfslide1up_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfslide1up_vf_f32m8(...) __riscv_vfslide1up_vf_f32m8(__VA_ARGS__) |
| #define | vfslide1up_vf_f32m8_m(...) __riscv_vfslide1up_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfslide1up_vf_f32mf2(...) __riscv_vfslide1up_vf_f32mf2(__VA_ARGS__) |
| #define | vfslide1up_vf_f32mf2_m(...) __riscv_vfslide1up_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfslide1up_vf_f64m1(...) __riscv_vfslide1up_vf_f64m1(__VA_ARGS__) |
| #define | vfslide1up_vf_f64m1_m(...) __riscv_vfslide1up_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfslide1up_vf_f64m2(...) __riscv_vfslide1up_vf_f64m2(__VA_ARGS__) |
| #define | vfslide1up_vf_f64m2_m(...) __riscv_vfslide1up_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfslide1up_vf_f64m4(...) __riscv_vfslide1up_vf_f64m4(__VA_ARGS__) |
| #define | vfslide1up_vf_f64m4_m(...) __riscv_vfslide1up_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfslide1up_vf_f64m8(...) __riscv_vfslide1up_vf_f64m8(__VA_ARGS__) |
| #define | vfslide1up_vf_f64m8_m(...) __riscv_vfslide1up_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfsqrt_v_f16m1(...) __riscv_vfsqrt_v_f16m1(__VA_ARGS__) |
| #define | vfsqrt_v_f16m1_m(...) __riscv_vfsqrt_v_f16m1_tumu(__VA_ARGS__) |
| #define | vfsqrt_v_f16m2(...) __riscv_vfsqrt_v_f16m2(__VA_ARGS__) |
| #define | vfsqrt_v_f16m2_m(...) __riscv_vfsqrt_v_f16m2_tumu(__VA_ARGS__) |
| #define | vfsqrt_v_f16m4(...) __riscv_vfsqrt_v_f16m4(__VA_ARGS__) |
| #define | vfsqrt_v_f16m4_m(...) __riscv_vfsqrt_v_f16m4_tumu(__VA_ARGS__) |
| #define | vfsqrt_v_f16m8(...) __riscv_vfsqrt_v_f16m8(__VA_ARGS__) |
| #define | vfsqrt_v_f16m8_m(...) __riscv_vfsqrt_v_f16m8_tumu(__VA_ARGS__) |
| #define | vfsqrt_v_f16mf2(...) __riscv_vfsqrt_v_f16mf2(__VA_ARGS__) |
| #define | vfsqrt_v_f16mf2_m(...) __riscv_vfsqrt_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vfsqrt_v_f16mf4(...) __riscv_vfsqrt_v_f16mf4(__VA_ARGS__) |
| #define | vfsqrt_v_f16mf4_m(...) __riscv_vfsqrt_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vfsqrt_v_f32m1(...) __riscv_vfsqrt_v_f32m1(__VA_ARGS__) |
| #define | vfsqrt_v_f32m1_m(...) __riscv_vfsqrt_v_f32m1_tumu(__VA_ARGS__) |
| #define | vfsqrt_v_f32m2(...) __riscv_vfsqrt_v_f32m2(__VA_ARGS__) |
| #define | vfsqrt_v_f32m2_m(...) __riscv_vfsqrt_v_f32m2_tumu(__VA_ARGS__) |
| #define | vfsqrt_v_f32m4(...) __riscv_vfsqrt_v_f32m4(__VA_ARGS__) |
| #define | vfsqrt_v_f32m4_m(...) __riscv_vfsqrt_v_f32m4_tumu(__VA_ARGS__) |
| #define | vfsqrt_v_f32m8(...) __riscv_vfsqrt_v_f32m8(__VA_ARGS__) |
| #define | vfsqrt_v_f32m8_m(...) __riscv_vfsqrt_v_f32m8_tumu(__VA_ARGS__) |
| #define | vfsqrt_v_f32mf2(...) __riscv_vfsqrt_v_f32mf2(__VA_ARGS__) |
| #define | vfsqrt_v_f32mf2_m(...) __riscv_vfsqrt_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vfsqrt_v_f64m1(...) __riscv_vfsqrt_v_f64m1(__VA_ARGS__) |
| #define | vfsqrt_v_f64m1_m(...) __riscv_vfsqrt_v_f64m1_tumu(__VA_ARGS__) |
| #define | vfsqrt_v_f64m2(...) __riscv_vfsqrt_v_f64m2(__VA_ARGS__) |
| #define | vfsqrt_v_f64m2_m(...) __riscv_vfsqrt_v_f64m2_tumu(__VA_ARGS__) |
| #define | vfsqrt_v_f64m4(...) __riscv_vfsqrt_v_f64m4(__VA_ARGS__) |
| #define | vfsqrt_v_f64m4_m(...) __riscv_vfsqrt_v_f64m4_tumu(__VA_ARGS__) |
| #define | vfsqrt_v_f64m8(...) __riscv_vfsqrt_v_f64m8(__VA_ARGS__) |
| #define | vfsqrt_v_f64m8_m(...) __riscv_vfsqrt_v_f64m8_tumu(__VA_ARGS__) |
| #define | vfsub_vf_f16m1(...) __riscv_vfsub_vf_f16m1(__VA_ARGS__) |
| #define | vfsub_vf_f16m1_m(...) __riscv_vfsub_vf_f16m1_tumu(__VA_ARGS__) |
| #define | vfsub_vf_f16m2(...) __riscv_vfsub_vf_f16m2(__VA_ARGS__) |
| #define | vfsub_vf_f16m2_m(...) __riscv_vfsub_vf_f16m2_tumu(__VA_ARGS__) |
| #define | vfsub_vf_f16m4(...) __riscv_vfsub_vf_f16m4(__VA_ARGS__) |
| #define | vfsub_vf_f16m4_m(...) __riscv_vfsub_vf_f16m4_tumu(__VA_ARGS__) |
| #define | vfsub_vf_f16m8(...) __riscv_vfsub_vf_f16m8(__VA_ARGS__) |
| #define | vfsub_vf_f16m8_m(...) __riscv_vfsub_vf_f16m8_tumu(__VA_ARGS__) |
| #define | vfsub_vf_f16mf2(...) __riscv_vfsub_vf_f16mf2(__VA_ARGS__) |
| #define | vfsub_vf_f16mf2_m(...) __riscv_vfsub_vf_f16mf2_tumu(__VA_ARGS__) |
| #define | vfsub_vf_f16mf4(...) __riscv_vfsub_vf_f16mf4(__VA_ARGS__) |
| #define | vfsub_vf_f16mf4_m(...) __riscv_vfsub_vf_f16mf4_tumu(__VA_ARGS__) |
| #define | vfsub_vf_f32m1(...) __riscv_vfsub_vf_f32m1(__VA_ARGS__) |
| #define | vfsub_vf_f32m1_m(...) __riscv_vfsub_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfsub_vf_f32m2(...) __riscv_vfsub_vf_f32m2(__VA_ARGS__) |
| #define | vfsub_vf_f32m2_m(...) __riscv_vfsub_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfsub_vf_f32m4(...) __riscv_vfsub_vf_f32m4(__VA_ARGS__) |
| #define | vfsub_vf_f32m4_m(...) __riscv_vfsub_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfsub_vf_f32m8(...) __riscv_vfsub_vf_f32m8(__VA_ARGS__) |
| #define | vfsub_vf_f32m8_m(...) __riscv_vfsub_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfsub_vf_f32mf2(...) __riscv_vfsub_vf_f32mf2(__VA_ARGS__) |
| #define | vfsub_vf_f32mf2_m(...) __riscv_vfsub_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfsub_vf_f64m1(...) __riscv_vfsub_vf_f64m1(__VA_ARGS__) |
| #define | vfsub_vf_f64m1_m(...) __riscv_vfsub_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfsub_vf_f64m2(...) __riscv_vfsub_vf_f64m2(__VA_ARGS__) |
| #define | vfsub_vf_f64m2_m(...) __riscv_vfsub_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfsub_vf_f64m4(...) __riscv_vfsub_vf_f64m4(__VA_ARGS__) |
| #define | vfsub_vf_f64m4_m(...) __riscv_vfsub_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfsub_vf_f64m8(...) __riscv_vfsub_vf_f64m8(__VA_ARGS__) |
| #define | vfsub_vf_f64m8_m(...) __riscv_vfsub_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfsub_vv_f16m1(...) __riscv_vfsub_vv_f16m1(__VA_ARGS__) |
| #define | vfsub_vv_f16m1_m(...) __riscv_vfsub_vv_f16m1_tumu(__VA_ARGS__) |
| #define | vfsub_vv_f16m2(...) __riscv_vfsub_vv_f16m2(__VA_ARGS__) |
| #define | vfsub_vv_f16m2_m(...) __riscv_vfsub_vv_f16m2_tumu(__VA_ARGS__) |
| #define | vfsub_vv_f16m4(...) __riscv_vfsub_vv_f16m4(__VA_ARGS__) |
| #define | vfsub_vv_f16m4_m(...) __riscv_vfsub_vv_f16m4_tumu(__VA_ARGS__) |
| #define | vfsub_vv_f16m8(...) __riscv_vfsub_vv_f16m8(__VA_ARGS__) |
| #define | vfsub_vv_f16m8_m(...) __riscv_vfsub_vv_f16m8_tumu(__VA_ARGS__) |
| #define | vfsub_vv_f16mf2(...) __riscv_vfsub_vv_f16mf2(__VA_ARGS__) |
| #define | vfsub_vv_f16mf2_m(...) __riscv_vfsub_vv_f16mf2_tumu(__VA_ARGS__) |
| #define | vfsub_vv_f16mf4(...) __riscv_vfsub_vv_f16mf4(__VA_ARGS__) |
| #define | vfsub_vv_f16mf4_m(...) __riscv_vfsub_vv_f16mf4_tumu(__VA_ARGS__) |
| #define | vfsub_vv_f32m1(...) __riscv_vfsub_vv_f32m1(__VA_ARGS__) |
| #define | vfsub_vv_f32m1_m(...) __riscv_vfsub_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfsub_vv_f32m2(...) __riscv_vfsub_vv_f32m2(__VA_ARGS__) |
| #define | vfsub_vv_f32m2_m(...) __riscv_vfsub_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfsub_vv_f32m4(...) __riscv_vfsub_vv_f32m4(__VA_ARGS__) |
| #define | vfsub_vv_f32m4_m(...) __riscv_vfsub_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfsub_vv_f32m8(...) __riscv_vfsub_vv_f32m8(__VA_ARGS__) |
| #define | vfsub_vv_f32m8_m(...) __riscv_vfsub_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfsub_vv_f32mf2(...) __riscv_vfsub_vv_f32mf2(__VA_ARGS__) |
| #define | vfsub_vv_f32mf2_m(...) __riscv_vfsub_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfsub_vv_f64m1(...) __riscv_vfsub_vv_f64m1(__VA_ARGS__) |
| #define | vfsub_vv_f64m1_m(...) __riscv_vfsub_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfsub_vv_f64m2(...) __riscv_vfsub_vv_f64m2(__VA_ARGS__) |
| #define | vfsub_vv_f64m2_m(...) __riscv_vfsub_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfsub_vv_f64m4(...) __riscv_vfsub_vv_f64m4(__VA_ARGS__) |
| #define | vfsub_vv_f64m4_m(...) __riscv_vfsub_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfsub_vv_f64m8(...) __riscv_vfsub_vv_f64m8(__VA_ARGS__) |
| #define | vfsub_vv_f64m8_m(...) __riscv_vfsub_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfwadd_vf_f32m1(...) __riscv_vfwadd_vf_f32m1(__VA_ARGS__) |
| #define | vfwadd_vf_f32m1_m(...) __riscv_vfwadd_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfwadd_vf_f32m2(...) __riscv_vfwadd_vf_f32m2(__VA_ARGS__) |
| #define | vfwadd_vf_f32m2_m(...) __riscv_vfwadd_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfwadd_vf_f32m4(...) __riscv_vfwadd_vf_f32m4(__VA_ARGS__) |
| #define | vfwadd_vf_f32m4_m(...) __riscv_vfwadd_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfwadd_vf_f32m8(...) __riscv_vfwadd_vf_f32m8(__VA_ARGS__) |
| #define | vfwadd_vf_f32m8_m(...) __riscv_vfwadd_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfwadd_vf_f32mf2(...) __riscv_vfwadd_vf_f32mf2(__VA_ARGS__) |
| #define | vfwadd_vf_f32mf2_m(...) __riscv_vfwadd_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfwadd_vf_f64m1(...) __riscv_vfwadd_vf_f64m1(__VA_ARGS__) |
| #define | vfwadd_vf_f64m1_m(...) __riscv_vfwadd_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfwadd_vf_f64m2(...) __riscv_vfwadd_vf_f64m2(__VA_ARGS__) |
| #define | vfwadd_vf_f64m2_m(...) __riscv_vfwadd_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfwadd_vf_f64m4(...) __riscv_vfwadd_vf_f64m4(__VA_ARGS__) |
| #define | vfwadd_vf_f64m4_m(...) __riscv_vfwadd_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfwadd_vf_f64m8(...) __riscv_vfwadd_vf_f64m8(__VA_ARGS__) |
| #define | vfwadd_vf_f64m8_m(...) __riscv_vfwadd_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfwadd_vv_f32m1(...) __riscv_vfwadd_vv_f32m1(__VA_ARGS__) |
| #define | vfwadd_vv_f32m1_m(...) __riscv_vfwadd_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfwadd_vv_f32m2(...) __riscv_vfwadd_vv_f32m2(__VA_ARGS__) |
| #define | vfwadd_vv_f32m2_m(...) __riscv_vfwadd_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfwadd_vv_f32m4(...) __riscv_vfwadd_vv_f32m4(__VA_ARGS__) |
| #define | vfwadd_vv_f32m4_m(...) __riscv_vfwadd_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfwadd_vv_f32m8(...) __riscv_vfwadd_vv_f32m8(__VA_ARGS__) |
| #define | vfwadd_vv_f32m8_m(...) __riscv_vfwadd_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfwadd_vv_f32mf2(...) __riscv_vfwadd_vv_f32mf2(__VA_ARGS__) |
| #define | vfwadd_vv_f32mf2_m(...) __riscv_vfwadd_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfwadd_vv_f64m1(...) __riscv_vfwadd_vv_f64m1(__VA_ARGS__) |
| #define | vfwadd_vv_f64m1_m(...) __riscv_vfwadd_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfwadd_vv_f64m2(...) __riscv_vfwadd_vv_f64m2(__VA_ARGS__) |
| #define | vfwadd_vv_f64m2_m(...) __riscv_vfwadd_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfwadd_vv_f64m4(...) __riscv_vfwadd_vv_f64m4(__VA_ARGS__) |
| #define | vfwadd_vv_f64m4_m(...) __riscv_vfwadd_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfwadd_vv_f64m8(...) __riscv_vfwadd_vv_f64m8(__VA_ARGS__) |
| #define | vfwadd_vv_f64m8_m(...) __riscv_vfwadd_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfwadd_wf_f32m1(...) __riscv_vfwadd_wf_f32m1(__VA_ARGS__) |
| #define | vfwadd_wf_f32m1_m(...) __riscv_vfwadd_wf_f32m1_tumu(__VA_ARGS__) |
| #define | vfwadd_wf_f32m2(...) __riscv_vfwadd_wf_f32m2(__VA_ARGS__) |
| #define | vfwadd_wf_f32m2_m(...) __riscv_vfwadd_wf_f32m2_tumu(__VA_ARGS__) |
| #define | vfwadd_wf_f32m4(...) __riscv_vfwadd_wf_f32m4(__VA_ARGS__) |
| #define | vfwadd_wf_f32m4_m(...) __riscv_vfwadd_wf_f32m4_tumu(__VA_ARGS__) |
| #define | vfwadd_wf_f32m8(...) __riscv_vfwadd_wf_f32m8(__VA_ARGS__) |
| #define | vfwadd_wf_f32m8_m(...) __riscv_vfwadd_wf_f32m8_tumu(__VA_ARGS__) |
| #define | vfwadd_wf_f32mf2(...) __riscv_vfwadd_wf_f32mf2(__VA_ARGS__) |
| #define | vfwadd_wf_f32mf2_m(...) __riscv_vfwadd_wf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfwadd_wf_f64m1(...) __riscv_vfwadd_wf_f64m1(__VA_ARGS__) |
| #define | vfwadd_wf_f64m1_m(...) __riscv_vfwadd_wf_f64m1_tumu(__VA_ARGS__) |
| #define | vfwadd_wf_f64m2(...) __riscv_vfwadd_wf_f64m2(__VA_ARGS__) |
| #define | vfwadd_wf_f64m2_m(...) __riscv_vfwadd_wf_f64m2_tumu(__VA_ARGS__) |
| #define | vfwadd_wf_f64m4(...) __riscv_vfwadd_wf_f64m4(__VA_ARGS__) |
| #define | vfwadd_wf_f64m4_m(...) __riscv_vfwadd_wf_f64m4_tumu(__VA_ARGS__) |
| #define | vfwadd_wf_f64m8(...) __riscv_vfwadd_wf_f64m8(__VA_ARGS__) |
| #define | vfwadd_wf_f64m8_m(...) __riscv_vfwadd_wf_f64m8_tumu(__VA_ARGS__) |
| #define | vfwadd_wv_f32m1(...) __riscv_vfwadd_wv_f32m1(__VA_ARGS__) |
| #define | vfwadd_wv_f32m1_m(...) __riscv_vfwadd_wv_f32m1_tumu(__VA_ARGS__) |
| #define | vfwadd_wv_f32m2(...) __riscv_vfwadd_wv_f32m2(__VA_ARGS__) |
| #define | vfwadd_wv_f32m2_m(...) __riscv_vfwadd_wv_f32m2_tumu(__VA_ARGS__) |
| #define | vfwadd_wv_f32m4(...) __riscv_vfwadd_wv_f32m4(__VA_ARGS__) |
| #define | vfwadd_wv_f32m4_m(...) __riscv_vfwadd_wv_f32m4_tumu(__VA_ARGS__) |
| #define | vfwadd_wv_f32m8(...) __riscv_vfwadd_wv_f32m8(__VA_ARGS__) |
| #define | vfwadd_wv_f32m8_m(...) __riscv_vfwadd_wv_f32m8_tumu(__VA_ARGS__) |
| #define | vfwadd_wv_f32mf2(...) __riscv_vfwadd_wv_f32mf2(__VA_ARGS__) |
| #define | vfwadd_wv_f32mf2_m(...) __riscv_vfwadd_wv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfwadd_wv_f64m1(...) __riscv_vfwadd_wv_f64m1(__VA_ARGS__) |
| #define | vfwadd_wv_f64m1_m(...) __riscv_vfwadd_wv_f64m1_tumu(__VA_ARGS__) |
| #define | vfwadd_wv_f64m2(...) __riscv_vfwadd_wv_f64m2(__VA_ARGS__) |
| #define | vfwadd_wv_f64m2_m(...) __riscv_vfwadd_wv_f64m2_tumu(__VA_ARGS__) |
| #define | vfwadd_wv_f64m4(...) __riscv_vfwadd_wv_f64m4(__VA_ARGS__) |
| #define | vfwadd_wv_f64m4_m(...) __riscv_vfwadd_wv_f64m4_tumu(__VA_ARGS__) |
| #define | vfwadd_wv_f64m8(...) __riscv_vfwadd_wv_f64m8(__VA_ARGS__) |
| #define | vfwadd_wv_f64m8_m(...) __riscv_vfwadd_wv_f64m8_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_f_v_f32m1(...) __riscv_vfwcvt_f_f_v_f32m1(__VA_ARGS__) |
| #define | vfwcvt_f_f_v_f32m1_m(...) __riscv_vfwcvt_f_f_v_f32m1_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_f_v_f32m2(...) __riscv_vfwcvt_f_f_v_f32m2(__VA_ARGS__) |
| #define | vfwcvt_f_f_v_f32m2_m(...) __riscv_vfwcvt_f_f_v_f32m2_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_f_v_f32m4(...) __riscv_vfwcvt_f_f_v_f32m4(__VA_ARGS__) |
| #define | vfwcvt_f_f_v_f32m4_m(...) __riscv_vfwcvt_f_f_v_f32m4_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_f_v_f32m8(...) __riscv_vfwcvt_f_f_v_f32m8(__VA_ARGS__) |
| #define | vfwcvt_f_f_v_f32m8_m(...) __riscv_vfwcvt_f_f_v_f32m8_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_f_v_f32mf2(...) __riscv_vfwcvt_f_f_v_f32mf2(__VA_ARGS__) |
| #define | vfwcvt_f_f_v_f32mf2_m(...) __riscv_vfwcvt_f_f_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_f_v_f64m1(...) __riscv_vfwcvt_f_f_v_f64m1(__VA_ARGS__) |
| #define | vfwcvt_f_f_v_f64m1_m(...) __riscv_vfwcvt_f_f_v_f64m1_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_f_v_f64m2(...) __riscv_vfwcvt_f_f_v_f64m2(__VA_ARGS__) |
| #define | vfwcvt_f_f_v_f64m2_m(...) __riscv_vfwcvt_f_f_v_f64m2_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_f_v_f64m4(...) __riscv_vfwcvt_f_f_v_f64m4(__VA_ARGS__) |
| #define | vfwcvt_f_f_v_f64m4_m(...) __riscv_vfwcvt_f_f_v_f64m4_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_f_v_f64m8(...) __riscv_vfwcvt_f_f_v_f64m8(__VA_ARGS__) |
| #define | vfwcvt_f_f_v_f64m8_m(...) __riscv_vfwcvt_f_f_v_f64m8_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f16m1(...) __riscv_vfwcvt_f_x_v_f16m1(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f16m1_m(...) __riscv_vfwcvt_f_x_v_f16m1_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f16m2(...) __riscv_vfwcvt_f_x_v_f16m2(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f16m2_m(...) __riscv_vfwcvt_f_x_v_f16m2_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f16m4(...) __riscv_vfwcvt_f_x_v_f16m4(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f16m4_m(...) __riscv_vfwcvt_f_x_v_f16m4_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f16m8(...) __riscv_vfwcvt_f_x_v_f16m8(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f16m8_m(...) __riscv_vfwcvt_f_x_v_f16m8_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f16mf2(...) __riscv_vfwcvt_f_x_v_f16mf2(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f16mf2_m(...) __riscv_vfwcvt_f_x_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f16mf4(...) __riscv_vfwcvt_f_x_v_f16mf4(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f16mf4_m(...) __riscv_vfwcvt_f_x_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f32m1(...) __riscv_vfwcvt_f_x_v_f32m1(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f32m1_m(...) __riscv_vfwcvt_f_x_v_f32m1_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f32m2(...) __riscv_vfwcvt_f_x_v_f32m2(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f32m2_m(...) __riscv_vfwcvt_f_x_v_f32m2_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f32m4(...) __riscv_vfwcvt_f_x_v_f32m4(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f32m4_m(...) __riscv_vfwcvt_f_x_v_f32m4_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f32m8(...) __riscv_vfwcvt_f_x_v_f32m8(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f32m8_m(...) __riscv_vfwcvt_f_x_v_f32m8_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f32mf2(...) __riscv_vfwcvt_f_x_v_f32mf2(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f32mf2_m(...) __riscv_vfwcvt_f_x_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f64m1(...) __riscv_vfwcvt_f_x_v_f64m1(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f64m1_m(...) __riscv_vfwcvt_f_x_v_f64m1_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f64m2(...) __riscv_vfwcvt_f_x_v_f64m2(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f64m2_m(...) __riscv_vfwcvt_f_x_v_f64m2_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f64m4(...) __riscv_vfwcvt_f_x_v_f64m4(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f64m4_m(...) __riscv_vfwcvt_f_x_v_f64m4_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f64m8(...) __riscv_vfwcvt_f_x_v_f64m8(__VA_ARGS__) |
| #define | vfwcvt_f_x_v_f64m8_m(...) __riscv_vfwcvt_f_x_v_f64m8_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f16m1(...) __riscv_vfwcvt_f_xu_v_f16m1(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f16m1_m(...) __riscv_vfwcvt_f_xu_v_f16m1_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f16m2(...) __riscv_vfwcvt_f_xu_v_f16m2(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f16m2_m(...) __riscv_vfwcvt_f_xu_v_f16m2_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f16m4(...) __riscv_vfwcvt_f_xu_v_f16m4(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f16m4_m(...) __riscv_vfwcvt_f_xu_v_f16m4_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f16m8(...) __riscv_vfwcvt_f_xu_v_f16m8(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f16m8_m(...) __riscv_vfwcvt_f_xu_v_f16m8_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f16mf2(...) __riscv_vfwcvt_f_xu_v_f16mf2(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f16mf2_m(...) __riscv_vfwcvt_f_xu_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f16mf4(...) __riscv_vfwcvt_f_xu_v_f16mf4(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f16mf4_m(...) __riscv_vfwcvt_f_xu_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f32m1(...) __riscv_vfwcvt_f_xu_v_f32m1(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f32m1_m(...) __riscv_vfwcvt_f_xu_v_f32m1_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f32m2(...) __riscv_vfwcvt_f_xu_v_f32m2(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f32m2_m(...) __riscv_vfwcvt_f_xu_v_f32m2_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f32m4(...) __riscv_vfwcvt_f_xu_v_f32m4(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f32m4_m(...) __riscv_vfwcvt_f_xu_v_f32m4_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f32m8(...) __riscv_vfwcvt_f_xu_v_f32m8(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f32m8_m(...) __riscv_vfwcvt_f_xu_v_f32m8_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f32mf2(...) __riscv_vfwcvt_f_xu_v_f32mf2(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f32mf2_m(...) __riscv_vfwcvt_f_xu_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f64m1(...) __riscv_vfwcvt_f_xu_v_f64m1(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f64m1_m(...) __riscv_vfwcvt_f_xu_v_f64m1_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f64m2(...) __riscv_vfwcvt_f_xu_v_f64m2(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f64m2_m(...) __riscv_vfwcvt_f_xu_v_f64m2_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f64m4(...) __riscv_vfwcvt_f_xu_v_f64m4(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f64m4_m(...) __riscv_vfwcvt_f_xu_v_f64m4_tumu(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f64m8(...) __riscv_vfwcvt_f_xu_v_f64m8(__VA_ARGS__) |
| #define | vfwcvt_f_xu_v_f64m8_m(...) __riscv_vfwcvt_f_xu_v_f64m8_tumu(__VA_ARGS__) |
| #define | vfwcvt_rtz_x_f_v_i32m1(...) __riscv_vfwcvt_rtz_x_f_v_i32m1(__VA_ARGS__) |
| #define | vfwcvt_rtz_x_f_v_i32m1_m(...) __riscv_vfwcvt_rtz_x_f_v_i32m1_tumu(__VA_ARGS__) |
| #define | vfwcvt_rtz_x_f_v_i32m2(...) __riscv_vfwcvt_rtz_x_f_v_i32m2(__VA_ARGS__) |
| #define | vfwcvt_rtz_x_f_v_i32m2_m(...) __riscv_vfwcvt_rtz_x_f_v_i32m2_tumu(__VA_ARGS__) |
| #define | vfwcvt_rtz_x_f_v_i32m4(...) __riscv_vfwcvt_rtz_x_f_v_i32m4(__VA_ARGS__) |
| #define | vfwcvt_rtz_x_f_v_i32m4_m(...) __riscv_vfwcvt_rtz_x_f_v_i32m4_tumu(__VA_ARGS__) |
| #define | vfwcvt_rtz_x_f_v_i32m8(...) __riscv_vfwcvt_rtz_x_f_v_i32m8(__VA_ARGS__) |
| #define | vfwcvt_rtz_x_f_v_i32m8_m(...) __riscv_vfwcvt_rtz_x_f_v_i32m8_tumu(__VA_ARGS__) |
| #define | vfwcvt_rtz_x_f_v_i32mf2(...) __riscv_vfwcvt_rtz_x_f_v_i32mf2(__VA_ARGS__) |
| #define | vfwcvt_rtz_x_f_v_i32mf2_m(...) __riscv_vfwcvt_rtz_x_f_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vfwcvt_rtz_x_f_v_i64m1(...) __riscv_vfwcvt_rtz_x_f_v_i64m1(__VA_ARGS__) |
| #define | vfwcvt_rtz_x_f_v_i64m1_m(...) __riscv_vfwcvt_rtz_x_f_v_i64m1_tumu(__VA_ARGS__) |
| #define | vfwcvt_rtz_x_f_v_i64m2(...) __riscv_vfwcvt_rtz_x_f_v_i64m2(__VA_ARGS__) |
| #define | vfwcvt_rtz_x_f_v_i64m2_m(...) __riscv_vfwcvt_rtz_x_f_v_i64m2_tumu(__VA_ARGS__) |
| #define | vfwcvt_rtz_x_f_v_i64m4(...) __riscv_vfwcvt_rtz_x_f_v_i64m4(__VA_ARGS__) |
| #define | vfwcvt_rtz_x_f_v_i64m4_m(...) __riscv_vfwcvt_rtz_x_f_v_i64m4_tumu(__VA_ARGS__) |
| #define | vfwcvt_rtz_x_f_v_i64m8(...) __riscv_vfwcvt_rtz_x_f_v_i64m8(__VA_ARGS__) |
| #define | vfwcvt_rtz_x_f_v_i64m8_m(...) __riscv_vfwcvt_rtz_x_f_v_i64m8_tumu(__VA_ARGS__) |
| #define | vfwcvt_rtz_xu_f_v_u32m1(...) __riscv_vfwcvt_rtz_xu_f_v_u32m1(__VA_ARGS__) |
| #define | vfwcvt_rtz_xu_f_v_u32m1_m(...) __riscv_vfwcvt_rtz_xu_f_v_u32m1_tumu(__VA_ARGS__) |
| #define | vfwcvt_rtz_xu_f_v_u32m2(...) __riscv_vfwcvt_rtz_xu_f_v_u32m2(__VA_ARGS__) |
| #define | vfwcvt_rtz_xu_f_v_u32m2_m(...) __riscv_vfwcvt_rtz_xu_f_v_u32m2_tumu(__VA_ARGS__) |
| #define | vfwcvt_rtz_xu_f_v_u32m4(...) __riscv_vfwcvt_rtz_xu_f_v_u32m4(__VA_ARGS__) |
| #define | vfwcvt_rtz_xu_f_v_u32m4_m(...) __riscv_vfwcvt_rtz_xu_f_v_u32m4_tumu(__VA_ARGS__) |
| #define | vfwcvt_rtz_xu_f_v_u32m8(...) __riscv_vfwcvt_rtz_xu_f_v_u32m8(__VA_ARGS__) |
| #define | vfwcvt_rtz_xu_f_v_u32m8_m(...) __riscv_vfwcvt_rtz_xu_f_v_u32m8_tumu(__VA_ARGS__) |
| #define | vfwcvt_rtz_xu_f_v_u32mf2(...) __riscv_vfwcvt_rtz_xu_f_v_u32mf2(__VA_ARGS__) |
| #define | vfwcvt_rtz_xu_f_v_u32mf2_m(...) __riscv_vfwcvt_rtz_xu_f_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vfwcvt_rtz_xu_f_v_u64m1(...) __riscv_vfwcvt_rtz_xu_f_v_u64m1(__VA_ARGS__) |
| #define | vfwcvt_rtz_xu_f_v_u64m1_m(...) __riscv_vfwcvt_rtz_xu_f_v_u64m1_tumu(__VA_ARGS__) |
| #define | vfwcvt_rtz_xu_f_v_u64m2(...) __riscv_vfwcvt_rtz_xu_f_v_u64m2(__VA_ARGS__) |
| #define | vfwcvt_rtz_xu_f_v_u64m2_m(...) __riscv_vfwcvt_rtz_xu_f_v_u64m2_tumu(__VA_ARGS__) |
| #define | vfwcvt_rtz_xu_f_v_u64m4(...) __riscv_vfwcvt_rtz_xu_f_v_u64m4(__VA_ARGS__) |
| #define | vfwcvt_rtz_xu_f_v_u64m4_m(...) __riscv_vfwcvt_rtz_xu_f_v_u64m4_tumu(__VA_ARGS__) |
| #define | vfwcvt_rtz_xu_f_v_u64m8(...) __riscv_vfwcvt_rtz_xu_f_v_u64m8(__VA_ARGS__) |
| #define | vfwcvt_rtz_xu_f_v_u64m8_m(...) __riscv_vfwcvt_rtz_xu_f_v_u64m8_tumu(__VA_ARGS__) |
| #define | vfwcvt_x_f_v_i32m1(...) __riscv_vfwcvt_x_f_v_i32m1(__VA_ARGS__) |
| #define | vfwcvt_x_f_v_i32m1_m(...) __riscv_vfwcvt_x_f_v_i32m1_tumu(__VA_ARGS__) |
| #define | vfwcvt_x_f_v_i32m2(...) __riscv_vfwcvt_x_f_v_i32m2(__VA_ARGS__) |
| #define | vfwcvt_x_f_v_i32m2_m(...) __riscv_vfwcvt_x_f_v_i32m2_tumu(__VA_ARGS__) |
| #define | vfwcvt_x_f_v_i32m4(...) __riscv_vfwcvt_x_f_v_i32m4(__VA_ARGS__) |
| #define | vfwcvt_x_f_v_i32m4_m(...) __riscv_vfwcvt_x_f_v_i32m4_tumu(__VA_ARGS__) |
| #define | vfwcvt_x_f_v_i32m8(...) __riscv_vfwcvt_x_f_v_i32m8(__VA_ARGS__) |
| #define | vfwcvt_x_f_v_i32m8_m(...) __riscv_vfwcvt_x_f_v_i32m8_tumu(__VA_ARGS__) |
| #define | vfwcvt_x_f_v_i32mf2(...) __riscv_vfwcvt_x_f_v_i32mf2(__VA_ARGS__) |
| #define | vfwcvt_x_f_v_i32mf2_m(...) __riscv_vfwcvt_x_f_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vfwcvt_x_f_v_i64m1(...) __riscv_vfwcvt_x_f_v_i64m1(__VA_ARGS__) |
| #define | vfwcvt_x_f_v_i64m1_m(...) __riscv_vfwcvt_x_f_v_i64m1_tumu(__VA_ARGS__) |
| #define | vfwcvt_x_f_v_i64m2(...) __riscv_vfwcvt_x_f_v_i64m2(__VA_ARGS__) |
| #define | vfwcvt_x_f_v_i64m2_m(...) __riscv_vfwcvt_x_f_v_i64m2_tumu(__VA_ARGS__) |
| #define | vfwcvt_x_f_v_i64m4(...) __riscv_vfwcvt_x_f_v_i64m4(__VA_ARGS__) |
| #define | vfwcvt_x_f_v_i64m4_m(...) __riscv_vfwcvt_x_f_v_i64m4_tumu(__VA_ARGS__) |
| #define | vfwcvt_x_f_v_i64m8(...) __riscv_vfwcvt_x_f_v_i64m8(__VA_ARGS__) |
| #define | vfwcvt_x_f_v_i64m8_m(...) __riscv_vfwcvt_x_f_v_i64m8_tumu(__VA_ARGS__) |
| #define | vfwcvt_xu_f_v_u32m1(...) __riscv_vfwcvt_xu_f_v_u32m1(__VA_ARGS__) |
| #define | vfwcvt_xu_f_v_u32m1_m(...) __riscv_vfwcvt_xu_f_v_u32m1_tumu(__VA_ARGS__) |
| #define | vfwcvt_xu_f_v_u32m2(...) __riscv_vfwcvt_xu_f_v_u32m2(__VA_ARGS__) |
| #define | vfwcvt_xu_f_v_u32m2_m(...) __riscv_vfwcvt_xu_f_v_u32m2_tumu(__VA_ARGS__) |
| #define | vfwcvt_xu_f_v_u32m4(...) __riscv_vfwcvt_xu_f_v_u32m4(__VA_ARGS__) |
| #define | vfwcvt_xu_f_v_u32m4_m(...) __riscv_vfwcvt_xu_f_v_u32m4_tumu(__VA_ARGS__) |
| #define | vfwcvt_xu_f_v_u32m8(...) __riscv_vfwcvt_xu_f_v_u32m8(__VA_ARGS__) |
| #define | vfwcvt_xu_f_v_u32m8_m(...) __riscv_vfwcvt_xu_f_v_u32m8_tumu(__VA_ARGS__) |
| #define | vfwcvt_xu_f_v_u32mf2(...) __riscv_vfwcvt_xu_f_v_u32mf2(__VA_ARGS__) |
| #define | vfwcvt_xu_f_v_u32mf2_m(...) __riscv_vfwcvt_xu_f_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vfwcvt_xu_f_v_u64m1(...) __riscv_vfwcvt_xu_f_v_u64m1(__VA_ARGS__) |
| #define | vfwcvt_xu_f_v_u64m1_m(...) __riscv_vfwcvt_xu_f_v_u64m1_tumu(__VA_ARGS__) |
| #define | vfwcvt_xu_f_v_u64m2(...) __riscv_vfwcvt_xu_f_v_u64m2(__VA_ARGS__) |
| #define | vfwcvt_xu_f_v_u64m2_m(...) __riscv_vfwcvt_xu_f_v_u64m2_tumu(__VA_ARGS__) |
| #define | vfwcvt_xu_f_v_u64m4(...) __riscv_vfwcvt_xu_f_v_u64m4(__VA_ARGS__) |
| #define | vfwcvt_xu_f_v_u64m4_m(...) __riscv_vfwcvt_xu_f_v_u64m4_tumu(__VA_ARGS__) |
| #define | vfwcvt_xu_f_v_u64m8(...) __riscv_vfwcvt_xu_f_v_u64m8(__VA_ARGS__) |
| #define | vfwcvt_xu_f_v_u64m8_m(...) __riscv_vfwcvt_xu_f_v_u64m8_tumu(__VA_ARGS__) |
| #define | vfwmacc_vf_f32m1(...) __riscv_vfwmacc_vf_f32m1_tu(__VA_ARGS__) |
| #define | vfwmacc_vf_f32m1_m(...) __riscv_vfwmacc_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfwmacc_vf_f32m2(...) __riscv_vfwmacc_vf_f32m2_tu(__VA_ARGS__) |
| #define | vfwmacc_vf_f32m2_m(...) __riscv_vfwmacc_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfwmacc_vf_f32m4(...) __riscv_vfwmacc_vf_f32m4_tu(__VA_ARGS__) |
| #define | vfwmacc_vf_f32m4_m(...) __riscv_vfwmacc_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfwmacc_vf_f32m8(...) __riscv_vfwmacc_vf_f32m8_tu(__VA_ARGS__) |
| #define | vfwmacc_vf_f32m8_m(...) __riscv_vfwmacc_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfwmacc_vf_f32mf2(...) __riscv_vfwmacc_vf_f32mf2_tu(__VA_ARGS__) |
| #define | vfwmacc_vf_f32mf2_m(...) __riscv_vfwmacc_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfwmacc_vf_f64m1(...) __riscv_vfwmacc_vf_f64m1_tu(__VA_ARGS__) |
| #define | vfwmacc_vf_f64m1_m(...) __riscv_vfwmacc_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfwmacc_vf_f64m2(...) __riscv_vfwmacc_vf_f64m2_tu(__VA_ARGS__) |
| #define | vfwmacc_vf_f64m2_m(...) __riscv_vfwmacc_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfwmacc_vf_f64m4(...) __riscv_vfwmacc_vf_f64m4_tu(__VA_ARGS__) |
| #define | vfwmacc_vf_f64m4_m(...) __riscv_vfwmacc_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfwmacc_vf_f64m8(...) __riscv_vfwmacc_vf_f64m8_tu(__VA_ARGS__) |
| #define | vfwmacc_vf_f64m8_m(...) __riscv_vfwmacc_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfwmacc_vv_f32m1(...) __riscv_vfwmacc_vv_f32m1_tu(__VA_ARGS__) |
| #define | vfwmacc_vv_f32m1_m(...) __riscv_vfwmacc_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfwmacc_vv_f32m2(...) __riscv_vfwmacc_vv_f32m2_tu(__VA_ARGS__) |
| #define | vfwmacc_vv_f32m2_m(...) __riscv_vfwmacc_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfwmacc_vv_f32m4(...) __riscv_vfwmacc_vv_f32m4_tu(__VA_ARGS__) |
| #define | vfwmacc_vv_f32m4_m(...) __riscv_vfwmacc_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfwmacc_vv_f32m8(...) __riscv_vfwmacc_vv_f32m8_tu(__VA_ARGS__) |
| #define | vfwmacc_vv_f32m8_m(...) __riscv_vfwmacc_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfwmacc_vv_f32mf2(...) __riscv_vfwmacc_vv_f32mf2_tu(__VA_ARGS__) |
| #define | vfwmacc_vv_f32mf2_m(...) __riscv_vfwmacc_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfwmacc_vv_f64m1(...) __riscv_vfwmacc_vv_f64m1_tu(__VA_ARGS__) |
| #define | vfwmacc_vv_f64m1_m(...) __riscv_vfwmacc_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfwmacc_vv_f64m2(...) __riscv_vfwmacc_vv_f64m2_tu(__VA_ARGS__) |
| #define | vfwmacc_vv_f64m2_m(...) __riscv_vfwmacc_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfwmacc_vv_f64m4(...) __riscv_vfwmacc_vv_f64m4_tu(__VA_ARGS__) |
| #define | vfwmacc_vv_f64m4_m(...) __riscv_vfwmacc_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfwmacc_vv_f64m8(...) __riscv_vfwmacc_vv_f64m8_tu(__VA_ARGS__) |
| #define | vfwmacc_vv_f64m8_m(...) __riscv_vfwmacc_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfwmsac_vf_f32m1(...) __riscv_vfwmsac_vf_f32m1_tu(__VA_ARGS__) |
| #define | vfwmsac_vf_f32m1_m(...) __riscv_vfwmsac_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfwmsac_vf_f32m2(...) __riscv_vfwmsac_vf_f32m2_tu(__VA_ARGS__) |
| #define | vfwmsac_vf_f32m2_m(...) __riscv_vfwmsac_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfwmsac_vf_f32m4(...) __riscv_vfwmsac_vf_f32m4_tu(__VA_ARGS__) |
| #define | vfwmsac_vf_f32m4_m(...) __riscv_vfwmsac_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfwmsac_vf_f32m8(...) __riscv_vfwmsac_vf_f32m8_tu(__VA_ARGS__) |
| #define | vfwmsac_vf_f32m8_m(...) __riscv_vfwmsac_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfwmsac_vf_f32mf2(...) __riscv_vfwmsac_vf_f32mf2_tu(__VA_ARGS__) |
| #define | vfwmsac_vf_f32mf2_m(...) __riscv_vfwmsac_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfwmsac_vf_f64m1(...) __riscv_vfwmsac_vf_f64m1_tu(__VA_ARGS__) |
| #define | vfwmsac_vf_f64m1_m(...) __riscv_vfwmsac_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfwmsac_vf_f64m2(...) __riscv_vfwmsac_vf_f64m2_tu(__VA_ARGS__) |
| #define | vfwmsac_vf_f64m2_m(...) __riscv_vfwmsac_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfwmsac_vf_f64m4(...) __riscv_vfwmsac_vf_f64m4_tu(__VA_ARGS__) |
| #define | vfwmsac_vf_f64m4_m(...) __riscv_vfwmsac_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfwmsac_vf_f64m8(...) __riscv_vfwmsac_vf_f64m8_tu(__VA_ARGS__) |
| #define | vfwmsac_vf_f64m8_m(...) __riscv_vfwmsac_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfwmsac_vv_f32m1(...) __riscv_vfwmsac_vv_f32m1_tu(__VA_ARGS__) |
| #define | vfwmsac_vv_f32m1_m(...) __riscv_vfwmsac_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfwmsac_vv_f32m2(...) __riscv_vfwmsac_vv_f32m2_tu(__VA_ARGS__) |
| #define | vfwmsac_vv_f32m2_m(...) __riscv_vfwmsac_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfwmsac_vv_f32m4(...) __riscv_vfwmsac_vv_f32m4_tu(__VA_ARGS__) |
| #define | vfwmsac_vv_f32m4_m(...) __riscv_vfwmsac_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfwmsac_vv_f32m8(...) __riscv_vfwmsac_vv_f32m8_tu(__VA_ARGS__) |
| #define | vfwmsac_vv_f32m8_m(...) __riscv_vfwmsac_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfwmsac_vv_f32mf2(...) __riscv_vfwmsac_vv_f32mf2_tu(__VA_ARGS__) |
| #define | vfwmsac_vv_f32mf2_m(...) __riscv_vfwmsac_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfwmsac_vv_f64m1(...) __riscv_vfwmsac_vv_f64m1_tu(__VA_ARGS__) |
| #define | vfwmsac_vv_f64m1_m(...) __riscv_vfwmsac_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfwmsac_vv_f64m2(...) __riscv_vfwmsac_vv_f64m2_tu(__VA_ARGS__) |
| #define | vfwmsac_vv_f64m2_m(...) __riscv_vfwmsac_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfwmsac_vv_f64m4(...) __riscv_vfwmsac_vv_f64m4_tu(__VA_ARGS__) |
| #define | vfwmsac_vv_f64m4_m(...) __riscv_vfwmsac_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfwmsac_vv_f64m8(...) __riscv_vfwmsac_vv_f64m8_tu(__VA_ARGS__) |
| #define | vfwmsac_vv_f64m8_m(...) __riscv_vfwmsac_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfwmul_vf_f32m1(...) __riscv_vfwmul_vf_f32m1(__VA_ARGS__) |
| #define | vfwmul_vf_f32m1_m(...) __riscv_vfwmul_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfwmul_vf_f32m2(...) __riscv_vfwmul_vf_f32m2(__VA_ARGS__) |
| #define | vfwmul_vf_f32m2_m(...) __riscv_vfwmul_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfwmul_vf_f32m4(...) __riscv_vfwmul_vf_f32m4(__VA_ARGS__) |
| #define | vfwmul_vf_f32m4_m(...) __riscv_vfwmul_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfwmul_vf_f32m8(...) __riscv_vfwmul_vf_f32m8(__VA_ARGS__) |
| #define | vfwmul_vf_f32m8_m(...) __riscv_vfwmul_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfwmul_vf_f32mf2(...) __riscv_vfwmul_vf_f32mf2(__VA_ARGS__) |
| #define | vfwmul_vf_f32mf2_m(...) __riscv_vfwmul_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfwmul_vf_f64m1(...) __riscv_vfwmul_vf_f64m1(__VA_ARGS__) |
| #define | vfwmul_vf_f64m1_m(...) __riscv_vfwmul_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfwmul_vf_f64m2(...) __riscv_vfwmul_vf_f64m2(__VA_ARGS__) |
| #define | vfwmul_vf_f64m2_m(...) __riscv_vfwmul_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfwmul_vf_f64m4(...) __riscv_vfwmul_vf_f64m4(__VA_ARGS__) |
| #define | vfwmul_vf_f64m4_m(...) __riscv_vfwmul_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfwmul_vf_f64m8(...) __riscv_vfwmul_vf_f64m8(__VA_ARGS__) |
| #define | vfwmul_vf_f64m8_m(...) __riscv_vfwmul_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfwmul_vv_f32m1(...) __riscv_vfwmul_vv_f32m1(__VA_ARGS__) |
| #define | vfwmul_vv_f32m1_m(...) __riscv_vfwmul_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfwmul_vv_f32m2(...) __riscv_vfwmul_vv_f32m2(__VA_ARGS__) |
| #define | vfwmul_vv_f32m2_m(...) __riscv_vfwmul_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfwmul_vv_f32m4(...) __riscv_vfwmul_vv_f32m4(__VA_ARGS__) |
| #define | vfwmul_vv_f32m4_m(...) __riscv_vfwmul_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfwmul_vv_f32m8(...) __riscv_vfwmul_vv_f32m8(__VA_ARGS__) |
| #define | vfwmul_vv_f32m8_m(...) __riscv_vfwmul_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfwmul_vv_f32mf2(...) __riscv_vfwmul_vv_f32mf2(__VA_ARGS__) |
| #define | vfwmul_vv_f32mf2_m(...) __riscv_vfwmul_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfwmul_vv_f64m1(...) __riscv_vfwmul_vv_f64m1(__VA_ARGS__) |
| #define | vfwmul_vv_f64m1_m(...) __riscv_vfwmul_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfwmul_vv_f64m2(...) __riscv_vfwmul_vv_f64m2(__VA_ARGS__) |
| #define | vfwmul_vv_f64m2_m(...) __riscv_vfwmul_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfwmul_vv_f64m4(...) __riscv_vfwmul_vv_f64m4(__VA_ARGS__) |
| #define | vfwmul_vv_f64m4_m(...) __riscv_vfwmul_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfwmul_vv_f64m8(...) __riscv_vfwmul_vv_f64m8(__VA_ARGS__) |
| #define | vfwmul_vv_f64m8_m(...) __riscv_vfwmul_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfwnmacc_vf_f32m1(...) __riscv_vfwnmacc_vf_f32m1_tu(__VA_ARGS__) |
| #define | vfwnmacc_vf_f32m1_m(...) __riscv_vfwnmacc_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfwnmacc_vf_f32m2(...) __riscv_vfwnmacc_vf_f32m2_tu(__VA_ARGS__) |
| #define | vfwnmacc_vf_f32m2_m(...) __riscv_vfwnmacc_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfwnmacc_vf_f32m4(...) __riscv_vfwnmacc_vf_f32m4_tu(__VA_ARGS__) |
| #define | vfwnmacc_vf_f32m4_m(...) __riscv_vfwnmacc_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfwnmacc_vf_f32m8(...) __riscv_vfwnmacc_vf_f32m8_tu(__VA_ARGS__) |
| #define | vfwnmacc_vf_f32m8_m(...) __riscv_vfwnmacc_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfwnmacc_vf_f32mf2(...) __riscv_vfwnmacc_vf_f32mf2_tu(__VA_ARGS__) |
| #define | vfwnmacc_vf_f32mf2_m(...) __riscv_vfwnmacc_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfwnmacc_vf_f64m1(...) __riscv_vfwnmacc_vf_f64m1_tu(__VA_ARGS__) |
| #define | vfwnmacc_vf_f64m1_m(...) __riscv_vfwnmacc_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfwnmacc_vf_f64m2(...) __riscv_vfwnmacc_vf_f64m2_tu(__VA_ARGS__) |
| #define | vfwnmacc_vf_f64m2_m(...) __riscv_vfwnmacc_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfwnmacc_vf_f64m4(...) __riscv_vfwnmacc_vf_f64m4_tu(__VA_ARGS__) |
| #define | vfwnmacc_vf_f64m4_m(...) __riscv_vfwnmacc_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfwnmacc_vf_f64m8(...) __riscv_vfwnmacc_vf_f64m8_tu(__VA_ARGS__) |
| #define | vfwnmacc_vf_f64m8_m(...) __riscv_vfwnmacc_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfwnmacc_vv_f32m1(...) __riscv_vfwnmacc_vv_f32m1_tu(__VA_ARGS__) |
| #define | vfwnmacc_vv_f32m1_m(...) __riscv_vfwnmacc_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfwnmacc_vv_f32m2(...) __riscv_vfwnmacc_vv_f32m2_tu(__VA_ARGS__) |
| #define | vfwnmacc_vv_f32m2_m(...) __riscv_vfwnmacc_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfwnmacc_vv_f32m4(...) __riscv_vfwnmacc_vv_f32m4_tu(__VA_ARGS__) |
| #define | vfwnmacc_vv_f32m4_m(...) __riscv_vfwnmacc_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfwnmacc_vv_f32m8(...) __riscv_vfwnmacc_vv_f32m8_tu(__VA_ARGS__) |
| #define | vfwnmacc_vv_f32m8_m(...) __riscv_vfwnmacc_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfwnmacc_vv_f32mf2(...) __riscv_vfwnmacc_vv_f32mf2_tu(__VA_ARGS__) |
| #define | vfwnmacc_vv_f32mf2_m(...) __riscv_vfwnmacc_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfwnmacc_vv_f64m1(...) __riscv_vfwnmacc_vv_f64m1_tu(__VA_ARGS__) |
| #define | vfwnmacc_vv_f64m1_m(...) __riscv_vfwnmacc_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfwnmacc_vv_f64m2(...) __riscv_vfwnmacc_vv_f64m2_tu(__VA_ARGS__) |
| #define | vfwnmacc_vv_f64m2_m(...) __riscv_vfwnmacc_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfwnmacc_vv_f64m4(...) __riscv_vfwnmacc_vv_f64m4_tu(__VA_ARGS__) |
| #define | vfwnmacc_vv_f64m4_m(...) __riscv_vfwnmacc_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfwnmacc_vv_f64m8(...) __riscv_vfwnmacc_vv_f64m8_tu(__VA_ARGS__) |
| #define | vfwnmacc_vv_f64m8_m(...) __riscv_vfwnmacc_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfwnmsac_vf_f32m1(...) __riscv_vfwnmsac_vf_f32m1_tu(__VA_ARGS__) |
| #define | vfwnmsac_vf_f32m1_m(...) __riscv_vfwnmsac_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfwnmsac_vf_f32m2(...) __riscv_vfwnmsac_vf_f32m2_tu(__VA_ARGS__) |
| #define | vfwnmsac_vf_f32m2_m(...) __riscv_vfwnmsac_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfwnmsac_vf_f32m4(...) __riscv_vfwnmsac_vf_f32m4_tu(__VA_ARGS__) |
| #define | vfwnmsac_vf_f32m4_m(...) __riscv_vfwnmsac_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfwnmsac_vf_f32m8(...) __riscv_vfwnmsac_vf_f32m8_tu(__VA_ARGS__) |
| #define | vfwnmsac_vf_f32m8_m(...) __riscv_vfwnmsac_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfwnmsac_vf_f32mf2(...) __riscv_vfwnmsac_vf_f32mf2_tu(__VA_ARGS__) |
| #define | vfwnmsac_vf_f32mf2_m(...) __riscv_vfwnmsac_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfwnmsac_vf_f64m1(...) __riscv_vfwnmsac_vf_f64m1_tu(__VA_ARGS__) |
| #define | vfwnmsac_vf_f64m1_m(...) __riscv_vfwnmsac_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfwnmsac_vf_f64m2(...) __riscv_vfwnmsac_vf_f64m2_tu(__VA_ARGS__) |
| #define | vfwnmsac_vf_f64m2_m(...) __riscv_vfwnmsac_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfwnmsac_vf_f64m4(...) __riscv_vfwnmsac_vf_f64m4_tu(__VA_ARGS__) |
| #define | vfwnmsac_vf_f64m4_m(...) __riscv_vfwnmsac_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfwnmsac_vf_f64m8(...) __riscv_vfwnmsac_vf_f64m8_tu(__VA_ARGS__) |
| #define | vfwnmsac_vf_f64m8_m(...) __riscv_vfwnmsac_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfwnmsac_vv_f32m1(...) __riscv_vfwnmsac_vv_f32m1_tu(__VA_ARGS__) |
| #define | vfwnmsac_vv_f32m1_m(...) __riscv_vfwnmsac_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfwnmsac_vv_f32m2(...) __riscv_vfwnmsac_vv_f32m2_tu(__VA_ARGS__) |
| #define | vfwnmsac_vv_f32m2_m(...) __riscv_vfwnmsac_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfwnmsac_vv_f32m4(...) __riscv_vfwnmsac_vv_f32m4_tu(__VA_ARGS__) |
| #define | vfwnmsac_vv_f32m4_m(...) __riscv_vfwnmsac_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfwnmsac_vv_f32m8(...) __riscv_vfwnmsac_vv_f32m8_tu(__VA_ARGS__) |
| #define | vfwnmsac_vv_f32m8_m(...) __riscv_vfwnmsac_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfwnmsac_vv_f32mf2(...) __riscv_vfwnmsac_vv_f32mf2_tu(__VA_ARGS__) |
| #define | vfwnmsac_vv_f32mf2_m(...) __riscv_vfwnmsac_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfwnmsac_vv_f64m1(...) __riscv_vfwnmsac_vv_f64m1_tu(__VA_ARGS__) |
| #define | vfwnmsac_vv_f64m1_m(...) __riscv_vfwnmsac_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfwnmsac_vv_f64m2(...) __riscv_vfwnmsac_vv_f64m2_tu(__VA_ARGS__) |
| #define | vfwnmsac_vv_f64m2_m(...) __riscv_vfwnmsac_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfwnmsac_vv_f64m4(...) __riscv_vfwnmsac_vv_f64m4_tu(__VA_ARGS__) |
| #define | vfwnmsac_vv_f64m4_m(...) __riscv_vfwnmsac_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfwnmsac_vv_f64m8(...) __riscv_vfwnmsac_vv_f64m8_tu(__VA_ARGS__) |
| #define | vfwnmsac_vv_f64m8_m(...) __riscv_vfwnmsac_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfwredosum_vs_f16m1_f32m1(...) __riscv_vfwredosum_vs_f16m1_f32m1_tu(__VA_ARGS__) |
| #define | vfwredosum_vs_f16m1_f32m1_m(...) __riscv_vfwredosum_vs_f16m1_f32m1_tum(__VA_ARGS__) |
| #define | vfwredosum_vs_f16m2_f32m1(...) __riscv_vfwredosum_vs_f16m2_f32m1_tu(__VA_ARGS__) |
| #define | vfwredosum_vs_f16m2_f32m1_m(...) __riscv_vfwredosum_vs_f16m2_f32m1_tum(__VA_ARGS__) |
| #define | vfwredosum_vs_f16m4_f32m1(...) __riscv_vfwredosum_vs_f16m4_f32m1_tu(__VA_ARGS__) |
| #define | vfwredosum_vs_f16m4_f32m1_m(...) __riscv_vfwredosum_vs_f16m4_f32m1_tum(__VA_ARGS__) |
| #define | vfwredosum_vs_f16m8_f32m1(...) __riscv_vfwredosum_vs_f16m8_f32m1_tu(__VA_ARGS__) |
| #define | vfwredosum_vs_f16m8_f32m1_m(...) __riscv_vfwredosum_vs_f16m8_f32m1_tum(__VA_ARGS__) |
| #define | vfwredosum_vs_f16mf2_f32m1(...) __riscv_vfwredosum_vs_f16mf2_f32m1_tu(__VA_ARGS__) |
| #define | vfwredosum_vs_f16mf2_f32m1_m(...) __riscv_vfwredosum_vs_f16mf2_f32m1_tum(__VA_ARGS__) |
| #define | vfwredosum_vs_f16mf4_f32m1(...) __riscv_vfwredosum_vs_f16mf4_f32m1_tu(__VA_ARGS__) |
| #define | vfwredosum_vs_f16mf4_f32m1_m(...) __riscv_vfwredosum_vs_f16mf4_f32m1_tum(__VA_ARGS__) |
| #define | vfwredosum_vs_f32m1_f64m1(...) __riscv_vfwredosum_vs_f32m1_f64m1_tu(__VA_ARGS__) |
| #define | vfwredosum_vs_f32m1_f64m1_m(...) __riscv_vfwredosum_vs_f32m1_f64m1_tum(__VA_ARGS__) |
| #define | vfwredosum_vs_f32m2_f64m1(...) __riscv_vfwredosum_vs_f32m2_f64m1_tu(__VA_ARGS__) |
| #define | vfwredosum_vs_f32m2_f64m1_m(...) __riscv_vfwredosum_vs_f32m2_f64m1_tum(__VA_ARGS__) |
| #define | vfwredosum_vs_f32m4_f64m1(...) __riscv_vfwredosum_vs_f32m4_f64m1_tu(__VA_ARGS__) |
| #define | vfwredosum_vs_f32m4_f64m1_m(...) __riscv_vfwredosum_vs_f32m4_f64m1_tum(__VA_ARGS__) |
| #define | vfwredosum_vs_f32m8_f64m1(...) __riscv_vfwredosum_vs_f32m8_f64m1_tu(__VA_ARGS__) |
| #define | vfwredosum_vs_f32m8_f64m1_m(...) __riscv_vfwredosum_vs_f32m8_f64m1_tum(__VA_ARGS__) |
| #define | vfwredosum_vs_f32mf2_f64m1(...) __riscv_vfwredosum_vs_f32mf2_f64m1_tu(__VA_ARGS__) |
| #define | vfwredosum_vs_f32mf2_f64m1_m(...) __riscv_vfwredosum_vs_f32mf2_f64m1_tum(__VA_ARGS__) |
| #define | vfwredusum_vs_f16m1_f32m1(...) __riscv_vfwredusum_vs_f16m1_f32m1_tu(__VA_ARGS__) |
| #define | vfwredusum_vs_f16m1_f32m1_m(...) __riscv_vfwredusum_vs_f16m1_f32m1_tum(__VA_ARGS__) |
| #define | vfwredusum_vs_f16m2_f32m1(...) __riscv_vfwredusum_vs_f16m2_f32m1_tu(__VA_ARGS__) |
| #define | vfwredusum_vs_f16m2_f32m1_m(...) __riscv_vfwredusum_vs_f16m2_f32m1_tum(__VA_ARGS__) |
| #define | vfwredusum_vs_f16m4_f32m1(...) __riscv_vfwredusum_vs_f16m4_f32m1_tu(__VA_ARGS__) |
| #define | vfwredusum_vs_f16m4_f32m1_m(...) __riscv_vfwredusum_vs_f16m4_f32m1_tum(__VA_ARGS__) |
| #define | vfwredusum_vs_f16m8_f32m1(...) __riscv_vfwredusum_vs_f16m8_f32m1_tu(__VA_ARGS__) |
| #define | vfwredusum_vs_f16m8_f32m1_m(...) __riscv_vfwredusum_vs_f16m8_f32m1_tum(__VA_ARGS__) |
| #define | vfwredusum_vs_f16mf2_f32m1(...) __riscv_vfwredusum_vs_f16mf2_f32m1_tu(__VA_ARGS__) |
| #define | vfwredusum_vs_f16mf2_f32m1_m(...) __riscv_vfwredusum_vs_f16mf2_f32m1_tum(__VA_ARGS__) |
| #define | vfwredusum_vs_f16mf4_f32m1(...) __riscv_vfwredusum_vs_f16mf4_f32m1_tu(__VA_ARGS__) |
| #define | vfwredusum_vs_f16mf4_f32m1_m(...) __riscv_vfwredusum_vs_f16mf4_f32m1_tum(__VA_ARGS__) |
| #define | vfwredusum_vs_f32m1_f64m1(...) __riscv_vfwredusum_vs_f32m1_f64m1_tu(__VA_ARGS__) |
| #define | vfwredusum_vs_f32m1_f64m1_m(...) __riscv_vfwredusum_vs_f32m1_f64m1_tum(__VA_ARGS__) |
| #define | vfwredusum_vs_f32m2_f64m1(...) __riscv_vfwredusum_vs_f32m2_f64m1_tu(__VA_ARGS__) |
| #define | vfwredusum_vs_f32m2_f64m1_m(...) __riscv_vfwredusum_vs_f32m2_f64m1_tum(__VA_ARGS__) |
| #define | vfwredusum_vs_f32m4_f64m1(...) __riscv_vfwredusum_vs_f32m4_f64m1_tu(__VA_ARGS__) |
| #define | vfwredusum_vs_f32m4_f64m1_m(...) __riscv_vfwredusum_vs_f32m4_f64m1_tum(__VA_ARGS__) |
| #define | vfwredusum_vs_f32m8_f64m1(...) __riscv_vfwredusum_vs_f32m8_f64m1_tu(__VA_ARGS__) |
| #define | vfwredusum_vs_f32m8_f64m1_m(...) __riscv_vfwredusum_vs_f32m8_f64m1_tum(__VA_ARGS__) |
| #define | vfwredusum_vs_f32mf2_f64m1(...) __riscv_vfwredusum_vs_f32mf2_f64m1_tu(__VA_ARGS__) |
| #define | vfwredusum_vs_f32mf2_f64m1_m(...) __riscv_vfwredusum_vs_f32mf2_f64m1_tum(__VA_ARGS__) |
| #define | vfwsub_vf_f32m1(...) __riscv_vfwsub_vf_f32m1(__VA_ARGS__) |
| #define | vfwsub_vf_f32m1_m(...) __riscv_vfwsub_vf_f32m1_tumu(__VA_ARGS__) |
| #define | vfwsub_vf_f32m2(...) __riscv_vfwsub_vf_f32m2(__VA_ARGS__) |
| #define | vfwsub_vf_f32m2_m(...) __riscv_vfwsub_vf_f32m2_tumu(__VA_ARGS__) |
| #define | vfwsub_vf_f32m4(...) __riscv_vfwsub_vf_f32m4(__VA_ARGS__) |
| #define | vfwsub_vf_f32m4_m(...) __riscv_vfwsub_vf_f32m4_tumu(__VA_ARGS__) |
| #define | vfwsub_vf_f32m8(...) __riscv_vfwsub_vf_f32m8(__VA_ARGS__) |
| #define | vfwsub_vf_f32m8_m(...) __riscv_vfwsub_vf_f32m8_tumu(__VA_ARGS__) |
| #define | vfwsub_vf_f32mf2(...) __riscv_vfwsub_vf_f32mf2(__VA_ARGS__) |
| #define | vfwsub_vf_f32mf2_m(...) __riscv_vfwsub_vf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfwsub_vf_f64m1(...) __riscv_vfwsub_vf_f64m1(__VA_ARGS__) |
| #define | vfwsub_vf_f64m1_m(...) __riscv_vfwsub_vf_f64m1_tumu(__VA_ARGS__) |
| #define | vfwsub_vf_f64m2(...) __riscv_vfwsub_vf_f64m2(__VA_ARGS__) |
| #define | vfwsub_vf_f64m2_m(...) __riscv_vfwsub_vf_f64m2_tumu(__VA_ARGS__) |
| #define | vfwsub_vf_f64m4(...) __riscv_vfwsub_vf_f64m4(__VA_ARGS__) |
| #define | vfwsub_vf_f64m4_m(...) __riscv_vfwsub_vf_f64m4_tumu(__VA_ARGS__) |
| #define | vfwsub_vf_f64m8(...) __riscv_vfwsub_vf_f64m8(__VA_ARGS__) |
| #define | vfwsub_vf_f64m8_m(...) __riscv_vfwsub_vf_f64m8_tumu(__VA_ARGS__) |
| #define | vfwsub_vv_f32m1(...) __riscv_vfwsub_vv_f32m1(__VA_ARGS__) |
| #define | vfwsub_vv_f32m1_m(...) __riscv_vfwsub_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vfwsub_vv_f32m2(...) __riscv_vfwsub_vv_f32m2(__VA_ARGS__) |
| #define | vfwsub_vv_f32m2_m(...) __riscv_vfwsub_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vfwsub_vv_f32m4(...) __riscv_vfwsub_vv_f32m4(__VA_ARGS__) |
| #define | vfwsub_vv_f32m4_m(...) __riscv_vfwsub_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vfwsub_vv_f32m8(...) __riscv_vfwsub_vv_f32m8(__VA_ARGS__) |
| #define | vfwsub_vv_f32m8_m(...) __riscv_vfwsub_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vfwsub_vv_f32mf2(...) __riscv_vfwsub_vv_f32mf2(__VA_ARGS__) |
| #define | vfwsub_vv_f32mf2_m(...) __riscv_vfwsub_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfwsub_vv_f64m1(...) __riscv_vfwsub_vv_f64m1(__VA_ARGS__) |
| #define | vfwsub_vv_f64m1_m(...) __riscv_vfwsub_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vfwsub_vv_f64m2(...) __riscv_vfwsub_vv_f64m2(__VA_ARGS__) |
| #define | vfwsub_vv_f64m2_m(...) __riscv_vfwsub_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vfwsub_vv_f64m4(...) __riscv_vfwsub_vv_f64m4(__VA_ARGS__) |
| #define | vfwsub_vv_f64m4_m(...) __riscv_vfwsub_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vfwsub_vv_f64m8(...) __riscv_vfwsub_vv_f64m8(__VA_ARGS__) |
| #define | vfwsub_vv_f64m8_m(...) __riscv_vfwsub_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vfwsub_wf_f32m1(...) __riscv_vfwsub_wf_f32m1(__VA_ARGS__) |
| #define | vfwsub_wf_f32m1_m(...) __riscv_vfwsub_wf_f32m1_tumu(__VA_ARGS__) |
| #define | vfwsub_wf_f32m2(...) __riscv_vfwsub_wf_f32m2(__VA_ARGS__) |
| #define | vfwsub_wf_f32m2_m(...) __riscv_vfwsub_wf_f32m2_tumu(__VA_ARGS__) |
| #define | vfwsub_wf_f32m4(...) __riscv_vfwsub_wf_f32m4(__VA_ARGS__) |
| #define | vfwsub_wf_f32m4_m(...) __riscv_vfwsub_wf_f32m4_tumu(__VA_ARGS__) |
| #define | vfwsub_wf_f32m8(...) __riscv_vfwsub_wf_f32m8(__VA_ARGS__) |
| #define | vfwsub_wf_f32m8_m(...) __riscv_vfwsub_wf_f32m8_tumu(__VA_ARGS__) |
| #define | vfwsub_wf_f32mf2(...) __riscv_vfwsub_wf_f32mf2(__VA_ARGS__) |
| #define | vfwsub_wf_f32mf2_m(...) __riscv_vfwsub_wf_f32mf2_tumu(__VA_ARGS__) |
| #define | vfwsub_wf_f64m1(...) __riscv_vfwsub_wf_f64m1(__VA_ARGS__) |
| #define | vfwsub_wf_f64m1_m(...) __riscv_vfwsub_wf_f64m1_tumu(__VA_ARGS__) |
| #define | vfwsub_wf_f64m2(...) __riscv_vfwsub_wf_f64m2(__VA_ARGS__) |
| #define | vfwsub_wf_f64m2_m(...) __riscv_vfwsub_wf_f64m2_tumu(__VA_ARGS__) |
| #define | vfwsub_wf_f64m4(...) __riscv_vfwsub_wf_f64m4(__VA_ARGS__) |
| #define | vfwsub_wf_f64m4_m(...) __riscv_vfwsub_wf_f64m4_tumu(__VA_ARGS__) |
| #define | vfwsub_wf_f64m8(...) __riscv_vfwsub_wf_f64m8(__VA_ARGS__) |
| #define | vfwsub_wf_f64m8_m(...) __riscv_vfwsub_wf_f64m8_tumu(__VA_ARGS__) |
| #define | vfwsub_wv_f32m1(...) __riscv_vfwsub_wv_f32m1(__VA_ARGS__) |
| #define | vfwsub_wv_f32m1_m(...) __riscv_vfwsub_wv_f32m1_tumu(__VA_ARGS__) |
| #define | vfwsub_wv_f32m2(...) __riscv_vfwsub_wv_f32m2(__VA_ARGS__) |
| #define | vfwsub_wv_f32m2_m(...) __riscv_vfwsub_wv_f32m2_tumu(__VA_ARGS__) |
| #define | vfwsub_wv_f32m4(...) __riscv_vfwsub_wv_f32m4(__VA_ARGS__) |
| #define | vfwsub_wv_f32m4_m(...) __riscv_vfwsub_wv_f32m4_tumu(__VA_ARGS__) |
| #define | vfwsub_wv_f32m8(...) __riscv_vfwsub_wv_f32m8(__VA_ARGS__) |
| #define | vfwsub_wv_f32m8_m(...) __riscv_vfwsub_wv_f32m8_tumu(__VA_ARGS__) |
| #define | vfwsub_wv_f32mf2(...) __riscv_vfwsub_wv_f32mf2(__VA_ARGS__) |
| #define | vfwsub_wv_f32mf2_m(...) __riscv_vfwsub_wv_f32mf2_tumu(__VA_ARGS__) |
| #define | vfwsub_wv_f64m1(...) __riscv_vfwsub_wv_f64m1(__VA_ARGS__) |
| #define | vfwsub_wv_f64m1_m(...) __riscv_vfwsub_wv_f64m1_tumu(__VA_ARGS__) |
| #define | vfwsub_wv_f64m2(...) __riscv_vfwsub_wv_f64m2(__VA_ARGS__) |
| #define | vfwsub_wv_f64m2_m(...) __riscv_vfwsub_wv_f64m2_tumu(__VA_ARGS__) |
| #define | vfwsub_wv_f64m4(...) __riscv_vfwsub_wv_f64m4(__VA_ARGS__) |
| #define | vfwsub_wv_f64m4_m(...) __riscv_vfwsub_wv_f64m4_tumu(__VA_ARGS__) |
| #define | vfwsub_wv_f64m8(...) __riscv_vfwsub_wv_f64m8(__VA_ARGS__) |
| #define | vfwsub_wv_f64m8_m(...) __riscv_vfwsub_wv_f64m8_tumu(__VA_ARGS__) |
| #define | vget_v_f16m2_f16m1(...) __riscv_vget_v_f16m2_f16m1(__VA_ARGS__) |
| #define | vget_v_f16m4_f16m1(...) __riscv_vget_v_f16m4_f16m1(__VA_ARGS__) |
| #define | vget_v_f16m4_f16m2(...) __riscv_vget_v_f16m4_f16m2(__VA_ARGS__) |
| #define | vget_v_f16m8_f16m1(...) __riscv_vget_v_f16m8_f16m1(__VA_ARGS__) |
| #define | vget_v_f16m8_f16m2(...) __riscv_vget_v_f16m8_f16m2(__VA_ARGS__) |
| #define | vget_v_f16m8_f16m4(...) __riscv_vget_v_f16m8_f16m4(__VA_ARGS__) |
| #define | vget_v_f32m2_f32m1(...) __riscv_vget_v_f32m2_f32m1(__VA_ARGS__) |
| #define | vget_v_f32m4_f32m1(...) __riscv_vget_v_f32m4_f32m1(__VA_ARGS__) |
| #define | vget_v_f32m4_f32m2(...) __riscv_vget_v_f32m4_f32m2(__VA_ARGS__) |
| #define | vget_v_f32m8_f32m1(...) __riscv_vget_v_f32m8_f32m1(__VA_ARGS__) |
| #define | vget_v_f32m8_f32m2(...) __riscv_vget_v_f32m8_f32m2(__VA_ARGS__) |
| #define | vget_v_f32m8_f32m4(...) __riscv_vget_v_f32m8_f32m4(__VA_ARGS__) |
| #define | vget_v_f64m2_f64m1(...) __riscv_vget_v_f64m2_f64m1(__VA_ARGS__) |
| #define | vget_v_f64m4_f64m1(...) __riscv_vget_v_f64m4_f64m1(__VA_ARGS__) |
| #define | vget_v_f64m4_f64m2(...) __riscv_vget_v_f64m4_f64m2(__VA_ARGS__) |
| #define | vget_v_f64m8_f64m1(...) __riscv_vget_v_f64m8_f64m1(__VA_ARGS__) |
| #define | vget_v_f64m8_f64m2(...) __riscv_vget_v_f64m8_f64m2(__VA_ARGS__) |
| #define | vget_v_f64m8_f64m4(...) __riscv_vget_v_f64m8_f64m4(__VA_ARGS__) |
| #define | vget_v_i16m2_i16m1(...) __riscv_vget_v_i16m2_i16m1(__VA_ARGS__) |
| #define | vget_v_i16m4_i16m1(...) __riscv_vget_v_i16m4_i16m1(__VA_ARGS__) |
| #define | vget_v_i16m4_i16m2(...) __riscv_vget_v_i16m4_i16m2(__VA_ARGS__) |
| #define | vget_v_i16m8_i16m1(...) __riscv_vget_v_i16m8_i16m1(__VA_ARGS__) |
| #define | vget_v_i16m8_i16m2(...) __riscv_vget_v_i16m8_i16m2(__VA_ARGS__) |
| #define | vget_v_i16m8_i16m4(...) __riscv_vget_v_i16m8_i16m4(__VA_ARGS__) |
| #define | vget_v_i32m2_i32m1(...) __riscv_vget_v_i32m2_i32m1(__VA_ARGS__) |
| #define | vget_v_i32m4_i32m1(...) __riscv_vget_v_i32m4_i32m1(__VA_ARGS__) |
| #define | vget_v_i32m4_i32m2(...) __riscv_vget_v_i32m4_i32m2(__VA_ARGS__) |
| #define | vget_v_i32m8_i32m1(...) __riscv_vget_v_i32m8_i32m1(__VA_ARGS__) |
| #define | vget_v_i32m8_i32m2(...) __riscv_vget_v_i32m8_i32m2(__VA_ARGS__) |
| #define | vget_v_i32m8_i32m4(...) __riscv_vget_v_i32m8_i32m4(__VA_ARGS__) |
| #define | vget_v_i64m2_i64m1(...) __riscv_vget_v_i64m2_i64m1(__VA_ARGS__) |
| #define | vget_v_i64m4_i64m1(...) __riscv_vget_v_i64m4_i64m1(__VA_ARGS__) |
| #define | vget_v_i64m4_i64m2(...) __riscv_vget_v_i64m4_i64m2(__VA_ARGS__) |
| #define | vget_v_i64m8_i64m1(...) __riscv_vget_v_i64m8_i64m1(__VA_ARGS__) |
| #define | vget_v_i64m8_i64m2(...) __riscv_vget_v_i64m8_i64m2(__VA_ARGS__) |
| #define | vget_v_i64m8_i64m4(...) __riscv_vget_v_i64m8_i64m4(__VA_ARGS__) |
| #define | vget_v_i8m2_i8m1(...) __riscv_vget_v_i8m2_i8m1(__VA_ARGS__) |
| #define | vget_v_i8m4_i8m1(...) __riscv_vget_v_i8m4_i8m1(__VA_ARGS__) |
| #define | vget_v_i8m4_i8m2(...) __riscv_vget_v_i8m4_i8m2(__VA_ARGS__) |
| #define | vget_v_i8m8_i8m1(...) __riscv_vget_v_i8m8_i8m1(__VA_ARGS__) |
| #define | vget_v_i8m8_i8m2(...) __riscv_vget_v_i8m8_i8m2(__VA_ARGS__) |
| #define | vget_v_i8m8_i8m4(...) __riscv_vget_v_i8m8_i8m4(__VA_ARGS__) |
| #define | vget_v_u16m2_u16m1(...) __riscv_vget_v_u16m2_u16m1(__VA_ARGS__) |
| #define | vget_v_u16m4_u16m1(...) __riscv_vget_v_u16m4_u16m1(__VA_ARGS__) |
| #define | vget_v_u16m4_u16m2(...) __riscv_vget_v_u16m4_u16m2(__VA_ARGS__) |
| #define | vget_v_u16m8_u16m1(...) __riscv_vget_v_u16m8_u16m1(__VA_ARGS__) |
| #define | vget_v_u16m8_u16m2(...) __riscv_vget_v_u16m8_u16m2(__VA_ARGS__) |
| #define | vget_v_u16m8_u16m4(...) __riscv_vget_v_u16m8_u16m4(__VA_ARGS__) |
| #define | vget_v_u32m2_u32m1(...) __riscv_vget_v_u32m2_u32m1(__VA_ARGS__) |
| #define | vget_v_u32m4_u32m1(...) __riscv_vget_v_u32m4_u32m1(__VA_ARGS__) |
| #define | vget_v_u32m4_u32m2(...) __riscv_vget_v_u32m4_u32m2(__VA_ARGS__) |
| #define | vget_v_u32m8_u32m1(...) __riscv_vget_v_u32m8_u32m1(__VA_ARGS__) |
| #define | vget_v_u32m8_u32m2(...) __riscv_vget_v_u32m8_u32m2(__VA_ARGS__) |
| #define | vget_v_u32m8_u32m4(...) __riscv_vget_v_u32m8_u32m4(__VA_ARGS__) |
| #define | vget_v_u64m2_u64m1(...) __riscv_vget_v_u64m2_u64m1(__VA_ARGS__) |
| #define | vget_v_u64m4_u64m1(...) __riscv_vget_v_u64m4_u64m1(__VA_ARGS__) |
| #define | vget_v_u64m4_u64m2(...) __riscv_vget_v_u64m4_u64m2(__VA_ARGS__) |
| #define | vget_v_u64m8_u64m1(...) __riscv_vget_v_u64m8_u64m1(__VA_ARGS__) |
| #define | vget_v_u64m8_u64m2(...) __riscv_vget_v_u64m8_u64m2(__VA_ARGS__) |
| #define | vget_v_u64m8_u64m4(...) __riscv_vget_v_u64m8_u64m4(__VA_ARGS__) |
| #define | vget_v_u8m2_u8m1(...) __riscv_vget_v_u8m2_u8m1(__VA_ARGS__) |
| #define | vget_v_u8m4_u8m1(...) __riscv_vget_v_u8m4_u8m1(__VA_ARGS__) |
| #define | vget_v_u8m4_u8m2(...) __riscv_vget_v_u8m4_u8m2(__VA_ARGS__) |
| #define | vget_v_u8m8_u8m1(...) __riscv_vget_v_u8m8_u8m1(__VA_ARGS__) |
| #define | vget_v_u8m8_u8m2(...) __riscv_vget_v_u8m8_u8m2(__VA_ARGS__) |
| #define | vget_v_u8m8_u8m4(...) __riscv_vget_v_u8m8_u8m4(__VA_ARGS__) |
| #define | vid_v_u16m1(...) __riscv_vid_v_u16m1(__VA_ARGS__) |
| #define | vid_v_u16m1_m(...) __riscv_vid_v_u16m1_tumu(__VA_ARGS__) |
| #define | vid_v_u16m2(...) __riscv_vid_v_u16m2(__VA_ARGS__) |
| #define | vid_v_u16m2_m(...) __riscv_vid_v_u16m2_tumu(__VA_ARGS__) |
| #define | vid_v_u16m4(...) __riscv_vid_v_u16m4(__VA_ARGS__) |
| #define | vid_v_u16m4_m(...) __riscv_vid_v_u16m4_tumu(__VA_ARGS__) |
| #define | vid_v_u16m8(...) __riscv_vid_v_u16m8(__VA_ARGS__) |
| #define | vid_v_u16m8_m(...) __riscv_vid_v_u16m8_tumu(__VA_ARGS__) |
| #define | vid_v_u16mf2(...) __riscv_vid_v_u16mf2(__VA_ARGS__) |
| #define | vid_v_u16mf2_m(...) __riscv_vid_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vid_v_u16mf4(...) __riscv_vid_v_u16mf4(__VA_ARGS__) |
| #define | vid_v_u16mf4_m(...) __riscv_vid_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vid_v_u32m1(...) __riscv_vid_v_u32m1(__VA_ARGS__) |
| #define | vid_v_u32m1_m(...) __riscv_vid_v_u32m1_tumu(__VA_ARGS__) |
| #define | vid_v_u32m2(...) __riscv_vid_v_u32m2(__VA_ARGS__) |
| #define | vid_v_u32m2_m(...) __riscv_vid_v_u32m2_tumu(__VA_ARGS__) |
| #define | vid_v_u32m4(...) __riscv_vid_v_u32m4(__VA_ARGS__) |
| #define | vid_v_u32m4_m(...) __riscv_vid_v_u32m4_tumu(__VA_ARGS__) |
| #define | vid_v_u32m8(...) __riscv_vid_v_u32m8(__VA_ARGS__) |
| #define | vid_v_u32m8_m(...) __riscv_vid_v_u32m8_tumu(__VA_ARGS__) |
| #define | vid_v_u32mf2(...) __riscv_vid_v_u32mf2(__VA_ARGS__) |
| #define | vid_v_u32mf2_m(...) __riscv_vid_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vid_v_u64m1(...) __riscv_vid_v_u64m1(__VA_ARGS__) |
| #define | vid_v_u64m1_m(...) __riscv_vid_v_u64m1_tumu(__VA_ARGS__) |
| #define | vid_v_u64m2(...) __riscv_vid_v_u64m2(__VA_ARGS__) |
| #define | vid_v_u64m2_m(...) __riscv_vid_v_u64m2_tumu(__VA_ARGS__) |
| #define | vid_v_u64m4(...) __riscv_vid_v_u64m4(__VA_ARGS__) |
| #define | vid_v_u64m4_m(...) __riscv_vid_v_u64m4_tumu(__VA_ARGS__) |
| #define | vid_v_u64m8(...) __riscv_vid_v_u64m8(__VA_ARGS__) |
| #define | vid_v_u64m8_m(...) __riscv_vid_v_u64m8_tumu(__VA_ARGS__) |
| #define | vid_v_u8m1(...) __riscv_vid_v_u8m1(__VA_ARGS__) |
| #define | vid_v_u8m1_m(...) __riscv_vid_v_u8m1_tumu(__VA_ARGS__) |
| #define | vid_v_u8m2(...) __riscv_vid_v_u8m2(__VA_ARGS__) |
| #define | vid_v_u8m2_m(...) __riscv_vid_v_u8m2_tumu(__VA_ARGS__) |
| #define | vid_v_u8m4(...) __riscv_vid_v_u8m4(__VA_ARGS__) |
| #define | vid_v_u8m4_m(...) __riscv_vid_v_u8m4_tumu(__VA_ARGS__) |
| #define | vid_v_u8m8(...) __riscv_vid_v_u8m8(__VA_ARGS__) |
| #define | vid_v_u8m8_m(...) __riscv_vid_v_u8m8_tumu(__VA_ARGS__) |
| #define | vid_v_u8mf2(...) __riscv_vid_v_u8mf2(__VA_ARGS__) |
| #define | vid_v_u8mf2_m(...) __riscv_vid_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vid_v_u8mf4(...) __riscv_vid_v_u8mf4(__VA_ARGS__) |
| #define | vid_v_u8mf4_m(...) __riscv_vid_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vid_v_u8mf8(...) __riscv_vid_v_u8mf8(__VA_ARGS__) |
| #define | vid_v_u8mf8_m(...) __riscv_vid_v_u8mf8_tumu(__VA_ARGS__) |
| #define | viota_m_u16m1(...) __riscv_viota_m_u16m1(__VA_ARGS__) |
| #define | viota_m_u16m1_m(...) __riscv_viota_m_u16m1_tumu(__VA_ARGS__) |
| #define | viota_m_u16m2(...) __riscv_viota_m_u16m2(__VA_ARGS__) |
| #define | viota_m_u16m2_m(...) __riscv_viota_m_u16m2_tumu(__VA_ARGS__) |
| #define | viota_m_u16m4(...) __riscv_viota_m_u16m4(__VA_ARGS__) |
| #define | viota_m_u16m4_m(...) __riscv_viota_m_u16m4_tumu(__VA_ARGS__) |
| #define | viota_m_u16m8(...) __riscv_viota_m_u16m8(__VA_ARGS__) |
| #define | viota_m_u16m8_m(...) __riscv_viota_m_u16m8_tumu(__VA_ARGS__) |
| #define | viota_m_u16mf2(...) __riscv_viota_m_u16mf2(__VA_ARGS__) |
| #define | viota_m_u16mf2_m(...) __riscv_viota_m_u16mf2_tumu(__VA_ARGS__) |
| #define | viota_m_u16mf4(...) __riscv_viota_m_u16mf4(__VA_ARGS__) |
| #define | viota_m_u16mf4_m(...) __riscv_viota_m_u16mf4_tumu(__VA_ARGS__) |
| #define | viota_m_u32m1(...) __riscv_viota_m_u32m1(__VA_ARGS__) |
| #define | viota_m_u32m1_m(...) __riscv_viota_m_u32m1_tumu(__VA_ARGS__) |
| #define | viota_m_u32m2(...) __riscv_viota_m_u32m2(__VA_ARGS__) |
| #define | viota_m_u32m2_m(...) __riscv_viota_m_u32m2_tumu(__VA_ARGS__) |
| #define | viota_m_u32m4(...) __riscv_viota_m_u32m4(__VA_ARGS__) |
| #define | viota_m_u32m4_m(...) __riscv_viota_m_u32m4_tumu(__VA_ARGS__) |
| #define | viota_m_u32m8(...) __riscv_viota_m_u32m8(__VA_ARGS__) |
| #define | viota_m_u32m8_m(...) __riscv_viota_m_u32m8_tumu(__VA_ARGS__) |
| #define | viota_m_u32mf2(...) __riscv_viota_m_u32mf2(__VA_ARGS__) |
| #define | viota_m_u32mf2_m(...) __riscv_viota_m_u32mf2_tumu(__VA_ARGS__) |
| #define | viota_m_u64m1(...) __riscv_viota_m_u64m1(__VA_ARGS__) |
| #define | viota_m_u64m1_m(...) __riscv_viota_m_u64m1_tumu(__VA_ARGS__) |
| #define | viota_m_u64m2(...) __riscv_viota_m_u64m2(__VA_ARGS__) |
| #define | viota_m_u64m2_m(...) __riscv_viota_m_u64m2_tumu(__VA_ARGS__) |
| #define | viota_m_u64m4(...) __riscv_viota_m_u64m4(__VA_ARGS__) |
| #define | viota_m_u64m4_m(...) __riscv_viota_m_u64m4_tumu(__VA_ARGS__) |
| #define | viota_m_u64m8(...) __riscv_viota_m_u64m8(__VA_ARGS__) |
| #define | viota_m_u64m8_m(...) __riscv_viota_m_u64m8_tumu(__VA_ARGS__) |
| #define | viota_m_u8m1(...) __riscv_viota_m_u8m1(__VA_ARGS__) |
| #define | viota_m_u8m1_m(...) __riscv_viota_m_u8m1_tumu(__VA_ARGS__) |
| #define | viota_m_u8m2(...) __riscv_viota_m_u8m2(__VA_ARGS__) |
| #define | viota_m_u8m2_m(...) __riscv_viota_m_u8m2_tumu(__VA_ARGS__) |
| #define | viota_m_u8m4(...) __riscv_viota_m_u8m4(__VA_ARGS__) |
| #define | viota_m_u8m4_m(...) __riscv_viota_m_u8m4_tumu(__VA_ARGS__) |
| #define | viota_m_u8m8(...) __riscv_viota_m_u8m8(__VA_ARGS__) |
| #define | viota_m_u8m8_m(...) __riscv_viota_m_u8m8_tumu(__VA_ARGS__) |
| #define | viota_m_u8mf2(...) __riscv_viota_m_u8mf2(__VA_ARGS__) |
| #define | viota_m_u8mf2_m(...) __riscv_viota_m_u8mf2_tumu(__VA_ARGS__) |
| #define | viota_m_u8mf4(...) __riscv_viota_m_u8mf4(__VA_ARGS__) |
| #define | viota_m_u8mf4_m(...) __riscv_viota_m_u8mf4_tumu(__VA_ARGS__) |
| #define | viota_m_u8mf8(...) __riscv_viota_m_u8mf8(__VA_ARGS__) |
| #define | viota_m_u8mf8_m(...) __riscv_viota_m_u8mf8_tumu(__VA_ARGS__) |
| #define | vle16_v_f16m1(...) __riscv_vle16_v_f16m1(__VA_ARGS__) |
| #define | vle16_v_f16m1_m(...) __riscv_vle16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vle16_v_f16m2(...) __riscv_vle16_v_f16m2(__VA_ARGS__) |
| #define | vle16_v_f16m2_m(...) __riscv_vle16_v_f16m2_tumu(__VA_ARGS__) |
| #define | vle16_v_f16m4(...) __riscv_vle16_v_f16m4(__VA_ARGS__) |
| #define | vle16_v_f16m4_m(...) __riscv_vle16_v_f16m4_tumu(__VA_ARGS__) |
| #define | vle16_v_f16m8(...) __riscv_vle16_v_f16m8(__VA_ARGS__) |
| #define | vle16_v_f16m8_m(...) __riscv_vle16_v_f16m8_tumu(__VA_ARGS__) |
| #define | vle16_v_f16mf2(...) __riscv_vle16_v_f16mf2(__VA_ARGS__) |
| #define | vle16_v_f16mf2_m(...) __riscv_vle16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vle16_v_f16mf4(...) __riscv_vle16_v_f16mf4(__VA_ARGS__) |
| #define | vle16_v_f16mf4_m(...) __riscv_vle16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vle16_v_i16m1(...) __riscv_vle16_v_i16m1(__VA_ARGS__) |
| #define | vle16_v_i16m1_m(...) __riscv_vle16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vle16_v_i16m2(...) __riscv_vle16_v_i16m2(__VA_ARGS__) |
| #define | vle16_v_i16m2_m(...) __riscv_vle16_v_i16m2_tumu(__VA_ARGS__) |
| #define | vle16_v_i16m4(...) __riscv_vle16_v_i16m4(__VA_ARGS__) |
| #define | vle16_v_i16m4_m(...) __riscv_vle16_v_i16m4_tumu(__VA_ARGS__) |
| #define | vle16_v_i16m8(...) __riscv_vle16_v_i16m8(__VA_ARGS__) |
| #define | vle16_v_i16m8_m(...) __riscv_vle16_v_i16m8_tumu(__VA_ARGS__) |
| #define | vle16_v_i16mf2(...) __riscv_vle16_v_i16mf2(__VA_ARGS__) |
| #define | vle16_v_i16mf2_m(...) __riscv_vle16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vle16_v_i16mf4(...) __riscv_vle16_v_i16mf4(__VA_ARGS__) |
| #define | vle16_v_i16mf4_m(...) __riscv_vle16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vle16_v_u16m1(...) __riscv_vle16_v_u16m1(__VA_ARGS__) |
| #define | vle16_v_u16m1_m(...) __riscv_vle16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vle16_v_u16m2(...) __riscv_vle16_v_u16m2(__VA_ARGS__) |
| #define | vle16_v_u16m2_m(...) __riscv_vle16_v_u16m2_tumu(__VA_ARGS__) |
| #define | vle16_v_u16m4(...) __riscv_vle16_v_u16m4(__VA_ARGS__) |
| #define | vle16_v_u16m4_m(...) __riscv_vle16_v_u16m4_tumu(__VA_ARGS__) |
| #define | vle16_v_u16m8(...) __riscv_vle16_v_u16m8(__VA_ARGS__) |
| #define | vle16_v_u16m8_m(...) __riscv_vle16_v_u16m8_tumu(__VA_ARGS__) |
| #define | vle16_v_u16mf2(...) __riscv_vle16_v_u16mf2(__VA_ARGS__) |
| #define | vle16_v_u16mf2_m(...) __riscv_vle16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vle16_v_u16mf4(...) __riscv_vle16_v_u16mf4(__VA_ARGS__) |
| #define | vle16_v_u16mf4_m(...) __riscv_vle16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vle16ff_v_f16m1(...) __riscv_vle16ff_v_f16m1(__VA_ARGS__) |
| #define | vle16ff_v_f16m1_m(...) __riscv_vle16ff_v_f16m1_tumu(__VA_ARGS__) |
| #define | vle16ff_v_f16m2(...) __riscv_vle16ff_v_f16m2(__VA_ARGS__) |
| #define | vle16ff_v_f16m2_m(...) __riscv_vle16ff_v_f16m2_tumu(__VA_ARGS__) |
| #define | vle16ff_v_f16m4(...) __riscv_vle16ff_v_f16m4(__VA_ARGS__) |
| #define | vle16ff_v_f16m4_m(...) __riscv_vle16ff_v_f16m4_tumu(__VA_ARGS__) |
| #define | vle16ff_v_f16m8(...) __riscv_vle16ff_v_f16m8(__VA_ARGS__) |
| #define | vle16ff_v_f16m8_m(...) __riscv_vle16ff_v_f16m8_tumu(__VA_ARGS__) |
| #define | vle16ff_v_f16mf2(...) __riscv_vle16ff_v_f16mf2(__VA_ARGS__) |
| #define | vle16ff_v_f16mf2_m(...) __riscv_vle16ff_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vle16ff_v_f16mf4(...) __riscv_vle16ff_v_f16mf4(__VA_ARGS__) |
| #define | vle16ff_v_f16mf4_m(...) __riscv_vle16ff_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vle16ff_v_i16m1(...) __riscv_vle16ff_v_i16m1(__VA_ARGS__) |
| #define | vle16ff_v_i16m1_m(...) __riscv_vle16ff_v_i16m1_tumu(__VA_ARGS__) |
| #define | vle16ff_v_i16m2(...) __riscv_vle16ff_v_i16m2(__VA_ARGS__) |
| #define | vle16ff_v_i16m2_m(...) __riscv_vle16ff_v_i16m2_tumu(__VA_ARGS__) |
| #define | vle16ff_v_i16m4(...) __riscv_vle16ff_v_i16m4(__VA_ARGS__) |
| #define | vle16ff_v_i16m4_m(...) __riscv_vle16ff_v_i16m4_tumu(__VA_ARGS__) |
| #define | vle16ff_v_i16m8(...) __riscv_vle16ff_v_i16m8(__VA_ARGS__) |
| #define | vle16ff_v_i16m8_m(...) __riscv_vle16ff_v_i16m8_tumu(__VA_ARGS__) |
| #define | vle16ff_v_i16mf2(...) __riscv_vle16ff_v_i16mf2(__VA_ARGS__) |
| #define | vle16ff_v_i16mf2_m(...) __riscv_vle16ff_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vle16ff_v_i16mf4(...) __riscv_vle16ff_v_i16mf4(__VA_ARGS__) |
| #define | vle16ff_v_i16mf4_m(...) __riscv_vle16ff_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vle16ff_v_u16m1(...) __riscv_vle16ff_v_u16m1(__VA_ARGS__) |
| #define | vle16ff_v_u16m1_m(...) __riscv_vle16ff_v_u16m1_tumu(__VA_ARGS__) |
| #define | vle16ff_v_u16m2(...) __riscv_vle16ff_v_u16m2(__VA_ARGS__) |
| #define | vle16ff_v_u16m2_m(...) __riscv_vle16ff_v_u16m2_tumu(__VA_ARGS__) |
| #define | vle16ff_v_u16m4(...) __riscv_vle16ff_v_u16m4(__VA_ARGS__) |
| #define | vle16ff_v_u16m4_m(...) __riscv_vle16ff_v_u16m4_tumu(__VA_ARGS__) |
| #define | vle16ff_v_u16m8(...) __riscv_vle16ff_v_u16m8(__VA_ARGS__) |
| #define | vle16ff_v_u16m8_m(...) __riscv_vle16ff_v_u16m8_tumu(__VA_ARGS__) |
| #define | vle16ff_v_u16mf2(...) __riscv_vle16ff_v_u16mf2(__VA_ARGS__) |
| #define | vle16ff_v_u16mf2_m(...) __riscv_vle16ff_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vle16ff_v_u16mf4(...) __riscv_vle16ff_v_u16mf4(__VA_ARGS__) |
| #define | vle16ff_v_u16mf4_m(...) __riscv_vle16ff_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vle32_v_f32m1(...) __riscv_vle32_v_f32m1(__VA_ARGS__) |
| #define | vle32_v_f32m1_m(...) __riscv_vle32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vle32_v_f32m2(...) __riscv_vle32_v_f32m2(__VA_ARGS__) |
| #define | vle32_v_f32m2_m(...) __riscv_vle32_v_f32m2_tumu(__VA_ARGS__) |
| #define | vle32_v_f32m4(...) __riscv_vle32_v_f32m4(__VA_ARGS__) |
| #define | vle32_v_f32m4_m(...) __riscv_vle32_v_f32m4_tumu(__VA_ARGS__) |
| #define | vle32_v_f32m8(...) __riscv_vle32_v_f32m8(__VA_ARGS__) |
| #define | vle32_v_f32m8_m(...) __riscv_vle32_v_f32m8_tumu(__VA_ARGS__) |
| #define | vle32_v_f32mf2(...) __riscv_vle32_v_f32mf2(__VA_ARGS__) |
| #define | vle32_v_f32mf2_m(...) __riscv_vle32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vle32_v_i32m1(...) __riscv_vle32_v_i32m1(__VA_ARGS__) |
| #define | vle32_v_i32m1_m(...) __riscv_vle32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vle32_v_i32m2(...) __riscv_vle32_v_i32m2(__VA_ARGS__) |
| #define | vle32_v_i32m2_m(...) __riscv_vle32_v_i32m2_tumu(__VA_ARGS__) |
| #define | vle32_v_i32m4(...) __riscv_vle32_v_i32m4(__VA_ARGS__) |
| #define | vle32_v_i32m4_m(...) __riscv_vle32_v_i32m4_tumu(__VA_ARGS__) |
| #define | vle32_v_i32m8(...) __riscv_vle32_v_i32m8(__VA_ARGS__) |
| #define | vle32_v_i32m8_m(...) __riscv_vle32_v_i32m8_tumu(__VA_ARGS__) |
| #define | vle32_v_i32mf2(...) __riscv_vle32_v_i32mf2(__VA_ARGS__) |
| #define | vle32_v_i32mf2_m(...) __riscv_vle32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vle32_v_u32m1(...) __riscv_vle32_v_u32m1(__VA_ARGS__) |
| #define | vle32_v_u32m1_m(...) __riscv_vle32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vle32_v_u32m2(...) __riscv_vle32_v_u32m2(__VA_ARGS__) |
| #define | vle32_v_u32m2_m(...) __riscv_vle32_v_u32m2_tumu(__VA_ARGS__) |
| #define | vle32_v_u32m4(...) __riscv_vle32_v_u32m4(__VA_ARGS__) |
| #define | vle32_v_u32m4_m(...) __riscv_vle32_v_u32m4_tumu(__VA_ARGS__) |
| #define | vle32_v_u32m8(...) __riscv_vle32_v_u32m8(__VA_ARGS__) |
| #define | vle32_v_u32m8_m(...) __riscv_vle32_v_u32m8_tumu(__VA_ARGS__) |
| #define | vle32_v_u32mf2(...) __riscv_vle32_v_u32mf2(__VA_ARGS__) |
| #define | vle32_v_u32mf2_m(...) __riscv_vle32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vle32ff_v_f32m1(...) __riscv_vle32ff_v_f32m1(__VA_ARGS__) |
| #define | vle32ff_v_f32m1_m(...) __riscv_vle32ff_v_f32m1_tumu(__VA_ARGS__) |
| #define | vle32ff_v_f32m2(...) __riscv_vle32ff_v_f32m2(__VA_ARGS__) |
| #define | vle32ff_v_f32m2_m(...) __riscv_vle32ff_v_f32m2_tumu(__VA_ARGS__) |
| #define | vle32ff_v_f32m4(...) __riscv_vle32ff_v_f32m4(__VA_ARGS__) |
| #define | vle32ff_v_f32m4_m(...) __riscv_vle32ff_v_f32m4_tumu(__VA_ARGS__) |
| #define | vle32ff_v_f32m8(...) __riscv_vle32ff_v_f32m8(__VA_ARGS__) |
| #define | vle32ff_v_f32m8_m(...) __riscv_vle32ff_v_f32m8_tumu(__VA_ARGS__) |
| #define | vle32ff_v_f32mf2(...) __riscv_vle32ff_v_f32mf2(__VA_ARGS__) |
| #define | vle32ff_v_f32mf2_m(...) __riscv_vle32ff_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vle32ff_v_i32m1(...) __riscv_vle32ff_v_i32m1(__VA_ARGS__) |
| #define | vle32ff_v_i32m1_m(...) __riscv_vle32ff_v_i32m1_tumu(__VA_ARGS__) |
| #define | vle32ff_v_i32m2(...) __riscv_vle32ff_v_i32m2(__VA_ARGS__) |
| #define | vle32ff_v_i32m2_m(...) __riscv_vle32ff_v_i32m2_tumu(__VA_ARGS__) |
| #define | vle32ff_v_i32m4(...) __riscv_vle32ff_v_i32m4(__VA_ARGS__) |
| #define | vle32ff_v_i32m4_m(...) __riscv_vle32ff_v_i32m4_tumu(__VA_ARGS__) |
| #define | vle32ff_v_i32m8(...) __riscv_vle32ff_v_i32m8(__VA_ARGS__) |
| #define | vle32ff_v_i32m8_m(...) __riscv_vle32ff_v_i32m8_tumu(__VA_ARGS__) |
| #define | vle32ff_v_i32mf2(...) __riscv_vle32ff_v_i32mf2(__VA_ARGS__) |
| #define | vle32ff_v_i32mf2_m(...) __riscv_vle32ff_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vle32ff_v_u32m1(...) __riscv_vle32ff_v_u32m1(__VA_ARGS__) |
| #define | vle32ff_v_u32m1_m(...) __riscv_vle32ff_v_u32m1_tumu(__VA_ARGS__) |
| #define | vle32ff_v_u32m2(...) __riscv_vle32ff_v_u32m2(__VA_ARGS__) |
| #define | vle32ff_v_u32m2_m(...) __riscv_vle32ff_v_u32m2_tumu(__VA_ARGS__) |
| #define | vle32ff_v_u32m4(...) __riscv_vle32ff_v_u32m4(__VA_ARGS__) |
| #define | vle32ff_v_u32m4_m(...) __riscv_vle32ff_v_u32m4_tumu(__VA_ARGS__) |
| #define | vle32ff_v_u32m8(...) __riscv_vle32ff_v_u32m8(__VA_ARGS__) |
| #define | vle32ff_v_u32m8_m(...) __riscv_vle32ff_v_u32m8_tumu(__VA_ARGS__) |
| #define | vle32ff_v_u32mf2(...) __riscv_vle32ff_v_u32mf2(__VA_ARGS__) |
| #define | vle32ff_v_u32mf2_m(...) __riscv_vle32ff_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vle64_v_f64m1(...) __riscv_vle64_v_f64m1(__VA_ARGS__) |
| #define | vle64_v_f64m1_m(...) __riscv_vle64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vle64_v_f64m2(...) __riscv_vle64_v_f64m2(__VA_ARGS__) |
| #define | vle64_v_f64m2_m(...) __riscv_vle64_v_f64m2_tumu(__VA_ARGS__) |
| #define | vle64_v_f64m4(...) __riscv_vle64_v_f64m4(__VA_ARGS__) |
| #define | vle64_v_f64m4_m(...) __riscv_vle64_v_f64m4_tumu(__VA_ARGS__) |
| #define | vle64_v_f64m8(...) __riscv_vle64_v_f64m8(__VA_ARGS__) |
| #define | vle64_v_f64m8_m(...) __riscv_vle64_v_f64m8_tumu(__VA_ARGS__) |
| #define | vle64_v_i64m1(...) __riscv_vle64_v_i64m1(__VA_ARGS__) |
| #define | vle64_v_i64m1_m(...) __riscv_vle64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vle64_v_i64m2(...) __riscv_vle64_v_i64m2(__VA_ARGS__) |
| #define | vle64_v_i64m2_m(...) __riscv_vle64_v_i64m2_tumu(__VA_ARGS__) |
| #define | vle64_v_i64m4(...) __riscv_vle64_v_i64m4(__VA_ARGS__) |
| #define | vle64_v_i64m4_m(...) __riscv_vle64_v_i64m4_tumu(__VA_ARGS__) |
| #define | vle64_v_i64m8(...) __riscv_vle64_v_i64m8(__VA_ARGS__) |
| #define | vle64_v_i64m8_m(...) __riscv_vle64_v_i64m8_tumu(__VA_ARGS__) |
| #define | vle64_v_u64m1(...) __riscv_vle64_v_u64m1(__VA_ARGS__) |
| #define | vle64_v_u64m1_m(...) __riscv_vle64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vle64_v_u64m2(...) __riscv_vle64_v_u64m2(__VA_ARGS__) |
| #define | vle64_v_u64m2_m(...) __riscv_vle64_v_u64m2_tumu(__VA_ARGS__) |
| #define | vle64_v_u64m4(...) __riscv_vle64_v_u64m4(__VA_ARGS__) |
| #define | vle64_v_u64m4_m(...) __riscv_vle64_v_u64m4_tumu(__VA_ARGS__) |
| #define | vle64_v_u64m8(...) __riscv_vle64_v_u64m8(__VA_ARGS__) |
| #define | vle64_v_u64m8_m(...) __riscv_vle64_v_u64m8_tumu(__VA_ARGS__) |
| #define | vle64ff_v_f64m1(...) __riscv_vle64ff_v_f64m1(__VA_ARGS__) |
| #define | vle64ff_v_f64m1_m(...) __riscv_vle64ff_v_f64m1_tumu(__VA_ARGS__) |
| #define | vle64ff_v_f64m2(...) __riscv_vle64ff_v_f64m2(__VA_ARGS__) |
| #define | vle64ff_v_f64m2_m(...) __riscv_vle64ff_v_f64m2_tumu(__VA_ARGS__) |
| #define | vle64ff_v_f64m4(...) __riscv_vle64ff_v_f64m4(__VA_ARGS__) |
| #define | vle64ff_v_f64m4_m(...) __riscv_vle64ff_v_f64m4_tumu(__VA_ARGS__) |
| #define | vle64ff_v_f64m8(...) __riscv_vle64ff_v_f64m8(__VA_ARGS__) |
| #define | vle64ff_v_f64m8_m(...) __riscv_vle64ff_v_f64m8_tumu(__VA_ARGS__) |
| #define | vle64ff_v_i64m1(...) __riscv_vle64ff_v_i64m1(__VA_ARGS__) |
| #define | vle64ff_v_i64m1_m(...) __riscv_vle64ff_v_i64m1_tumu(__VA_ARGS__) |
| #define | vle64ff_v_i64m2(...) __riscv_vle64ff_v_i64m2(__VA_ARGS__) |
| #define | vle64ff_v_i64m2_m(...) __riscv_vle64ff_v_i64m2_tumu(__VA_ARGS__) |
| #define | vle64ff_v_i64m4(...) __riscv_vle64ff_v_i64m4(__VA_ARGS__) |
| #define | vle64ff_v_i64m4_m(...) __riscv_vle64ff_v_i64m4_tumu(__VA_ARGS__) |
| #define | vle64ff_v_i64m8(...) __riscv_vle64ff_v_i64m8(__VA_ARGS__) |
| #define | vle64ff_v_i64m8_m(...) __riscv_vle64ff_v_i64m8_tumu(__VA_ARGS__) |
| #define | vle64ff_v_u64m1(...) __riscv_vle64ff_v_u64m1(__VA_ARGS__) |
| #define | vle64ff_v_u64m1_m(...) __riscv_vle64ff_v_u64m1_tumu(__VA_ARGS__) |
| #define | vle64ff_v_u64m2(...) __riscv_vle64ff_v_u64m2(__VA_ARGS__) |
| #define | vle64ff_v_u64m2_m(...) __riscv_vle64ff_v_u64m2_tumu(__VA_ARGS__) |
| #define | vle64ff_v_u64m4(...) __riscv_vle64ff_v_u64m4(__VA_ARGS__) |
| #define | vle64ff_v_u64m4_m(...) __riscv_vle64ff_v_u64m4_tumu(__VA_ARGS__) |
| #define | vle64ff_v_u64m8(...) __riscv_vle64ff_v_u64m8(__VA_ARGS__) |
| #define | vle64ff_v_u64m8_m(...) __riscv_vle64ff_v_u64m8_tumu(__VA_ARGS__) |
| #define | vle8_v_i8m1(...) __riscv_vle8_v_i8m1(__VA_ARGS__) |
| #define | vle8_v_i8m1_m(...) __riscv_vle8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vle8_v_i8m2(...) __riscv_vle8_v_i8m2(__VA_ARGS__) |
| #define | vle8_v_i8m2_m(...) __riscv_vle8_v_i8m2_tumu(__VA_ARGS__) |
| #define | vle8_v_i8m4(...) __riscv_vle8_v_i8m4(__VA_ARGS__) |
| #define | vle8_v_i8m4_m(...) __riscv_vle8_v_i8m4_tumu(__VA_ARGS__) |
| #define | vle8_v_i8m8(...) __riscv_vle8_v_i8m8(__VA_ARGS__) |
| #define | vle8_v_i8m8_m(...) __riscv_vle8_v_i8m8_tumu(__VA_ARGS__) |
| #define | vle8_v_i8mf2(...) __riscv_vle8_v_i8mf2(__VA_ARGS__) |
| #define | vle8_v_i8mf2_m(...) __riscv_vle8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vle8_v_i8mf4(...) __riscv_vle8_v_i8mf4(__VA_ARGS__) |
| #define | vle8_v_i8mf4_m(...) __riscv_vle8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vle8_v_i8mf8(...) __riscv_vle8_v_i8mf8(__VA_ARGS__) |
| #define | vle8_v_i8mf8_m(...) __riscv_vle8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vle8_v_u8m1(...) __riscv_vle8_v_u8m1(__VA_ARGS__) |
| #define | vle8_v_u8m1_m(...) __riscv_vle8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vle8_v_u8m2(...) __riscv_vle8_v_u8m2(__VA_ARGS__) |
| #define | vle8_v_u8m2_m(...) __riscv_vle8_v_u8m2_tumu(__VA_ARGS__) |
| #define | vle8_v_u8m4(...) __riscv_vle8_v_u8m4(__VA_ARGS__) |
| #define | vle8_v_u8m4_m(...) __riscv_vle8_v_u8m4_tumu(__VA_ARGS__) |
| #define | vle8_v_u8m8(...) __riscv_vle8_v_u8m8(__VA_ARGS__) |
| #define | vle8_v_u8m8_m(...) __riscv_vle8_v_u8m8_tumu(__VA_ARGS__) |
| #define | vle8_v_u8mf2(...) __riscv_vle8_v_u8mf2(__VA_ARGS__) |
| #define | vle8_v_u8mf2_m(...) __riscv_vle8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vle8_v_u8mf4(...) __riscv_vle8_v_u8mf4(__VA_ARGS__) |
| #define | vle8_v_u8mf4_m(...) __riscv_vle8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vle8_v_u8mf8(...) __riscv_vle8_v_u8mf8(__VA_ARGS__) |
| #define | vle8_v_u8mf8_m(...) __riscv_vle8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vle8ff_v_i8m1(...) __riscv_vle8ff_v_i8m1(__VA_ARGS__) |
| #define | vle8ff_v_i8m1_m(...) __riscv_vle8ff_v_i8m1_tumu(__VA_ARGS__) |
| #define | vle8ff_v_i8m2(...) __riscv_vle8ff_v_i8m2(__VA_ARGS__) |
| #define | vle8ff_v_i8m2_m(...) __riscv_vle8ff_v_i8m2_tumu(__VA_ARGS__) |
| #define | vle8ff_v_i8m4(...) __riscv_vle8ff_v_i8m4(__VA_ARGS__) |
| #define | vle8ff_v_i8m4_m(...) __riscv_vle8ff_v_i8m4_tumu(__VA_ARGS__) |
| #define | vle8ff_v_i8m8(...) __riscv_vle8ff_v_i8m8(__VA_ARGS__) |
| #define | vle8ff_v_i8m8_m(...) __riscv_vle8ff_v_i8m8_tumu(__VA_ARGS__) |
| #define | vle8ff_v_i8mf2(...) __riscv_vle8ff_v_i8mf2(__VA_ARGS__) |
| #define | vle8ff_v_i8mf2_m(...) __riscv_vle8ff_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vle8ff_v_i8mf4(...) __riscv_vle8ff_v_i8mf4(__VA_ARGS__) |
| #define | vle8ff_v_i8mf4_m(...) __riscv_vle8ff_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vle8ff_v_i8mf8(...) __riscv_vle8ff_v_i8mf8(__VA_ARGS__) |
| #define | vle8ff_v_i8mf8_m(...) __riscv_vle8ff_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vle8ff_v_u8m1(...) __riscv_vle8ff_v_u8m1(__VA_ARGS__) |
| #define | vle8ff_v_u8m1_m(...) __riscv_vle8ff_v_u8m1_tumu(__VA_ARGS__) |
| #define | vle8ff_v_u8m2(...) __riscv_vle8ff_v_u8m2(__VA_ARGS__) |
| #define | vle8ff_v_u8m2_m(...) __riscv_vle8ff_v_u8m2_tumu(__VA_ARGS__) |
| #define | vle8ff_v_u8m4(...) __riscv_vle8ff_v_u8m4(__VA_ARGS__) |
| #define | vle8ff_v_u8m4_m(...) __riscv_vle8ff_v_u8m4_tumu(__VA_ARGS__) |
| #define | vle8ff_v_u8m8(...) __riscv_vle8ff_v_u8m8(__VA_ARGS__) |
| #define | vle8ff_v_u8m8_m(...) __riscv_vle8ff_v_u8m8_tumu(__VA_ARGS__) |
| #define | vle8ff_v_u8mf2(...) __riscv_vle8ff_v_u8mf2(__VA_ARGS__) |
| #define | vle8ff_v_u8mf2_m(...) __riscv_vle8ff_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vle8ff_v_u8mf4(...) __riscv_vle8ff_v_u8mf4(__VA_ARGS__) |
| #define | vle8ff_v_u8mf4_m(...) __riscv_vle8ff_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vle8ff_v_u8mf8(...) __riscv_vle8ff_v_u8mf8(__VA_ARGS__) |
| #define | vle8ff_v_u8mf8_m(...) __riscv_vle8ff_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlm_v_b1(...) __riscv_vlm_v_b1(__VA_ARGS__) |
| #define | vlm_v_b16(...) __riscv_vlm_v_b16(__VA_ARGS__) |
| #define | vlm_v_b2(...) __riscv_vlm_v_b2(__VA_ARGS__) |
| #define | vlm_v_b32(...) __riscv_vlm_v_b32(__VA_ARGS__) |
| #define | vlm_v_b4(...) __riscv_vlm_v_b4(__VA_ARGS__) |
| #define | vlm_v_b64(...) __riscv_vlm_v_b64(__VA_ARGS__) |
| #define | vlm_v_b8(...) __riscv_vlm_v_b8(__VA_ARGS__) |
| #define | vlmul_ext_v_f16m1_f16m2(...) __riscv_vlmul_ext_v_f16m1_f16m2(__VA_ARGS__) |
| #define | vlmul_ext_v_f16m1_f16m4(...) __riscv_vlmul_ext_v_f16m1_f16m4(__VA_ARGS__) |
| #define | vlmul_ext_v_f16m1_f16m8(...) __riscv_vlmul_ext_v_f16m1_f16m8(__VA_ARGS__) |
| #define | vlmul_ext_v_f16m2_f16m4(...) __riscv_vlmul_ext_v_f16m2_f16m4(__VA_ARGS__) |
| #define | vlmul_ext_v_f16m2_f16m8(...) __riscv_vlmul_ext_v_f16m2_f16m8(__VA_ARGS__) |
| #define | vlmul_ext_v_f16m4_f16m8(...) __riscv_vlmul_ext_v_f16m4_f16m8(__VA_ARGS__) |
| #define | vlmul_ext_v_f16mf2_f16m1(...) __riscv_vlmul_ext_v_f16mf2_f16m1(__VA_ARGS__) |
| #define | vlmul_ext_v_f16mf2_f16m2(...) __riscv_vlmul_ext_v_f16mf2_f16m2(__VA_ARGS__) |
| #define | vlmul_ext_v_f16mf2_f16m4(...) __riscv_vlmul_ext_v_f16mf2_f16m4(__VA_ARGS__) |
| #define | vlmul_ext_v_f16mf2_f16m8(...) __riscv_vlmul_ext_v_f16mf2_f16m8(__VA_ARGS__) |
| #define | vlmul_ext_v_f16mf4_f16m1(...) __riscv_vlmul_ext_v_f16mf4_f16m1(__VA_ARGS__) |
| #define | vlmul_ext_v_f16mf4_f16m2(...) __riscv_vlmul_ext_v_f16mf4_f16m2(__VA_ARGS__) |
| #define | vlmul_ext_v_f16mf4_f16m4(...) __riscv_vlmul_ext_v_f16mf4_f16m4(__VA_ARGS__) |
| #define | vlmul_ext_v_f16mf4_f16m8(...) __riscv_vlmul_ext_v_f16mf4_f16m8(__VA_ARGS__) |
| #define | vlmul_ext_v_f16mf4_f16mf2(...) __riscv_vlmul_ext_v_f16mf4_f16mf2(__VA_ARGS__) |
| #define | vlmul_ext_v_f32m1_f32m2(...) __riscv_vlmul_ext_v_f32m1_f32m2(__VA_ARGS__) |
| #define | vlmul_ext_v_f32m1_f32m4(...) __riscv_vlmul_ext_v_f32m1_f32m4(__VA_ARGS__) |
| #define | vlmul_ext_v_f32m1_f32m8(...) __riscv_vlmul_ext_v_f32m1_f32m8(__VA_ARGS__) |
| #define | vlmul_ext_v_f32m2_f32m4(...) __riscv_vlmul_ext_v_f32m2_f32m4(__VA_ARGS__) |
| #define | vlmul_ext_v_f32m2_f32m8(...) __riscv_vlmul_ext_v_f32m2_f32m8(__VA_ARGS__) |
| #define | vlmul_ext_v_f32m4_f32m8(...) __riscv_vlmul_ext_v_f32m4_f32m8(__VA_ARGS__) |
| #define | vlmul_ext_v_f32mf2_f32m1(...) __riscv_vlmul_ext_v_f32mf2_f32m1(__VA_ARGS__) |
| #define | vlmul_ext_v_f32mf2_f32m2(...) __riscv_vlmul_ext_v_f32mf2_f32m2(__VA_ARGS__) |
| #define | vlmul_ext_v_f32mf2_f32m4(...) __riscv_vlmul_ext_v_f32mf2_f32m4(__VA_ARGS__) |
| #define | vlmul_ext_v_f32mf2_f32m8(...) __riscv_vlmul_ext_v_f32mf2_f32m8(__VA_ARGS__) |
| #define | vlmul_ext_v_f64m1_f64m2(...) __riscv_vlmul_ext_v_f64m1_f64m2(__VA_ARGS__) |
| #define | vlmul_ext_v_f64m1_f64m4(...) __riscv_vlmul_ext_v_f64m1_f64m4(__VA_ARGS__) |
| #define | vlmul_ext_v_f64m1_f64m8(...) __riscv_vlmul_ext_v_f64m1_f64m8(__VA_ARGS__) |
| #define | vlmul_ext_v_f64m2_f64m4(...) __riscv_vlmul_ext_v_f64m2_f64m4(__VA_ARGS__) |
| #define | vlmul_ext_v_f64m2_f64m8(...) __riscv_vlmul_ext_v_f64m2_f64m8(__VA_ARGS__) |
| #define | vlmul_ext_v_f64m4_f64m8(...) __riscv_vlmul_ext_v_f64m4_f64m8(__VA_ARGS__) |
| #define | vlmul_ext_v_i16m1_i16m2(...) __riscv_vlmul_ext_v_i16m1_i16m2(__VA_ARGS__) |
| #define | vlmul_ext_v_i16m1_i16m4(...) __riscv_vlmul_ext_v_i16m1_i16m4(__VA_ARGS__) |
| #define | vlmul_ext_v_i16m1_i16m8(...) __riscv_vlmul_ext_v_i16m1_i16m8(__VA_ARGS__) |
| #define | vlmul_ext_v_i16m2_i16m4(...) __riscv_vlmul_ext_v_i16m2_i16m4(__VA_ARGS__) |
| #define | vlmul_ext_v_i16m2_i16m8(...) __riscv_vlmul_ext_v_i16m2_i16m8(__VA_ARGS__) |
| #define | vlmul_ext_v_i16m4_i16m8(...) __riscv_vlmul_ext_v_i16m4_i16m8(__VA_ARGS__) |
| #define | vlmul_ext_v_i16mf2_i16m1(...) __riscv_vlmul_ext_v_i16mf2_i16m1(__VA_ARGS__) |
| #define | vlmul_ext_v_i16mf2_i16m2(...) __riscv_vlmul_ext_v_i16mf2_i16m2(__VA_ARGS__) |
| #define | vlmul_ext_v_i16mf2_i16m4(...) __riscv_vlmul_ext_v_i16mf2_i16m4(__VA_ARGS__) |
| #define | vlmul_ext_v_i16mf2_i16m8(...) __riscv_vlmul_ext_v_i16mf2_i16m8(__VA_ARGS__) |
| #define | vlmul_ext_v_i16mf4_i16m1(...) __riscv_vlmul_ext_v_i16mf4_i16m1(__VA_ARGS__) |
| #define | vlmul_ext_v_i16mf4_i16m2(...) __riscv_vlmul_ext_v_i16mf4_i16m2(__VA_ARGS__) |
| #define | vlmul_ext_v_i16mf4_i16m4(...) __riscv_vlmul_ext_v_i16mf4_i16m4(__VA_ARGS__) |
| #define | vlmul_ext_v_i16mf4_i16m8(...) __riscv_vlmul_ext_v_i16mf4_i16m8(__VA_ARGS__) |
| #define | vlmul_ext_v_i16mf4_i16mf2(...) __riscv_vlmul_ext_v_i16mf4_i16mf2(__VA_ARGS__) |
| #define | vlmul_ext_v_i32m1_i32m2(...) __riscv_vlmul_ext_v_i32m1_i32m2(__VA_ARGS__) |
| #define | vlmul_ext_v_i32m1_i32m4(...) __riscv_vlmul_ext_v_i32m1_i32m4(__VA_ARGS__) |
| #define | vlmul_ext_v_i32m1_i32m8(...) __riscv_vlmul_ext_v_i32m1_i32m8(__VA_ARGS__) |
| #define | vlmul_ext_v_i32m2_i32m4(...) __riscv_vlmul_ext_v_i32m2_i32m4(__VA_ARGS__) |
| #define | vlmul_ext_v_i32m2_i32m8(...) __riscv_vlmul_ext_v_i32m2_i32m8(__VA_ARGS__) |
| #define | vlmul_ext_v_i32m4_i32m8(...) __riscv_vlmul_ext_v_i32m4_i32m8(__VA_ARGS__) |
| #define | vlmul_ext_v_i32mf2_i32m1(...) __riscv_vlmul_ext_v_i32mf2_i32m1(__VA_ARGS__) |
| #define | vlmul_ext_v_i32mf2_i32m2(...) __riscv_vlmul_ext_v_i32mf2_i32m2(__VA_ARGS__) |
| #define | vlmul_ext_v_i32mf2_i32m4(...) __riscv_vlmul_ext_v_i32mf2_i32m4(__VA_ARGS__) |
| #define | vlmul_ext_v_i32mf2_i32m8(...) __riscv_vlmul_ext_v_i32mf2_i32m8(__VA_ARGS__) |
| #define | vlmul_ext_v_i64m1_i64m2(...) __riscv_vlmul_ext_v_i64m1_i64m2(__VA_ARGS__) |
| #define | vlmul_ext_v_i64m1_i64m4(...) __riscv_vlmul_ext_v_i64m1_i64m4(__VA_ARGS__) |
| #define | vlmul_ext_v_i64m1_i64m8(...) __riscv_vlmul_ext_v_i64m1_i64m8(__VA_ARGS__) |
| #define | vlmul_ext_v_i64m2_i64m4(...) __riscv_vlmul_ext_v_i64m2_i64m4(__VA_ARGS__) |
| #define | vlmul_ext_v_i64m2_i64m8(...) __riscv_vlmul_ext_v_i64m2_i64m8(__VA_ARGS__) |
| #define | vlmul_ext_v_i64m4_i64m8(...) __riscv_vlmul_ext_v_i64m4_i64m8(__VA_ARGS__) |
| #define | vlmul_ext_v_i8m1_i8m2(...) __riscv_vlmul_ext_v_i8m1_i8m2(__VA_ARGS__) |
| #define | vlmul_ext_v_i8m1_i8m4(...) __riscv_vlmul_ext_v_i8m1_i8m4(__VA_ARGS__) |
| #define | vlmul_ext_v_i8m1_i8m8(...) __riscv_vlmul_ext_v_i8m1_i8m8(__VA_ARGS__) |
| #define | vlmul_ext_v_i8m2_i8m4(...) __riscv_vlmul_ext_v_i8m2_i8m4(__VA_ARGS__) |
| #define | vlmul_ext_v_i8m2_i8m8(...) __riscv_vlmul_ext_v_i8m2_i8m8(__VA_ARGS__) |
| #define | vlmul_ext_v_i8m4_i8m8(...) __riscv_vlmul_ext_v_i8m4_i8m8(__VA_ARGS__) |
| #define | vlmul_ext_v_i8mf2_i8m1(...) __riscv_vlmul_ext_v_i8mf2_i8m1(__VA_ARGS__) |
| #define | vlmul_ext_v_i8mf2_i8m2(...) __riscv_vlmul_ext_v_i8mf2_i8m2(__VA_ARGS__) |
| #define | vlmul_ext_v_i8mf2_i8m4(...) __riscv_vlmul_ext_v_i8mf2_i8m4(__VA_ARGS__) |
| #define | vlmul_ext_v_i8mf2_i8m8(...) __riscv_vlmul_ext_v_i8mf2_i8m8(__VA_ARGS__) |
| #define | vlmul_ext_v_i8mf4_i8m1(...) __riscv_vlmul_ext_v_i8mf4_i8m1(__VA_ARGS__) |
| #define | vlmul_ext_v_i8mf4_i8m2(...) __riscv_vlmul_ext_v_i8mf4_i8m2(__VA_ARGS__) |
| #define | vlmul_ext_v_i8mf4_i8m4(...) __riscv_vlmul_ext_v_i8mf4_i8m4(__VA_ARGS__) |
| #define | vlmul_ext_v_i8mf4_i8m8(...) __riscv_vlmul_ext_v_i8mf4_i8m8(__VA_ARGS__) |
| #define | vlmul_ext_v_i8mf4_i8mf2(...) __riscv_vlmul_ext_v_i8mf4_i8mf2(__VA_ARGS__) |
| #define | vlmul_ext_v_i8mf8_i8m1(...) __riscv_vlmul_ext_v_i8mf8_i8m1(__VA_ARGS__) |
| #define | vlmul_ext_v_i8mf8_i8m2(...) __riscv_vlmul_ext_v_i8mf8_i8m2(__VA_ARGS__) |
| #define | vlmul_ext_v_i8mf8_i8m4(...) __riscv_vlmul_ext_v_i8mf8_i8m4(__VA_ARGS__) |
| #define | vlmul_ext_v_i8mf8_i8m8(...) __riscv_vlmul_ext_v_i8mf8_i8m8(__VA_ARGS__) |
| #define | vlmul_ext_v_i8mf8_i8mf2(...) __riscv_vlmul_ext_v_i8mf8_i8mf2(__VA_ARGS__) |
| #define | vlmul_ext_v_i8mf8_i8mf4(...) __riscv_vlmul_ext_v_i8mf8_i8mf4(__VA_ARGS__) |
| #define | vlmul_ext_v_u16m1_u16m2(...) __riscv_vlmul_ext_v_u16m1_u16m2(__VA_ARGS__) |
| #define | vlmul_ext_v_u16m1_u16m4(...) __riscv_vlmul_ext_v_u16m1_u16m4(__VA_ARGS__) |
| #define | vlmul_ext_v_u16m1_u16m8(...) __riscv_vlmul_ext_v_u16m1_u16m8(__VA_ARGS__) |
| #define | vlmul_ext_v_u16m2_u16m4(...) __riscv_vlmul_ext_v_u16m2_u16m4(__VA_ARGS__) |
| #define | vlmul_ext_v_u16m2_u16m8(...) __riscv_vlmul_ext_v_u16m2_u16m8(__VA_ARGS__) |
| #define | vlmul_ext_v_u16m4_u16m8(...) __riscv_vlmul_ext_v_u16m4_u16m8(__VA_ARGS__) |
| #define | vlmul_ext_v_u16mf2_u16m1(...) __riscv_vlmul_ext_v_u16mf2_u16m1(__VA_ARGS__) |
| #define | vlmul_ext_v_u16mf2_u16m2(...) __riscv_vlmul_ext_v_u16mf2_u16m2(__VA_ARGS__) |
| #define | vlmul_ext_v_u16mf2_u16m4(...) __riscv_vlmul_ext_v_u16mf2_u16m4(__VA_ARGS__) |
| #define | vlmul_ext_v_u16mf2_u16m8(...) __riscv_vlmul_ext_v_u16mf2_u16m8(__VA_ARGS__) |
| #define | vlmul_ext_v_u16mf4_u16m1(...) __riscv_vlmul_ext_v_u16mf4_u16m1(__VA_ARGS__) |
| #define | vlmul_ext_v_u16mf4_u16m2(...) __riscv_vlmul_ext_v_u16mf4_u16m2(__VA_ARGS__) |
| #define | vlmul_ext_v_u16mf4_u16m4(...) __riscv_vlmul_ext_v_u16mf4_u16m4(__VA_ARGS__) |
| #define | vlmul_ext_v_u16mf4_u16m8(...) __riscv_vlmul_ext_v_u16mf4_u16m8(__VA_ARGS__) |
| #define | vlmul_ext_v_u16mf4_u16mf2(...) __riscv_vlmul_ext_v_u16mf4_u16mf2(__VA_ARGS__) |
| #define | vlmul_ext_v_u32m1_u32m2(...) __riscv_vlmul_ext_v_u32m1_u32m2(__VA_ARGS__) |
| #define | vlmul_ext_v_u32m1_u32m4(...) __riscv_vlmul_ext_v_u32m1_u32m4(__VA_ARGS__) |
| #define | vlmul_ext_v_u32m1_u32m8(...) __riscv_vlmul_ext_v_u32m1_u32m8(__VA_ARGS__) |
| #define | vlmul_ext_v_u32m2_u32m4(...) __riscv_vlmul_ext_v_u32m2_u32m4(__VA_ARGS__) |
| #define | vlmul_ext_v_u32m2_u32m8(...) __riscv_vlmul_ext_v_u32m2_u32m8(__VA_ARGS__) |
| #define | vlmul_ext_v_u32m4_u32m8(...) __riscv_vlmul_ext_v_u32m4_u32m8(__VA_ARGS__) |
| #define | vlmul_ext_v_u32mf2_u32m1(...) __riscv_vlmul_ext_v_u32mf2_u32m1(__VA_ARGS__) |
| #define | vlmul_ext_v_u32mf2_u32m2(...) __riscv_vlmul_ext_v_u32mf2_u32m2(__VA_ARGS__) |
| #define | vlmul_ext_v_u32mf2_u32m4(...) __riscv_vlmul_ext_v_u32mf2_u32m4(__VA_ARGS__) |
| #define | vlmul_ext_v_u32mf2_u32m8(...) __riscv_vlmul_ext_v_u32mf2_u32m8(__VA_ARGS__) |
| #define | vlmul_ext_v_u64m1_u64m2(...) __riscv_vlmul_ext_v_u64m1_u64m2(__VA_ARGS__) |
| #define | vlmul_ext_v_u64m1_u64m4(...) __riscv_vlmul_ext_v_u64m1_u64m4(__VA_ARGS__) |
| #define | vlmul_ext_v_u64m1_u64m8(...) __riscv_vlmul_ext_v_u64m1_u64m8(__VA_ARGS__) |
| #define | vlmul_ext_v_u64m2_u64m4(...) __riscv_vlmul_ext_v_u64m2_u64m4(__VA_ARGS__) |
| #define | vlmul_ext_v_u64m2_u64m8(...) __riscv_vlmul_ext_v_u64m2_u64m8(__VA_ARGS__) |
| #define | vlmul_ext_v_u64m4_u64m8(...) __riscv_vlmul_ext_v_u64m4_u64m8(__VA_ARGS__) |
| #define | vlmul_ext_v_u8m1_u8m2(...) __riscv_vlmul_ext_v_u8m1_u8m2(__VA_ARGS__) |
| #define | vlmul_ext_v_u8m1_u8m4(...) __riscv_vlmul_ext_v_u8m1_u8m4(__VA_ARGS__) |
| #define | vlmul_ext_v_u8m1_u8m8(...) __riscv_vlmul_ext_v_u8m1_u8m8(__VA_ARGS__) |
| #define | vlmul_ext_v_u8m2_u8m4(...) __riscv_vlmul_ext_v_u8m2_u8m4(__VA_ARGS__) |
| #define | vlmul_ext_v_u8m2_u8m8(...) __riscv_vlmul_ext_v_u8m2_u8m8(__VA_ARGS__) |
| #define | vlmul_ext_v_u8m4_u8m8(...) __riscv_vlmul_ext_v_u8m4_u8m8(__VA_ARGS__) |
| #define | vlmul_ext_v_u8mf2_u8m1(...) __riscv_vlmul_ext_v_u8mf2_u8m1(__VA_ARGS__) |
| #define | vlmul_ext_v_u8mf2_u8m2(...) __riscv_vlmul_ext_v_u8mf2_u8m2(__VA_ARGS__) |
| #define | vlmul_ext_v_u8mf2_u8m4(...) __riscv_vlmul_ext_v_u8mf2_u8m4(__VA_ARGS__) |
| #define | vlmul_ext_v_u8mf2_u8m8(...) __riscv_vlmul_ext_v_u8mf2_u8m8(__VA_ARGS__) |
| #define | vlmul_ext_v_u8mf4_u8m1(...) __riscv_vlmul_ext_v_u8mf4_u8m1(__VA_ARGS__) |
| #define | vlmul_ext_v_u8mf4_u8m2(...) __riscv_vlmul_ext_v_u8mf4_u8m2(__VA_ARGS__) |
| #define | vlmul_ext_v_u8mf4_u8m4(...) __riscv_vlmul_ext_v_u8mf4_u8m4(__VA_ARGS__) |
| #define | vlmul_ext_v_u8mf4_u8m8(...) __riscv_vlmul_ext_v_u8mf4_u8m8(__VA_ARGS__) |
| #define | vlmul_ext_v_u8mf4_u8mf2(...) __riscv_vlmul_ext_v_u8mf4_u8mf2(__VA_ARGS__) |
| #define | vlmul_ext_v_u8mf8_u8m1(...) __riscv_vlmul_ext_v_u8mf8_u8m1(__VA_ARGS__) |
| #define | vlmul_ext_v_u8mf8_u8m2(...) __riscv_vlmul_ext_v_u8mf8_u8m2(__VA_ARGS__) |
| #define | vlmul_ext_v_u8mf8_u8m4(...) __riscv_vlmul_ext_v_u8mf8_u8m4(__VA_ARGS__) |
| #define | vlmul_ext_v_u8mf8_u8m8(...) __riscv_vlmul_ext_v_u8mf8_u8m8(__VA_ARGS__) |
| #define | vlmul_ext_v_u8mf8_u8mf2(...) __riscv_vlmul_ext_v_u8mf8_u8mf2(__VA_ARGS__) |
| #define | vlmul_ext_v_u8mf8_u8mf4(...) __riscv_vlmul_ext_v_u8mf8_u8mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_f16m1_f16mf2(...) __riscv_vlmul_trunc_v_f16m1_f16mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_f16m1_f16mf4(...) __riscv_vlmul_trunc_v_f16m1_f16mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_f16m2_f16m1(...) __riscv_vlmul_trunc_v_f16m2_f16m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_f16m2_f16mf2(...) __riscv_vlmul_trunc_v_f16m2_f16mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_f16m2_f16mf4(...) __riscv_vlmul_trunc_v_f16m2_f16mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_f16m4_f16m1(...) __riscv_vlmul_trunc_v_f16m4_f16m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_f16m4_f16m2(...) __riscv_vlmul_trunc_v_f16m4_f16m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_f16m4_f16mf2(...) __riscv_vlmul_trunc_v_f16m4_f16mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_f16m4_f16mf4(...) __riscv_vlmul_trunc_v_f16m4_f16mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_f16m8_f16m1(...) __riscv_vlmul_trunc_v_f16m8_f16m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_f16m8_f16m2(...) __riscv_vlmul_trunc_v_f16m8_f16m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_f16m8_f16m4(...) __riscv_vlmul_trunc_v_f16m8_f16m4(__VA_ARGS__) |
| #define | vlmul_trunc_v_f16m8_f16mf2(...) __riscv_vlmul_trunc_v_f16m8_f16mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_f16m8_f16mf4(...) __riscv_vlmul_trunc_v_f16m8_f16mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_f16mf2_f16mf4(...) __riscv_vlmul_trunc_v_f16mf2_f16mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_f32m1_f32mf2(...) __riscv_vlmul_trunc_v_f32m1_f32mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_f32m2_f32m1(...) __riscv_vlmul_trunc_v_f32m2_f32m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_f32m2_f32mf2(...) __riscv_vlmul_trunc_v_f32m2_f32mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_f32m4_f32m1(...) __riscv_vlmul_trunc_v_f32m4_f32m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_f32m4_f32m2(...) __riscv_vlmul_trunc_v_f32m4_f32m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_f32m4_f32mf2(...) __riscv_vlmul_trunc_v_f32m4_f32mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_f32m8_f32m1(...) __riscv_vlmul_trunc_v_f32m8_f32m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_f32m8_f32m2(...) __riscv_vlmul_trunc_v_f32m8_f32m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_f32m8_f32m4(...) __riscv_vlmul_trunc_v_f32m8_f32m4(__VA_ARGS__) |
| #define | vlmul_trunc_v_f32m8_f32mf2(...) __riscv_vlmul_trunc_v_f32m8_f32mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_f64m2_f64m1(...) __riscv_vlmul_trunc_v_f64m2_f64m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_f64m4_f64m1(...) __riscv_vlmul_trunc_v_f64m4_f64m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_f64m4_f64m2(...) __riscv_vlmul_trunc_v_f64m4_f64m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_f64m8_f64m1(...) __riscv_vlmul_trunc_v_f64m8_f64m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_f64m8_f64m2(...) __riscv_vlmul_trunc_v_f64m8_f64m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_f64m8_f64m4(...) __riscv_vlmul_trunc_v_f64m8_f64m4(__VA_ARGS__) |
| #define | vlmul_trunc_v_i16m1_i16mf2(...) __riscv_vlmul_trunc_v_i16m1_i16mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_i16m1_i16mf4(...) __riscv_vlmul_trunc_v_i16m1_i16mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_i16m2_i16m1(...) __riscv_vlmul_trunc_v_i16m2_i16m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_i16m2_i16mf2(...) __riscv_vlmul_trunc_v_i16m2_i16mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_i16m2_i16mf4(...) __riscv_vlmul_trunc_v_i16m2_i16mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_i16m4_i16m1(...) __riscv_vlmul_trunc_v_i16m4_i16m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_i16m4_i16m2(...) __riscv_vlmul_trunc_v_i16m4_i16m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_i16m4_i16mf2(...) __riscv_vlmul_trunc_v_i16m4_i16mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_i16m4_i16mf4(...) __riscv_vlmul_trunc_v_i16m4_i16mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_i16m8_i16m1(...) __riscv_vlmul_trunc_v_i16m8_i16m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_i16m8_i16m2(...) __riscv_vlmul_trunc_v_i16m8_i16m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_i16m8_i16m4(...) __riscv_vlmul_trunc_v_i16m8_i16m4(__VA_ARGS__) |
| #define | vlmul_trunc_v_i16m8_i16mf2(...) __riscv_vlmul_trunc_v_i16m8_i16mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_i16m8_i16mf4(...) __riscv_vlmul_trunc_v_i16m8_i16mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_i16mf2_i16mf4(...) __riscv_vlmul_trunc_v_i16mf2_i16mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_i32m1_i32mf2(...) __riscv_vlmul_trunc_v_i32m1_i32mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_i32m2_i32m1(...) __riscv_vlmul_trunc_v_i32m2_i32m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_i32m2_i32mf2(...) __riscv_vlmul_trunc_v_i32m2_i32mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_i32m4_i32m1(...) __riscv_vlmul_trunc_v_i32m4_i32m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_i32m4_i32m2(...) __riscv_vlmul_trunc_v_i32m4_i32m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_i32m4_i32mf2(...) __riscv_vlmul_trunc_v_i32m4_i32mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_i32m8_i32m1(...) __riscv_vlmul_trunc_v_i32m8_i32m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_i32m8_i32m2(...) __riscv_vlmul_trunc_v_i32m8_i32m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_i32m8_i32m4(...) __riscv_vlmul_trunc_v_i32m8_i32m4(__VA_ARGS__) |
| #define | vlmul_trunc_v_i32m8_i32mf2(...) __riscv_vlmul_trunc_v_i32m8_i32mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_i64m2_i64m1(...) __riscv_vlmul_trunc_v_i64m2_i64m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_i64m4_i64m1(...) __riscv_vlmul_trunc_v_i64m4_i64m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_i64m4_i64m2(...) __riscv_vlmul_trunc_v_i64m4_i64m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_i64m8_i64m1(...) __riscv_vlmul_trunc_v_i64m8_i64m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_i64m8_i64m2(...) __riscv_vlmul_trunc_v_i64m8_i64m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_i64m8_i64m4(...) __riscv_vlmul_trunc_v_i64m8_i64m4(__VA_ARGS__) |
| #define | vlmul_trunc_v_i8m1_i8mf2(...) __riscv_vlmul_trunc_v_i8m1_i8mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_i8m1_i8mf4(...) __riscv_vlmul_trunc_v_i8m1_i8mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_i8m1_i8mf8(...) __riscv_vlmul_trunc_v_i8m1_i8mf8(__VA_ARGS__) |
| #define | vlmul_trunc_v_i8m2_i8m1(...) __riscv_vlmul_trunc_v_i8m2_i8m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_i8m2_i8mf2(...) __riscv_vlmul_trunc_v_i8m2_i8mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_i8m2_i8mf4(...) __riscv_vlmul_trunc_v_i8m2_i8mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_i8m2_i8mf8(...) __riscv_vlmul_trunc_v_i8m2_i8mf8(__VA_ARGS__) |
| #define | vlmul_trunc_v_i8m4_i8m1(...) __riscv_vlmul_trunc_v_i8m4_i8m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_i8m4_i8m2(...) __riscv_vlmul_trunc_v_i8m4_i8m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_i8m4_i8mf2(...) __riscv_vlmul_trunc_v_i8m4_i8mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_i8m4_i8mf4(...) __riscv_vlmul_trunc_v_i8m4_i8mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_i8m4_i8mf8(...) __riscv_vlmul_trunc_v_i8m4_i8mf8(__VA_ARGS__) |
| #define | vlmul_trunc_v_i8m8_i8m1(...) __riscv_vlmul_trunc_v_i8m8_i8m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_i8m8_i8m2(...) __riscv_vlmul_trunc_v_i8m8_i8m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_i8m8_i8m4(...) __riscv_vlmul_trunc_v_i8m8_i8m4(__VA_ARGS__) |
| #define | vlmul_trunc_v_i8m8_i8mf2(...) __riscv_vlmul_trunc_v_i8m8_i8mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_i8m8_i8mf4(...) __riscv_vlmul_trunc_v_i8m8_i8mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_i8m8_i8mf8(...) __riscv_vlmul_trunc_v_i8m8_i8mf8(__VA_ARGS__) |
| #define | vlmul_trunc_v_i8mf2_i8mf4(...) __riscv_vlmul_trunc_v_i8mf2_i8mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_i8mf2_i8mf8(...) __riscv_vlmul_trunc_v_i8mf2_i8mf8(__VA_ARGS__) |
| #define | vlmul_trunc_v_i8mf4_i8mf8(...) __riscv_vlmul_trunc_v_i8mf4_i8mf8(__VA_ARGS__) |
| #define | vlmul_trunc_v_u16m1_u16mf2(...) __riscv_vlmul_trunc_v_u16m1_u16mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_u16m1_u16mf4(...) __riscv_vlmul_trunc_v_u16m1_u16mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_u16m2_u16m1(...) __riscv_vlmul_trunc_v_u16m2_u16m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_u16m2_u16mf2(...) __riscv_vlmul_trunc_v_u16m2_u16mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_u16m2_u16mf4(...) __riscv_vlmul_trunc_v_u16m2_u16mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_u16m4_u16m1(...) __riscv_vlmul_trunc_v_u16m4_u16m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_u16m4_u16m2(...) __riscv_vlmul_trunc_v_u16m4_u16m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_u16m4_u16mf2(...) __riscv_vlmul_trunc_v_u16m4_u16mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_u16m4_u16mf4(...) __riscv_vlmul_trunc_v_u16m4_u16mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_u16m8_u16m1(...) __riscv_vlmul_trunc_v_u16m8_u16m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_u16m8_u16m2(...) __riscv_vlmul_trunc_v_u16m8_u16m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_u16m8_u16m4(...) __riscv_vlmul_trunc_v_u16m8_u16m4(__VA_ARGS__) |
| #define | vlmul_trunc_v_u16m8_u16mf2(...) __riscv_vlmul_trunc_v_u16m8_u16mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_u16m8_u16mf4(...) __riscv_vlmul_trunc_v_u16m8_u16mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_u16mf2_u16mf4(...) __riscv_vlmul_trunc_v_u16mf2_u16mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_u32m1_u32mf2(...) __riscv_vlmul_trunc_v_u32m1_u32mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_u32m2_u32m1(...) __riscv_vlmul_trunc_v_u32m2_u32m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_u32m2_u32mf2(...) __riscv_vlmul_trunc_v_u32m2_u32mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_u32m4_u32m1(...) __riscv_vlmul_trunc_v_u32m4_u32m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_u32m4_u32m2(...) __riscv_vlmul_trunc_v_u32m4_u32m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_u32m4_u32mf2(...) __riscv_vlmul_trunc_v_u32m4_u32mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_u32m8_u32m1(...) __riscv_vlmul_trunc_v_u32m8_u32m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_u32m8_u32m2(...) __riscv_vlmul_trunc_v_u32m8_u32m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_u32m8_u32m4(...) __riscv_vlmul_trunc_v_u32m8_u32m4(__VA_ARGS__) |
| #define | vlmul_trunc_v_u32m8_u32mf2(...) __riscv_vlmul_trunc_v_u32m8_u32mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_u64m2_u64m1(...) __riscv_vlmul_trunc_v_u64m2_u64m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_u64m4_u64m1(...) __riscv_vlmul_trunc_v_u64m4_u64m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_u64m4_u64m2(...) __riscv_vlmul_trunc_v_u64m4_u64m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_u64m8_u64m1(...) __riscv_vlmul_trunc_v_u64m8_u64m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_u64m8_u64m2(...) __riscv_vlmul_trunc_v_u64m8_u64m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_u64m8_u64m4(...) __riscv_vlmul_trunc_v_u64m8_u64m4(__VA_ARGS__) |
| #define | vlmul_trunc_v_u8m1_u8mf2(...) __riscv_vlmul_trunc_v_u8m1_u8mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_u8m1_u8mf4(...) __riscv_vlmul_trunc_v_u8m1_u8mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_u8m1_u8mf8(...) __riscv_vlmul_trunc_v_u8m1_u8mf8(__VA_ARGS__) |
| #define | vlmul_trunc_v_u8m2_u8m1(...) __riscv_vlmul_trunc_v_u8m2_u8m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_u8m2_u8mf2(...) __riscv_vlmul_trunc_v_u8m2_u8mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_u8m2_u8mf4(...) __riscv_vlmul_trunc_v_u8m2_u8mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_u8m2_u8mf8(...) __riscv_vlmul_trunc_v_u8m2_u8mf8(__VA_ARGS__) |
| #define | vlmul_trunc_v_u8m4_u8m1(...) __riscv_vlmul_trunc_v_u8m4_u8m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_u8m4_u8m2(...) __riscv_vlmul_trunc_v_u8m4_u8m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_u8m4_u8mf2(...) __riscv_vlmul_trunc_v_u8m4_u8mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_u8m4_u8mf4(...) __riscv_vlmul_trunc_v_u8m4_u8mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_u8m4_u8mf8(...) __riscv_vlmul_trunc_v_u8m4_u8mf8(__VA_ARGS__) |
| #define | vlmul_trunc_v_u8m8_u8m1(...) __riscv_vlmul_trunc_v_u8m8_u8m1(__VA_ARGS__) |
| #define | vlmul_trunc_v_u8m8_u8m2(...) __riscv_vlmul_trunc_v_u8m8_u8m2(__VA_ARGS__) |
| #define | vlmul_trunc_v_u8m8_u8m4(...) __riscv_vlmul_trunc_v_u8m8_u8m4(__VA_ARGS__) |
| #define | vlmul_trunc_v_u8m8_u8mf2(...) __riscv_vlmul_trunc_v_u8m8_u8mf2(__VA_ARGS__) |
| #define | vlmul_trunc_v_u8m8_u8mf4(...) __riscv_vlmul_trunc_v_u8m8_u8mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_u8m8_u8mf8(...) __riscv_vlmul_trunc_v_u8m8_u8mf8(__VA_ARGS__) |
| #define | vlmul_trunc_v_u8mf2_u8mf4(...) __riscv_vlmul_trunc_v_u8mf2_u8mf4(__VA_ARGS__) |
| #define | vlmul_trunc_v_u8mf2_u8mf8(...) __riscv_vlmul_trunc_v_u8mf2_u8mf8(__VA_ARGS__) |
| #define | vlmul_trunc_v_u8mf4_u8mf8(...) __riscv_vlmul_trunc_v_u8mf4_u8mf8(__VA_ARGS__) |
| #define | vloxei16_v_f16m1(...) __riscv_vloxei16_v_f16m1(__VA_ARGS__) |
| #define | vloxei16_v_f16m1_m(...) __riscv_vloxei16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxei16_v_f16m2(...) __riscv_vloxei16_v_f16m2(__VA_ARGS__) |
| #define | vloxei16_v_f16m2_m(...) __riscv_vloxei16_v_f16m2_tumu(__VA_ARGS__) |
| #define | vloxei16_v_f16m4(...) __riscv_vloxei16_v_f16m4(__VA_ARGS__) |
| #define | vloxei16_v_f16m4_m(...) __riscv_vloxei16_v_f16m4_tumu(__VA_ARGS__) |
| #define | vloxei16_v_f16m8(...) __riscv_vloxei16_v_f16m8(__VA_ARGS__) |
| #define | vloxei16_v_f16m8_m(...) __riscv_vloxei16_v_f16m8_tumu(__VA_ARGS__) |
| #define | vloxei16_v_f16mf2(...) __riscv_vloxei16_v_f16mf2(__VA_ARGS__) |
| #define | vloxei16_v_f16mf2_m(...) __riscv_vloxei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxei16_v_f16mf4(...) __riscv_vloxei16_v_f16mf4(__VA_ARGS__) |
| #define | vloxei16_v_f16mf4_m(...) __riscv_vloxei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxei16_v_f32m1(...) __riscv_vloxei16_v_f32m1(__VA_ARGS__) |
| #define | vloxei16_v_f32m1_m(...) __riscv_vloxei16_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxei16_v_f32m2(...) __riscv_vloxei16_v_f32m2(__VA_ARGS__) |
| #define | vloxei16_v_f32m2_m(...) __riscv_vloxei16_v_f32m2_tumu(__VA_ARGS__) |
| #define | vloxei16_v_f32m4(...) __riscv_vloxei16_v_f32m4(__VA_ARGS__) |
| #define | vloxei16_v_f32m4_m(...) __riscv_vloxei16_v_f32m4_tumu(__VA_ARGS__) |
| #define | vloxei16_v_f32m8(...) __riscv_vloxei16_v_f32m8(__VA_ARGS__) |
| #define | vloxei16_v_f32m8_m(...) __riscv_vloxei16_v_f32m8_tumu(__VA_ARGS__) |
| #define | vloxei16_v_f32mf2(...) __riscv_vloxei16_v_f32mf2(__VA_ARGS__) |
| #define | vloxei16_v_f32mf2_m(...) __riscv_vloxei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxei16_v_f64m1(...) __riscv_vloxei16_v_f64m1(__VA_ARGS__) |
| #define | vloxei16_v_f64m1_m(...) __riscv_vloxei16_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxei16_v_f64m2(...) __riscv_vloxei16_v_f64m2(__VA_ARGS__) |
| #define | vloxei16_v_f64m2_m(...) __riscv_vloxei16_v_f64m2_tumu(__VA_ARGS__) |
| #define | vloxei16_v_f64m4(...) __riscv_vloxei16_v_f64m4(__VA_ARGS__) |
| #define | vloxei16_v_f64m4_m(...) __riscv_vloxei16_v_f64m4_tumu(__VA_ARGS__) |
| #define | vloxei16_v_f64m8(...) __riscv_vloxei16_v_f64m8(__VA_ARGS__) |
| #define | vloxei16_v_f64m8_m(...) __riscv_vloxei16_v_f64m8_tumu(__VA_ARGS__) |
| #define | vloxei16_v_i16m1(...) __riscv_vloxei16_v_i16m1(__VA_ARGS__) |
| #define | vloxei16_v_i16m1_m(...) __riscv_vloxei16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxei16_v_i16m2(...) __riscv_vloxei16_v_i16m2(__VA_ARGS__) |
| #define | vloxei16_v_i16m2_m(...) __riscv_vloxei16_v_i16m2_tumu(__VA_ARGS__) |
| #define | vloxei16_v_i16m4(...) __riscv_vloxei16_v_i16m4(__VA_ARGS__) |
| #define | vloxei16_v_i16m4_m(...) __riscv_vloxei16_v_i16m4_tumu(__VA_ARGS__) |
| #define | vloxei16_v_i16m8(...) __riscv_vloxei16_v_i16m8(__VA_ARGS__) |
| #define | vloxei16_v_i16m8_m(...) __riscv_vloxei16_v_i16m8_tumu(__VA_ARGS__) |
| #define | vloxei16_v_i16mf2(...) __riscv_vloxei16_v_i16mf2(__VA_ARGS__) |
| #define | vloxei16_v_i16mf2_m(...) __riscv_vloxei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxei16_v_i16mf4(...) __riscv_vloxei16_v_i16mf4(__VA_ARGS__) |
| #define | vloxei16_v_i16mf4_m(...) __riscv_vloxei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxei16_v_i32m1(...) __riscv_vloxei16_v_i32m1(__VA_ARGS__) |
| #define | vloxei16_v_i32m1_m(...) __riscv_vloxei16_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxei16_v_i32m2(...) __riscv_vloxei16_v_i32m2(__VA_ARGS__) |
| #define | vloxei16_v_i32m2_m(...) __riscv_vloxei16_v_i32m2_tumu(__VA_ARGS__) |
| #define | vloxei16_v_i32m4(...) __riscv_vloxei16_v_i32m4(__VA_ARGS__) |
| #define | vloxei16_v_i32m4_m(...) __riscv_vloxei16_v_i32m4_tumu(__VA_ARGS__) |
| #define | vloxei16_v_i32m8(...) __riscv_vloxei16_v_i32m8(__VA_ARGS__) |
| #define | vloxei16_v_i32m8_m(...) __riscv_vloxei16_v_i32m8_tumu(__VA_ARGS__) |
| #define | vloxei16_v_i32mf2(...) __riscv_vloxei16_v_i32mf2(__VA_ARGS__) |
| #define | vloxei16_v_i32mf2_m(...) __riscv_vloxei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxei16_v_i64m1(...) __riscv_vloxei16_v_i64m1(__VA_ARGS__) |
| #define | vloxei16_v_i64m1_m(...) __riscv_vloxei16_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxei16_v_i64m2(...) __riscv_vloxei16_v_i64m2(__VA_ARGS__) |
| #define | vloxei16_v_i64m2_m(...) __riscv_vloxei16_v_i64m2_tumu(__VA_ARGS__) |
| #define | vloxei16_v_i64m4(...) __riscv_vloxei16_v_i64m4(__VA_ARGS__) |
| #define | vloxei16_v_i64m4_m(...) __riscv_vloxei16_v_i64m4_tumu(__VA_ARGS__) |
| #define | vloxei16_v_i64m8(...) __riscv_vloxei16_v_i64m8(__VA_ARGS__) |
| #define | vloxei16_v_i64m8_m(...) __riscv_vloxei16_v_i64m8_tumu(__VA_ARGS__) |
| #define | vloxei16_v_i8m1(...) __riscv_vloxei16_v_i8m1(__VA_ARGS__) |
| #define | vloxei16_v_i8m1_m(...) __riscv_vloxei16_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxei16_v_i8m2(...) __riscv_vloxei16_v_i8m2(__VA_ARGS__) |
| #define | vloxei16_v_i8m2_m(...) __riscv_vloxei16_v_i8m2_tumu(__VA_ARGS__) |
| #define | vloxei16_v_i8m4(...) __riscv_vloxei16_v_i8m4(__VA_ARGS__) |
| #define | vloxei16_v_i8m4_m(...) __riscv_vloxei16_v_i8m4_tumu(__VA_ARGS__) |
| #define | vloxei16_v_i8mf2(...) __riscv_vloxei16_v_i8mf2(__VA_ARGS__) |
| #define | vloxei16_v_i8mf2_m(...) __riscv_vloxei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxei16_v_i8mf4(...) __riscv_vloxei16_v_i8mf4(__VA_ARGS__) |
| #define | vloxei16_v_i8mf4_m(...) __riscv_vloxei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxei16_v_i8mf8(...) __riscv_vloxei16_v_i8mf8(__VA_ARGS__) |
| #define | vloxei16_v_i8mf8_m(...) __riscv_vloxei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxei16_v_u16m1(...) __riscv_vloxei16_v_u16m1(__VA_ARGS__) |
| #define | vloxei16_v_u16m1_m(...) __riscv_vloxei16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxei16_v_u16m2(...) __riscv_vloxei16_v_u16m2(__VA_ARGS__) |
| #define | vloxei16_v_u16m2_m(...) __riscv_vloxei16_v_u16m2_tumu(__VA_ARGS__) |
| #define | vloxei16_v_u16m4(...) __riscv_vloxei16_v_u16m4(__VA_ARGS__) |
| #define | vloxei16_v_u16m4_m(...) __riscv_vloxei16_v_u16m4_tumu(__VA_ARGS__) |
| #define | vloxei16_v_u16m8(...) __riscv_vloxei16_v_u16m8(__VA_ARGS__) |
| #define | vloxei16_v_u16m8_m(...) __riscv_vloxei16_v_u16m8_tumu(__VA_ARGS__) |
| #define | vloxei16_v_u16mf2(...) __riscv_vloxei16_v_u16mf2(__VA_ARGS__) |
| #define | vloxei16_v_u16mf2_m(...) __riscv_vloxei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxei16_v_u16mf4(...) __riscv_vloxei16_v_u16mf4(__VA_ARGS__) |
| #define | vloxei16_v_u16mf4_m(...) __riscv_vloxei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxei16_v_u32m1(...) __riscv_vloxei16_v_u32m1(__VA_ARGS__) |
| #define | vloxei16_v_u32m1_m(...) __riscv_vloxei16_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxei16_v_u32m2(...) __riscv_vloxei16_v_u32m2(__VA_ARGS__) |
| #define | vloxei16_v_u32m2_m(...) __riscv_vloxei16_v_u32m2_tumu(__VA_ARGS__) |
| #define | vloxei16_v_u32m4(...) __riscv_vloxei16_v_u32m4(__VA_ARGS__) |
| #define | vloxei16_v_u32m4_m(...) __riscv_vloxei16_v_u32m4_tumu(__VA_ARGS__) |
| #define | vloxei16_v_u32m8(...) __riscv_vloxei16_v_u32m8(__VA_ARGS__) |
| #define | vloxei16_v_u32m8_m(...) __riscv_vloxei16_v_u32m8_tumu(__VA_ARGS__) |
| #define | vloxei16_v_u32mf2(...) __riscv_vloxei16_v_u32mf2(__VA_ARGS__) |
| #define | vloxei16_v_u32mf2_m(...) __riscv_vloxei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxei16_v_u64m1(...) __riscv_vloxei16_v_u64m1(__VA_ARGS__) |
| #define | vloxei16_v_u64m1_m(...) __riscv_vloxei16_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxei16_v_u64m2(...) __riscv_vloxei16_v_u64m2(__VA_ARGS__) |
| #define | vloxei16_v_u64m2_m(...) __riscv_vloxei16_v_u64m2_tumu(__VA_ARGS__) |
| #define | vloxei16_v_u64m4(...) __riscv_vloxei16_v_u64m4(__VA_ARGS__) |
| #define | vloxei16_v_u64m4_m(...) __riscv_vloxei16_v_u64m4_tumu(__VA_ARGS__) |
| #define | vloxei16_v_u64m8(...) __riscv_vloxei16_v_u64m8(__VA_ARGS__) |
| #define | vloxei16_v_u64m8_m(...) __riscv_vloxei16_v_u64m8_tumu(__VA_ARGS__) |
| #define | vloxei16_v_u8m1(...) __riscv_vloxei16_v_u8m1(__VA_ARGS__) |
| #define | vloxei16_v_u8m1_m(...) __riscv_vloxei16_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxei16_v_u8m2(...) __riscv_vloxei16_v_u8m2(__VA_ARGS__) |
| #define | vloxei16_v_u8m2_m(...) __riscv_vloxei16_v_u8m2_tumu(__VA_ARGS__) |
| #define | vloxei16_v_u8m4(...) __riscv_vloxei16_v_u8m4(__VA_ARGS__) |
| #define | vloxei16_v_u8m4_m(...) __riscv_vloxei16_v_u8m4_tumu(__VA_ARGS__) |
| #define | vloxei16_v_u8mf2(...) __riscv_vloxei16_v_u8mf2(__VA_ARGS__) |
| #define | vloxei16_v_u8mf2_m(...) __riscv_vloxei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxei16_v_u8mf4(...) __riscv_vloxei16_v_u8mf4(__VA_ARGS__) |
| #define | vloxei16_v_u8mf4_m(...) __riscv_vloxei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxei16_v_u8mf8(...) __riscv_vloxei16_v_u8mf8(__VA_ARGS__) |
| #define | vloxei16_v_u8mf8_m(...) __riscv_vloxei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxei32_v_f16m1(...) __riscv_vloxei32_v_f16m1(__VA_ARGS__) |
| #define | vloxei32_v_f16m1_m(...) __riscv_vloxei32_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxei32_v_f16m2(...) __riscv_vloxei32_v_f16m2(__VA_ARGS__) |
| #define | vloxei32_v_f16m2_m(...) __riscv_vloxei32_v_f16m2_tumu(__VA_ARGS__) |
| #define | vloxei32_v_f16m4(...) __riscv_vloxei32_v_f16m4(__VA_ARGS__) |
| #define | vloxei32_v_f16m4_m(...) __riscv_vloxei32_v_f16m4_tumu(__VA_ARGS__) |
| #define | vloxei32_v_f16mf2(...) __riscv_vloxei32_v_f16mf2(__VA_ARGS__) |
| #define | vloxei32_v_f16mf2_m(...) __riscv_vloxei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxei32_v_f16mf4(...) __riscv_vloxei32_v_f16mf4(__VA_ARGS__) |
| #define | vloxei32_v_f16mf4_m(...) __riscv_vloxei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxei32_v_f32m1(...) __riscv_vloxei32_v_f32m1(__VA_ARGS__) |
| #define | vloxei32_v_f32m1_m(...) __riscv_vloxei32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxei32_v_f32m2(...) __riscv_vloxei32_v_f32m2(__VA_ARGS__) |
| #define | vloxei32_v_f32m2_m(...) __riscv_vloxei32_v_f32m2_tumu(__VA_ARGS__) |
| #define | vloxei32_v_f32m4(...) __riscv_vloxei32_v_f32m4(__VA_ARGS__) |
| #define | vloxei32_v_f32m4_m(...) __riscv_vloxei32_v_f32m4_tumu(__VA_ARGS__) |
| #define | vloxei32_v_f32m8(...) __riscv_vloxei32_v_f32m8(__VA_ARGS__) |
| #define | vloxei32_v_f32m8_m(...) __riscv_vloxei32_v_f32m8_tumu(__VA_ARGS__) |
| #define | vloxei32_v_f32mf2(...) __riscv_vloxei32_v_f32mf2(__VA_ARGS__) |
| #define | vloxei32_v_f32mf2_m(...) __riscv_vloxei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxei32_v_f64m1(...) __riscv_vloxei32_v_f64m1(__VA_ARGS__) |
| #define | vloxei32_v_f64m1_m(...) __riscv_vloxei32_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxei32_v_f64m2(...) __riscv_vloxei32_v_f64m2(__VA_ARGS__) |
| #define | vloxei32_v_f64m2_m(...) __riscv_vloxei32_v_f64m2_tumu(__VA_ARGS__) |
| #define | vloxei32_v_f64m4(...) __riscv_vloxei32_v_f64m4(__VA_ARGS__) |
| #define | vloxei32_v_f64m4_m(...) __riscv_vloxei32_v_f64m4_tumu(__VA_ARGS__) |
| #define | vloxei32_v_f64m8(...) __riscv_vloxei32_v_f64m8(__VA_ARGS__) |
| #define | vloxei32_v_f64m8_m(...) __riscv_vloxei32_v_f64m8_tumu(__VA_ARGS__) |
| #define | vloxei32_v_i16m1(...) __riscv_vloxei32_v_i16m1(__VA_ARGS__) |
| #define | vloxei32_v_i16m1_m(...) __riscv_vloxei32_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxei32_v_i16m2(...) __riscv_vloxei32_v_i16m2(__VA_ARGS__) |
| #define | vloxei32_v_i16m2_m(...) __riscv_vloxei32_v_i16m2_tumu(__VA_ARGS__) |
| #define | vloxei32_v_i16m4(...) __riscv_vloxei32_v_i16m4(__VA_ARGS__) |
| #define | vloxei32_v_i16m4_m(...) __riscv_vloxei32_v_i16m4_tumu(__VA_ARGS__) |
| #define | vloxei32_v_i16mf2(...) __riscv_vloxei32_v_i16mf2(__VA_ARGS__) |
| #define | vloxei32_v_i16mf2_m(...) __riscv_vloxei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxei32_v_i16mf4(...) __riscv_vloxei32_v_i16mf4(__VA_ARGS__) |
| #define | vloxei32_v_i16mf4_m(...) __riscv_vloxei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxei32_v_i32m1(...) __riscv_vloxei32_v_i32m1(__VA_ARGS__) |
| #define | vloxei32_v_i32m1_m(...) __riscv_vloxei32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxei32_v_i32m2(...) __riscv_vloxei32_v_i32m2(__VA_ARGS__) |
| #define | vloxei32_v_i32m2_m(...) __riscv_vloxei32_v_i32m2_tumu(__VA_ARGS__) |
| #define | vloxei32_v_i32m4(...) __riscv_vloxei32_v_i32m4(__VA_ARGS__) |
| #define | vloxei32_v_i32m4_m(...) __riscv_vloxei32_v_i32m4_tumu(__VA_ARGS__) |
| #define | vloxei32_v_i32m8(...) __riscv_vloxei32_v_i32m8(__VA_ARGS__) |
| #define | vloxei32_v_i32m8_m(...) __riscv_vloxei32_v_i32m8_tumu(__VA_ARGS__) |
| #define | vloxei32_v_i32mf2(...) __riscv_vloxei32_v_i32mf2(__VA_ARGS__) |
| #define | vloxei32_v_i32mf2_m(...) __riscv_vloxei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxei32_v_i64m1(...) __riscv_vloxei32_v_i64m1(__VA_ARGS__) |
| #define | vloxei32_v_i64m1_m(...) __riscv_vloxei32_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxei32_v_i64m2(...) __riscv_vloxei32_v_i64m2(__VA_ARGS__) |
| #define | vloxei32_v_i64m2_m(...) __riscv_vloxei32_v_i64m2_tumu(__VA_ARGS__) |
| #define | vloxei32_v_i64m4(...) __riscv_vloxei32_v_i64m4(__VA_ARGS__) |
| #define | vloxei32_v_i64m4_m(...) __riscv_vloxei32_v_i64m4_tumu(__VA_ARGS__) |
| #define | vloxei32_v_i64m8(...) __riscv_vloxei32_v_i64m8(__VA_ARGS__) |
| #define | vloxei32_v_i64m8_m(...) __riscv_vloxei32_v_i64m8_tumu(__VA_ARGS__) |
| #define | vloxei32_v_i8m1(...) __riscv_vloxei32_v_i8m1(__VA_ARGS__) |
| #define | vloxei32_v_i8m1_m(...) __riscv_vloxei32_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxei32_v_i8m2(...) __riscv_vloxei32_v_i8m2(__VA_ARGS__) |
| #define | vloxei32_v_i8m2_m(...) __riscv_vloxei32_v_i8m2_tumu(__VA_ARGS__) |
| #define | vloxei32_v_i8mf2(...) __riscv_vloxei32_v_i8mf2(__VA_ARGS__) |
| #define | vloxei32_v_i8mf2_m(...) __riscv_vloxei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxei32_v_i8mf4(...) __riscv_vloxei32_v_i8mf4(__VA_ARGS__) |
| #define | vloxei32_v_i8mf4_m(...) __riscv_vloxei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxei32_v_i8mf8(...) __riscv_vloxei32_v_i8mf8(__VA_ARGS__) |
| #define | vloxei32_v_i8mf8_m(...) __riscv_vloxei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxei32_v_u16m1(...) __riscv_vloxei32_v_u16m1(__VA_ARGS__) |
| #define | vloxei32_v_u16m1_m(...) __riscv_vloxei32_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxei32_v_u16m2(...) __riscv_vloxei32_v_u16m2(__VA_ARGS__) |
| #define | vloxei32_v_u16m2_m(...) __riscv_vloxei32_v_u16m2_tumu(__VA_ARGS__) |
| #define | vloxei32_v_u16m4(...) __riscv_vloxei32_v_u16m4(__VA_ARGS__) |
| #define | vloxei32_v_u16m4_m(...) __riscv_vloxei32_v_u16m4_tumu(__VA_ARGS__) |
| #define | vloxei32_v_u16mf2(...) __riscv_vloxei32_v_u16mf2(__VA_ARGS__) |
| #define | vloxei32_v_u16mf2_m(...) __riscv_vloxei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxei32_v_u16mf4(...) __riscv_vloxei32_v_u16mf4(__VA_ARGS__) |
| #define | vloxei32_v_u16mf4_m(...) __riscv_vloxei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxei32_v_u32m1(...) __riscv_vloxei32_v_u32m1(__VA_ARGS__) |
| #define | vloxei32_v_u32m1_m(...) __riscv_vloxei32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxei32_v_u32m2(...) __riscv_vloxei32_v_u32m2(__VA_ARGS__) |
| #define | vloxei32_v_u32m2_m(...) __riscv_vloxei32_v_u32m2_tumu(__VA_ARGS__) |
| #define | vloxei32_v_u32m4(...) __riscv_vloxei32_v_u32m4(__VA_ARGS__) |
| #define | vloxei32_v_u32m4_m(...) __riscv_vloxei32_v_u32m4_tumu(__VA_ARGS__) |
| #define | vloxei32_v_u32m8(...) __riscv_vloxei32_v_u32m8(__VA_ARGS__) |
| #define | vloxei32_v_u32m8_m(...) __riscv_vloxei32_v_u32m8_tumu(__VA_ARGS__) |
| #define | vloxei32_v_u32mf2(...) __riscv_vloxei32_v_u32mf2(__VA_ARGS__) |
| #define | vloxei32_v_u32mf2_m(...) __riscv_vloxei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxei32_v_u64m1(...) __riscv_vloxei32_v_u64m1(__VA_ARGS__) |
| #define | vloxei32_v_u64m1_m(...) __riscv_vloxei32_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxei32_v_u64m2(...) __riscv_vloxei32_v_u64m2(__VA_ARGS__) |
| #define | vloxei32_v_u64m2_m(...) __riscv_vloxei32_v_u64m2_tumu(__VA_ARGS__) |
| #define | vloxei32_v_u64m4(...) __riscv_vloxei32_v_u64m4(__VA_ARGS__) |
| #define | vloxei32_v_u64m4_m(...) __riscv_vloxei32_v_u64m4_tumu(__VA_ARGS__) |
| #define | vloxei32_v_u64m8(...) __riscv_vloxei32_v_u64m8(__VA_ARGS__) |
| #define | vloxei32_v_u64m8_m(...) __riscv_vloxei32_v_u64m8_tumu(__VA_ARGS__) |
| #define | vloxei32_v_u8m1(...) __riscv_vloxei32_v_u8m1(__VA_ARGS__) |
| #define | vloxei32_v_u8m1_m(...) __riscv_vloxei32_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxei32_v_u8m2(...) __riscv_vloxei32_v_u8m2(__VA_ARGS__) |
| #define | vloxei32_v_u8m2_m(...) __riscv_vloxei32_v_u8m2_tumu(__VA_ARGS__) |
| #define | vloxei32_v_u8mf2(...) __riscv_vloxei32_v_u8mf2(__VA_ARGS__) |
| #define | vloxei32_v_u8mf2_m(...) __riscv_vloxei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxei32_v_u8mf4(...) __riscv_vloxei32_v_u8mf4(__VA_ARGS__) |
| #define | vloxei32_v_u8mf4_m(...) __riscv_vloxei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxei32_v_u8mf8(...) __riscv_vloxei32_v_u8mf8(__VA_ARGS__) |
| #define | vloxei32_v_u8mf8_m(...) __riscv_vloxei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxei64_v_f16m1(...) __riscv_vloxei64_v_f16m1(__VA_ARGS__) |
| #define | vloxei64_v_f16m1_m(...) __riscv_vloxei64_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxei64_v_f16m2(...) __riscv_vloxei64_v_f16m2(__VA_ARGS__) |
| #define | vloxei64_v_f16m2_m(...) __riscv_vloxei64_v_f16m2_tumu(__VA_ARGS__) |
| #define | vloxei64_v_f16mf2(...) __riscv_vloxei64_v_f16mf2(__VA_ARGS__) |
| #define | vloxei64_v_f16mf2_m(...) __riscv_vloxei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxei64_v_f16mf4(...) __riscv_vloxei64_v_f16mf4(__VA_ARGS__) |
| #define | vloxei64_v_f16mf4_m(...) __riscv_vloxei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxei64_v_f32m1(...) __riscv_vloxei64_v_f32m1(__VA_ARGS__) |
| #define | vloxei64_v_f32m1_m(...) __riscv_vloxei64_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxei64_v_f32m2(...) __riscv_vloxei64_v_f32m2(__VA_ARGS__) |
| #define | vloxei64_v_f32m2_m(...) __riscv_vloxei64_v_f32m2_tumu(__VA_ARGS__) |
| #define | vloxei64_v_f32m4(...) __riscv_vloxei64_v_f32m4(__VA_ARGS__) |
| #define | vloxei64_v_f32m4_m(...) __riscv_vloxei64_v_f32m4_tumu(__VA_ARGS__) |
| #define | vloxei64_v_f32mf2(...) __riscv_vloxei64_v_f32mf2(__VA_ARGS__) |
| #define | vloxei64_v_f32mf2_m(...) __riscv_vloxei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxei64_v_f64m1(...) __riscv_vloxei64_v_f64m1(__VA_ARGS__) |
| #define | vloxei64_v_f64m1_m(...) __riscv_vloxei64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxei64_v_f64m2(...) __riscv_vloxei64_v_f64m2(__VA_ARGS__) |
| #define | vloxei64_v_f64m2_m(...) __riscv_vloxei64_v_f64m2_tumu(__VA_ARGS__) |
| #define | vloxei64_v_f64m4(...) __riscv_vloxei64_v_f64m4(__VA_ARGS__) |
| #define | vloxei64_v_f64m4_m(...) __riscv_vloxei64_v_f64m4_tumu(__VA_ARGS__) |
| #define | vloxei64_v_f64m8(...) __riscv_vloxei64_v_f64m8(__VA_ARGS__) |
| #define | vloxei64_v_f64m8_m(...) __riscv_vloxei64_v_f64m8_tumu(__VA_ARGS__) |
| #define | vloxei64_v_i16m1(...) __riscv_vloxei64_v_i16m1(__VA_ARGS__) |
| #define | vloxei64_v_i16m1_m(...) __riscv_vloxei64_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxei64_v_i16m2(...) __riscv_vloxei64_v_i16m2(__VA_ARGS__) |
| #define | vloxei64_v_i16m2_m(...) __riscv_vloxei64_v_i16m2_tumu(__VA_ARGS__) |
| #define | vloxei64_v_i16mf2(...) __riscv_vloxei64_v_i16mf2(__VA_ARGS__) |
| #define | vloxei64_v_i16mf2_m(...) __riscv_vloxei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxei64_v_i16mf4(...) __riscv_vloxei64_v_i16mf4(__VA_ARGS__) |
| #define | vloxei64_v_i16mf4_m(...) __riscv_vloxei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxei64_v_i32m1(...) __riscv_vloxei64_v_i32m1(__VA_ARGS__) |
| #define | vloxei64_v_i32m1_m(...) __riscv_vloxei64_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxei64_v_i32m2(...) __riscv_vloxei64_v_i32m2(__VA_ARGS__) |
| #define | vloxei64_v_i32m2_m(...) __riscv_vloxei64_v_i32m2_tumu(__VA_ARGS__) |
| #define | vloxei64_v_i32m4(...) __riscv_vloxei64_v_i32m4(__VA_ARGS__) |
| #define | vloxei64_v_i32m4_m(...) __riscv_vloxei64_v_i32m4_tumu(__VA_ARGS__) |
| #define | vloxei64_v_i32mf2(...) __riscv_vloxei64_v_i32mf2(__VA_ARGS__) |
| #define | vloxei64_v_i32mf2_m(...) __riscv_vloxei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxei64_v_i64m1(...) __riscv_vloxei64_v_i64m1(__VA_ARGS__) |
| #define | vloxei64_v_i64m1_m(...) __riscv_vloxei64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxei64_v_i64m2(...) __riscv_vloxei64_v_i64m2(__VA_ARGS__) |
| #define | vloxei64_v_i64m2_m(...) __riscv_vloxei64_v_i64m2_tumu(__VA_ARGS__) |
| #define | vloxei64_v_i64m4(...) __riscv_vloxei64_v_i64m4(__VA_ARGS__) |
| #define | vloxei64_v_i64m4_m(...) __riscv_vloxei64_v_i64m4_tumu(__VA_ARGS__) |
| #define | vloxei64_v_i64m8(...) __riscv_vloxei64_v_i64m8(__VA_ARGS__) |
| #define | vloxei64_v_i64m8_m(...) __riscv_vloxei64_v_i64m8_tumu(__VA_ARGS__) |
| #define | vloxei64_v_i8m1(...) __riscv_vloxei64_v_i8m1(__VA_ARGS__) |
| #define | vloxei64_v_i8m1_m(...) __riscv_vloxei64_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxei64_v_i8mf2(...) __riscv_vloxei64_v_i8mf2(__VA_ARGS__) |
| #define | vloxei64_v_i8mf2_m(...) __riscv_vloxei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxei64_v_i8mf4(...) __riscv_vloxei64_v_i8mf4(__VA_ARGS__) |
| #define | vloxei64_v_i8mf4_m(...) __riscv_vloxei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxei64_v_i8mf8(...) __riscv_vloxei64_v_i8mf8(__VA_ARGS__) |
| #define | vloxei64_v_i8mf8_m(...) __riscv_vloxei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxei64_v_u16m1(...) __riscv_vloxei64_v_u16m1(__VA_ARGS__) |
| #define | vloxei64_v_u16m1_m(...) __riscv_vloxei64_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxei64_v_u16m2(...) __riscv_vloxei64_v_u16m2(__VA_ARGS__) |
| #define | vloxei64_v_u16m2_m(...) __riscv_vloxei64_v_u16m2_tumu(__VA_ARGS__) |
| #define | vloxei64_v_u16mf2(...) __riscv_vloxei64_v_u16mf2(__VA_ARGS__) |
| #define | vloxei64_v_u16mf2_m(...) __riscv_vloxei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxei64_v_u16mf4(...) __riscv_vloxei64_v_u16mf4(__VA_ARGS__) |
| #define | vloxei64_v_u16mf4_m(...) __riscv_vloxei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxei64_v_u32m1(...) __riscv_vloxei64_v_u32m1(__VA_ARGS__) |
| #define | vloxei64_v_u32m1_m(...) __riscv_vloxei64_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxei64_v_u32m2(...) __riscv_vloxei64_v_u32m2(__VA_ARGS__) |
| #define | vloxei64_v_u32m2_m(...) __riscv_vloxei64_v_u32m2_tumu(__VA_ARGS__) |
| #define | vloxei64_v_u32m4(...) __riscv_vloxei64_v_u32m4(__VA_ARGS__) |
| #define | vloxei64_v_u32m4_m(...) __riscv_vloxei64_v_u32m4_tumu(__VA_ARGS__) |
| #define | vloxei64_v_u32mf2(...) __riscv_vloxei64_v_u32mf2(__VA_ARGS__) |
| #define | vloxei64_v_u32mf2_m(...) __riscv_vloxei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxei64_v_u64m1(...) __riscv_vloxei64_v_u64m1(__VA_ARGS__) |
| #define | vloxei64_v_u64m1_m(...) __riscv_vloxei64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxei64_v_u64m2(...) __riscv_vloxei64_v_u64m2(__VA_ARGS__) |
| #define | vloxei64_v_u64m2_m(...) __riscv_vloxei64_v_u64m2_tumu(__VA_ARGS__) |
| #define | vloxei64_v_u64m4(...) __riscv_vloxei64_v_u64m4(__VA_ARGS__) |
| #define | vloxei64_v_u64m4_m(...) __riscv_vloxei64_v_u64m4_tumu(__VA_ARGS__) |
| #define | vloxei64_v_u64m8(...) __riscv_vloxei64_v_u64m8(__VA_ARGS__) |
| #define | vloxei64_v_u64m8_m(...) __riscv_vloxei64_v_u64m8_tumu(__VA_ARGS__) |
| #define | vloxei64_v_u8m1(...) __riscv_vloxei64_v_u8m1(__VA_ARGS__) |
| #define | vloxei64_v_u8m1_m(...) __riscv_vloxei64_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxei64_v_u8mf2(...) __riscv_vloxei64_v_u8mf2(__VA_ARGS__) |
| #define | vloxei64_v_u8mf2_m(...) __riscv_vloxei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxei64_v_u8mf4(...) __riscv_vloxei64_v_u8mf4(__VA_ARGS__) |
| #define | vloxei64_v_u8mf4_m(...) __riscv_vloxei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxei64_v_u8mf8(...) __riscv_vloxei64_v_u8mf8(__VA_ARGS__) |
| #define | vloxei64_v_u8mf8_m(...) __riscv_vloxei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxei8_v_f16m1(...) __riscv_vloxei8_v_f16m1(__VA_ARGS__) |
| #define | vloxei8_v_f16m1_m(...) __riscv_vloxei8_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxei8_v_f16m2(...) __riscv_vloxei8_v_f16m2(__VA_ARGS__) |
| #define | vloxei8_v_f16m2_m(...) __riscv_vloxei8_v_f16m2_tumu(__VA_ARGS__) |
| #define | vloxei8_v_f16m4(...) __riscv_vloxei8_v_f16m4(__VA_ARGS__) |
| #define | vloxei8_v_f16m4_m(...) __riscv_vloxei8_v_f16m4_tumu(__VA_ARGS__) |
| #define | vloxei8_v_f16m8(...) __riscv_vloxei8_v_f16m8(__VA_ARGS__) |
| #define | vloxei8_v_f16m8_m(...) __riscv_vloxei8_v_f16m8_tumu(__VA_ARGS__) |
| #define | vloxei8_v_f16mf2(...) __riscv_vloxei8_v_f16mf2(__VA_ARGS__) |
| #define | vloxei8_v_f16mf2_m(...) __riscv_vloxei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxei8_v_f16mf4(...) __riscv_vloxei8_v_f16mf4(__VA_ARGS__) |
| #define | vloxei8_v_f16mf4_m(...) __riscv_vloxei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxei8_v_f32m1(...) __riscv_vloxei8_v_f32m1(__VA_ARGS__) |
| #define | vloxei8_v_f32m1_m(...) __riscv_vloxei8_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxei8_v_f32m2(...) __riscv_vloxei8_v_f32m2(__VA_ARGS__) |
| #define | vloxei8_v_f32m2_m(...) __riscv_vloxei8_v_f32m2_tumu(__VA_ARGS__) |
| #define | vloxei8_v_f32m4(...) __riscv_vloxei8_v_f32m4(__VA_ARGS__) |
| #define | vloxei8_v_f32m4_m(...) __riscv_vloxei8_v_f32m4_tumu(__VA_ARGS__) |
| #define | vloxei8_v_f32m8(...) __riscv_vloxei8_v_f32m8(__VA_ARGS__) |
| #define | vloxei8_v_f32m8_m(...) __riscv_vloxei8_v_f32m8_tumu(__VA_ARGS__) |
| #define | vloxei8_v_f32mf2(...) __riscv_vloxei8_v_f32mf2(__VA_ARGS__) |
| #define | vloxei8_v_f32mf2_m(...) __riscv_vloxei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxei8_v_f64m1(...) __riscv_vloxei8_v_f64m1(__VA_ARGS__) |
| #define | vloxei8_v_f64m1_m(...) __riscv_vloxei8_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxei8_v_f64m2(...) __riscv_vloxei8_v_f64m2(__VA_ARGS__) |
| #define | vloxei8_v_f64m2_m(...) __riscv_vloxei8_v_f64m2_tumu(__VA_ARGS__) |
| #define | vloxei8_v_f64m4(...) __riscv_vloxei8_v_f64m4(__VA_ARGS__) |
| #define | vloxei8_v_f64m4_m(...) __riscv_vloxei8_v_f64m4_tumu(__VA_ARGS__) |
| #define | vloxei8_v_f64m8(...) __riscv_vloxei8_v_f64m8(__VA_ARGS__) |
| #define | vloxei8_v_f64m8_m(...) __riscv_vloxei8_v_f64m8_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i16m1(...) __riscv_vloxei8_v_i16m1(__VA_ARGS__) |
| #define | vloxei8_v_i16m1_m(...) __riscv_vloxei8_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i16m2(...) __riscv_vloxei8_v_i16m2(__VA_ARGS__) |
| #define | vloxei8_v_i16m2_m(...) __riscv_vloxei8_v_i16m2_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i16m4(...) __riscv_vloxei8_v_i16m4(__VA_ARGS__) |
| #define | vloxei8_v_i16m4_m(...) __riscv_vloxei8_v_i16m4_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i16m8(...) __riscv_vloxei8_v_i16m8(__VA_ARGS__) |
| #define | vloxei8_v_i16m8_m(...) __riscv_vloxei8_v_i16m8_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i16mf2(...) __riscv_vloxei8_v_i16mf2(__VA_ARGS__) |
| #define | vloxei8_v_i16mf2_m(...) __riscv_vloxei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i16mf4(...) __riscv_vloxei8_v_i16mf4(__VA_ARGS__) |
| #define | vloxei8_v_i16mf4_m(...) __riscv_vloxei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i32m1(...) __riscv_vloxei8_v_i32m1(__VA_ARGS__) |
| #define | vloxei8_v_i32m1_m(...) __riscv_vloxei8_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i32m2(...) __riscv_vloxei8_v_i32m2(__VA_ARGS__) |
| #define | vloxei8_v_i32m2_m(...) __riscv_vloxei8_v_i32m2_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i32m4(...) __riscv_vloxei8_v_i32m4(__VA_ARGS__) |
| #define | vloxei8_v_i32m4_m(...) __riscv_vloxei8_v_i32m4_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i32m8(...) __riscv_vloxei8_v_i32m8(__VA_ARGS__) |
| #define | vloxei8_v_i32m8_m(...) __riscv_vloxei8_v_i32m8_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i32mf2(...) __riscv_vloxei8_v_i32mf2(__VA_ARGS__) |
| #define | vloxei8_v_i32mf2_m(...) __riscv_vloxei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i64m1(...) __riscv_vloxei8_v_i64m1(__VA_ARGS__) |
| #define | vloxei8_v_i64m1_m(...) __riscv_vloxei8_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i64m2(...) __riscv_vloxei8_v_i64m2(__VA_ARGS__) |
| #define | vloxei8_v_i64m2_m(...) __riscv_vloxei8_v_i64m2_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i64m4(...) __riscv_vloxei8_v_i64m4(__VA_ARGS__) |
| #define | vloxei8_v_i64m4_m(...) __riscv_vloxei8_v_i64m4_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i64m8(...) __riscv_vloxei8_v_i64m8(__VA_ARGS__) |
| #define | vloxei8_v_i64m8_m(...) __riscv_vloxei8_v_i64m8_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i8m1(...) __riscv_vloxei8_v_i8m1(__VA_ARGS__) |
| #define | vloxei8_v_i8m1_m(...) __riscv_vloxei8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i8m2(...) __riscv_vloxei8_v_i8m2(__VA_ARGS__) |
| #define | vloxei8_v_i8m2_m(...) __riscv_vloxei8_v_i8m2_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i8m4(...) __riscv_vloxei8_v_i8m4(__VA_ARGS__) |
| #define | vloxei8_v_i8m4_m(...) __riscv_vloxei8_v_i8m4_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i8m8(...) __riscv_vloxei8_v_i8m8(__VA_ARGS__) |
| #define | vloxei8_v_i8m8_m(...) __riscv_vloxei8_v_i8m8_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i8mf2(...) __riscv_vloxei8_v_i8mf2(__VA_ARGS__) |
| #define | vloxei8_v_i8mf2_m(...) __riscv_vloxei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i8mf4(...) __riscv_vloxei8_v_i8mf4(__VA_ARGS__) |
| #define | vloxei8_v_i8mf4_m(...) __riscv_vloxei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxei8_v_i8mf8(...) __riscv_vloxei8_v_i8mf8(__VA_ARGS__) |
| #define | vloxei8_v_i8mf8_m(...) __riscv_vloxei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u16m1(...) __riscv_vloxei8_v_u16m1(__VA_ARGS__) |
| #define | vloxei8_v_u16m1_m(...) __riscv_vloxei8_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u16m2(...) __riscv_vloxei8_v_u16m2(__VA_ARGS__) |
| #define | vloxei8_v_u16m2_m(...) __riscv_vloxei8_v_u16m2_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u16m4(...) __riscv_vloxei8_v_u16m4(__VA_ARGS__) |
| #define | vloxei8_v_u16m4_m(...) __riscv_vloxei8_v_u16m4_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u16m8(...) __riscv_vloxei8_v_u16m8(__VA_ARGS__) |
| #define | vloxei8_v_u16m8_m(...) __riscv_vloxei8_v_u16m8_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u16mf2(...) __riscv_vloxei8_v_u16mf2(__VA_ARGS__) |
| #define | vloxei8_v_u16mf2_m(...) __riscv_vloxei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u16mf4(...) __riscv_vloxei8_v_u16mf4(__VA_ARGS__) |
| #define | vloxei8_v_u16mf4_m(...) __riscv_vloxei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u32m1(...) __riscv_vloxei8_v_u32m1(__VA_ARGS__) |
| #define | vloxei8_v_u32m1_m(...) __riscv_vloxei8_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u32m2(...) __riscv_vloxei8_v_u32m2(__VA_ARGS__) |
| #define | vloxei8_v_u32m2_m(...) __riscv_vloxei8_v_u32m2_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u32m4(...) __riscv_vloxei8_v_u32m4(__VA_ARGS__) |
| #define | vloxei8_v_u32m4_m(...) __riscv_vloxei8_v_u32m4_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u32m8(...) __riscv_vloxei8_v_u32m8(__VA_ARGS__) |
| #define | vloxei8_v_u32m8_m(...) __riscv_vloxei8_v_u32m8_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u32mf2(...) __riscv_vloxei8_v_u32mf2(__VA_ARGS__) |
| #define | vloxei8_v_u32mf2_m(...) __riscv_vloxei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u64m1(...) __riscv_vloxei8_v_u64m1(__VA_ARGS__) |
| #define | vloxei8_v_u64m1_m(...) __riscv_vloxei8_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u64m2(...) __riscv_vloxei8_v_u64m2(__VA_ARGS__) |
| #define | vloxei8_v_u64m2_m(...) __riscv_vloxei8_v_u64m2_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u64m4(...) __riscv_vloxei8_v_u64m4(__VA_ARGS__) |
| #define | vloxei8_v_u64m4_m(...) __riscv_vloxei8_v_u64m4_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u64m8(...) __riscv_vloxei8_v_u64m8(__VA_ARGS__) |
| #define | vloxei8_v_u64m8_m(...) __riscv_vloxei8_v_u64m8_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u8m1(...) __riscv_vloxei8_v_u8m1(__VA_ARGS__) |
| #define | vloxei8_v_u8m1_m(...) __riscv_vloxei8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u8m2(...) __riscv_vloxei8_v_u8m2(__VA_ARGS__) |
| #define | vloxei8_v_u8m2_m(...) __riscv_vloxei8_v_u8m2_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u8m4(...) __riscv_vloxei8_v_u8m4(__VA_ARGS__) |
| #define | vloxei8_v_u8m4_m(...) __riscv_vloxei8_v_u8m4_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u8m8(...) __riscv_vloxei8_v_u8m8(__VA_ARGS__) |
| #define | vloxei8_v_u8m8_m(...) __riscv_vloxei8_v_u8m8_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u8mf2(...) __riscv_vloxei8_v_u8mf2(__VA_ARGS__) |
| #define | vloxei8_v_u8mf2_m(...) __riscv_vloxei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u8mf4(...) __riscv_vloxei8_v_u8mf4(__VA_ARGS__) |
| #define | vloxei8_v_u8mf4_m(...) __riscv_vloxei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxei8_v_u8mf8(...) __riscv_vloxei8_v_u8mf8(__VA_ARGS__) |
| #define | vloxei8_v_u8mf8_m(...) __riscv_vloxei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f16m1(...) __riscv_vloxseg2ei16_v_f16m1(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f16m1_m(...) __riscv_vloxseg2ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f16m2(...) __riscv_vloxseg2ei16_v_f16m2(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f16m2_m(...) __riscv_vloxseg2ei16_v_f16m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f16m4(...) __riscv_vloxseg2ei16_v_f16m4(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f16m4_m(...) __riscv_vloxseg2ei16_v_f16m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f16mf2(...) __riscv_vloxseg2ei16_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f16mf2_m(...) __riscv_vloxseg2ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f16mf4(...) __riscv_vloxseg2ei16_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f16mf4_m(...) __riscv_vloxseg2ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f32m1(...) __riscv_vloxseg2ei16_v_f32m1(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f32m1_m(...) __riscv_vloxseg2ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f32m2(...) __riscv_vloxseg2ei16_v_f32m2(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f32m2_m(...) __riscv_vloxseg2ei16_v_f32m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f32m4(...) __riscv_vloxseg2ei16_v_f32m4(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f32m4_m(...) __riscv_vloxseg2ei16_v_f32m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f32mf2(...) __riscv_vloxseg2ei16_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f32mf2_m(...) __riscv_vloxseg2ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f64m1(...) __riscv_vloxseg2ei16_v_f64m1(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f64m1_m(...) __riscv_vloxseg2ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f64m2(...) __riscv_vloxseg2ei16_v_f64m2(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f64m2_m(...) __riscv_vloxseg2ei16_v_f64m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f64m4(...) __riscv_vloxseg2ei16_v_f64m4(__VA_ARGS__) |
| #define | vloxseg2ei16_v_f64m4_m(...) __riscv_vloxseg2ei16_v_f64m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i16m1(...) __riscv_vloxseg2ei16_v_i16m1(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i16m1_m(...) __riscv_vloxseg2ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i16m2(...) __riscv_vloxseg2ei16_v_i16m2(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i16m2_m(...) __riscv_vloxseg2ei16_v_i16m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i16m4(...) __riscv_vloxseg2ei16_v_i16m4(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i16m4_m(...) __riscv_vloxseg2ei16_v_i16m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i16mf2(...) __riscv_vloxseg2ei16_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i16mf2_m(...) __riscv_vloxseg2ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i16mf4(...) __riscv_vloxseg2ei16_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i16mf4_m(...) __riscv_vloxseg2ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i32m1(...) __riscv_vloxseg2ei16_v_i32m1(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i32m1_m(...) __riscv_vloxseg2ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i32m2(...) __riscv_vloxseg2ei16_v_i32m2(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i32m2_m(...) __riscv_vloxseg2ei16_v_i32m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i32m4(...) __riscv_vloxseg2ei16_v_i32m4(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i32m4_m(...) __riscv_vloxseg2ei16_v_i32m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i32mf2(...) __riscv_vloxseg2ei16_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i32mf2_m(...) __riscv_vloxseg2ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i64m1(...) __riscv_vloxseg2ei16_v_i64m1(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i64m1_m(...) __riscv_vloxseg2ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i64m2(...) __riscv_vloxseg2ei16_v_i64m2(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i64m2_m(...) __riscv_vloxseg2ei16_v_i64m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i64m4(...) __riscv_vloxseg2ei16_v_i64m4(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i64m4_m(...) __riscv_vloxseg2ei16_v_i64m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i8m1(...) __riscv_vloxseg2ei16_v_i8m1(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i8m1_m(...) __riscv_vloxseg2ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i8m2(...) __riscv_vloxseg2ei16_v_i8m2(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i8m2_m(...) __riscv_vloxseg2ei16_v_i8m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i8m4(...) __riscv_vloxseg2ei16_v_i8m4(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i8m4_m(...) __riscv_vloxseg2ei16_v_i8m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i8mf2(...) __riscv_vloxseg2ei16_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i8mf2_m(...) __riscv_vloxseg2ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i8mf4(...) __riscv_vloxseg2ei16_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i8mf4_m(...) __riscv_vloxseg2ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i8mf8(...) __riscv_vloxseg2ei16_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg2ei16_v_i8mf8_m(...) __riscv_vloxseg2ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u16m1(...) __riscv_vloxseg2ei16_v_u16m1(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u16m1_m(...) __riscv_vloxseg2ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u16m2(...) __riscv_vloxseg2ei16_v_u16m2(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u16m2_m(...) __riscv_vloxseg2ei16_v_u16m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u16m4(...) __riscv_vloxseg2ei16_v_u16m4(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u16m4_m(...) __riscv_vloxseg2ei16_v_u16m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u16mf2(...) __riscv_vloxseg2ei16_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u16mf2_m(...) __riscv_vloxseg2ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u16mf4(...) __riscv_vloxseg2ei16_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u16mf4_m(...) __riscv_vloxseg2ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u32m1(...) __riscv_vloxseg2ei16_v_u32m1(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u32m1_m(...) __riscv_vloxseg2ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u32m2(...) __riscv_vloxseg2ei16_v_u32m2(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u32m2_m(...) __riscv_vloxseg2ei16_v_u32m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u32m4(...) __riscv_vloxseg2ei16_v_u32m4(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u32m4_m(...) __riscv_vloxseg2ei16_v_u32m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u32mf2(...) __riscv_vloxseg2ei16_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u32mf2_m(...) __riscv_vloxseg2ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u64m1(...) __riscv_vloxseg2ei16_v_u64m1(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u64m1_m(...) __riscv_vloxseg2ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u64m2(...) __riscv_vloxseg2ei16_v_u64m2(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u64m2_m(...) __riscv_vloxseg2ei16_v_u64m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u64m4(...) __riscv_vloxseg2ei16_v_u64m4(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u64m4_m(...) __riscv_vloxseg2ei16_v_u64m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u8m1(...) __riscv_vloxseg2ei16_v_u8m1(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u8m1_m(...) __riscv_vloxseg2ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u8m2(...) __riscv_vloxseg2ei16_v_u8m2(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u8m2_m(...) __riscv_vloxseg2ei16_v_u8m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u8m4(...) __riscv_vloxseg2ei16_v_u8m4(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u8m4_m(...) __riscv_vloxseg2ei16_v_u8m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u8mf2(...) __riscv_vloxseg2ei16_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u8mf2_m(...) __riscv_vloxseg2ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u8mf4(...) __riscv_vloxseg2ei16_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u8mf4_m(...) __riscv_vloxseg2ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u8mf8(...) __riscv_vloxseg2ei16_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg2ei16_v_u8mf8_m(...) __riscv_vloxseg2ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f16m1(...) __riscv_vloxseg2ei32_v_f16m1(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f16m1_m(...) __riscv_vloxseg2ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f16m2(...) __riscv_vloxseg2ei32_v_f16m2(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f16m2_m(...) __riscv_vloxseg2ei32_v_f16m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f16m4(...) __riscv_vloxseg2ei32_v_f16m4(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f16m4_m(...) __riscv_vloxseg2ei32_v_f16m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f16mf2(...) __riscv_vloxseg2ei32_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f16mf2_m(...) __riscv_vloxseg2ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f16mf4(...) __riscv_vloxseg2ei32_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f16mf4_m(...) __riscv_vloxseg2ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f32m1(...) __riscv_vloxseg2ei32_v_f32m1(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f32m1_m(...) __riscv_vloxseg2ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f32m2(...) __riscv_vloxseg2ei32_v_f32m2(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f32m2_m(...) __riscv_vloxseg2ei32_v_f32m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f32m4(...) __riscv_vloxseg2ei32_v_f32m4(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f32m4_m(...) __riscv_vloxseg2ei32_v_f32m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f32mf2(...) __riscv_vloxseg2ei32_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f32mf2_m(...) __riscv_vloxseg2ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f64m1(...) __riscv_vloxseg2ei32_v_f64m1(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f64m1_m(...) __riscv_vloxseg2ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f64m2(...) __riscv_vloxseg2ei32_v_f64m2(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f64m2_m(...) __riscv_vloxseg2ei32_v_f64m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f64m4(...) __riscv_vloxseg2ei32_v_f64m4(__VA_ARGS__) |
| #define | vloxseg2ei32_v_f64m4_m(...) __riscv_vloxseg2ei32_v_f64m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i16m1(...) __riscv_vloxseg2ei32_v_i16m1(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i16m1_m(...) __riscv_vloxseg2ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i16m2(...) __riscv_vloxseg2ei32_v_i16m2(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i16m2_m(...) __riscv_vloxseg2ei32_v_i16m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i16m4(...) __riscv_vloxseg2ei32_v_i16m4(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i16m4_m(...) __riscv_vloxseg2ei32_v_i16m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i16mf2(...) __riscv_vloxseg2ei32_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i16mf2_m(...) __riscv_vloxseg2ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i16mf4(...) __riscv_vloxseg2ei32_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i16mf4_m(...) __riscv_vloxseg2ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i32m1(...) __riscv_vloxseg2ei32_v_i32m1(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i32m1_m(...) __riscv_vloxseg2ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i32m2(...) __riscv_vloxseg2ei32_v_i32m2(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i32m2_m(...) __riscv_vloxseg2ei32_v_i32m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i32m4(...) __riscv_vloxseg2ei32_v_i32m4(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i32m4_m(...) __riscv_vloxseg2ei32_v_i32m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i32mf2(...) __riscv_vloxseg2ei32_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i32mf2_m(...) __riscv_vloxseg2ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i64m1(...) __riscv_vloxseg2ei32_v_i64m1(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i64m1_m(...) __riscv_vloxseg2ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i64m2(...) __riscv_vloxseg2ei32_v_i64m2(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i64m2_m(...) __riscv_vloxseg2ei32_v_i64m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i64m4(...) __riscv_vloxseg2ei32_v_i64m4(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i64m4_m(...) __riscv_vloxseg2ei32_v_i64m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i8m1(...) __riscv_vloxseg2ei32_v_i8m1(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i8m1_m(...) __riscv_vloxseg2ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i8m2(...) __riscv_vloxseg2ei32_v_i8m2(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i8m2_m(...) __riscv_vloxseg2ei32_v_i8m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i8mf2(...) __riscv_vloxseg2ei32_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i8mf2_m(...) __riscv_vloxseg2ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i8mf4(...) __riscv_vloxseg2ei32_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i8mf4_m(...) __riscv_vloxseg2ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i8mf8(...) __riscv_vloxseg2ei32_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg2ei32_v_i8mf8_m(...) __riscv_vloxseg2ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u16m1(...) __riscv_vloxseg2ei32_v_u16m1(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u16m1_m(...) __riscv_vloxseg2ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u16m2(...) __riscv_vloxseg2ei32_v_u16m2(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u16m2_m(...) __riscv_vloxseg2ei32_v_u16m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u16m4(...) __riscv_vloxseg2ei32_v_u16m4(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u16m4_m(...) __riscv_vloxseg2ei32_v_u16m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u16mf2(...) __riscv_vloxseg2ei32_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u16mf2_m(...) __riscv_vloxseg2ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u16mf4(...) __riscv_vloxseg2ei32_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u16mf4_m(...) __riscv_vloxseg2ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u32m1(...) __riscv_vloxseg2ei32_v_u32m1(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u32m1_m(...) __riscv_vloxseg2ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u32m2(...) __riscv_vloxseg2ei32_v_u32m2(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u32m2_m(...) __riscv_vloxseg2ei32_v_u32m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u32m4(...) __riscv_vloxseg2ei32_v_u32m4(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u32m4_m(...) __riscv_vloxseg2ei32_v_u32m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u32mf2(...) __riscv_vloxseg2ei32_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u32mf2_m(...) __riscv_vloxseg2ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u64m1(...) __riscv_vloxseg2ei32_v_u64m1(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u64m1_m(...) __riscv_vloxseg2ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u64m2(...) __riscv_vloxseg2ei32_v_u64m2(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u64m2_m(...) __riscv_vloxseg2ei32_v_u64m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u64m4(...) __riscv_vloxseg2ei32_v_u64m4(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u64m4_m(...) __riscv_vloxseg2ei32_v_u64m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u8m1(...) __riscv_vloxseg2ei32_v_u8m1(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u8m1_m(...) __riscv_vloxseg2ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u8m2(...) __riscv_vloxseg2ei32_v_u8m2(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u8m2_m(...) __riscv_vloxseg2ei32_v_u8m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u8mf2(...) __riscv_vloxseg2ei32_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u8mf2_m(...) __riscv_vloxseg2ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u8mf4(...) __riscv_vloxseg2ei32_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u8mf4_m(...) __riscv_vloxseg2ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u8mf8(...) __riscv_vloxseg2ei32_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg2ei32_v_u8mf8_m(...) __riscv_vloxseg2ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f16m1(...) __riscv_vloxseg2ei64_v_f16m1(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f16m1_m(...) __riscv_vloxseg2ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f16m2(...) __riscv_vloxseg2ei64_v_f16m2(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f16m2_m(...) __riscv_vloxseg2ei64_v_f16m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f16mf2(...) __riscv_vloxseg2ei64_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f16mf2_m(...) __riscv_vloxseg2ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f16mf4(...) __riscv_vloxseg2ei64_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f16mf4_m(...) __riscv_vloxseg2ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f32m1(...) __riscv_vloxseg2ei64_v_f32m1(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f32m1_m(...) __riscv_vloxseg2ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f32m2(...) __riscv_vloxseg2ei64_v_f32m2(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f32m2_m(...) __riscv_vloxseg2ei64_v_f32m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f32m4(...) __riscv_vloxseg2ei64_v_f32m4(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f32m4_m(...) __riscv_vloxseg2ei64_v_f32m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f32mf2(...) __riscv_vloxseg2ei64_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f32mf2_m(...) __riscv_vloxseg2ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f64m1(...) __riscv_vloxseg2ei64_v_f64m1(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f64m1_m(...) __riscv_vloxseg2ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f64m2(...) __riscv_vloxseg2ei64_v_f64m2(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f64m2_m(...) __riscv_vloxseg2ei64_v_f64m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f64m4(...) __riscv_vloxseg2ei64_v_f64m4(__VA_ARGS__) |
| #define | vloxseg2ei64_v_f64m4_m(...) __riscv_vloxseg2ei64_v_f64m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i16m1(...) __riscv_vloxseg2ei64_v_i16m1(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i16m1_m(...) __riscv_vloxseg2ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i16m2(...) __riscv_vloxseg2ei64_v_i16m2(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i16m2_m(...) __riscv_vloxseg2ei64_v_i16m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i16mf2(...) __riscv_vloxseg2ei64_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i16mf2_m(...) __riscv_vloxseg2ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i16mf4(...) __riscv_vloxseg2ei64_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i16mf4_m(...) __riscv_vloxseg2ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i32m1(...) __riscv_vloxseg2ei64_v_i32m1(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i32m1_m(...) __riscv_vloxseg2ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i32m2(...) __riscv_vloxseg2ei64_v_i32m2(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i32m2_m(...) __riscv_vloxseg2ei64_v_i32m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i32m4(...) __riscv_vloxseg2ei64_v_i32m4(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i32m4_m(...) __riscv_vloxseg2ei64_v_i32m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i32mf2(...) __riscv_vloxseg2ei64_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i32mf2_m(...) __riscv_vloxseg2ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i64m1(...) __riscv_vloxseg2ei64_v_i64m1(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i64m1_m(...) __riscv_vloxseg2ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i64m2(...) __riscv_vloxseg2ei64_v_i64m2(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i64m2_m(...) __riscv_vloxseg2ei64_v_i64m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i64m4(...) __riscv_vloxseg2ei64_v_i64m4(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i64m4_m(...) __riscv_vloxseg2ei64_v_i64m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i8m1(...) __riscv_vloxseg2ei64_v_i8m1(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i8m1_m(...) __riscv_vloxseg2ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i8mf2(...) __riscv_vloxseg2ei64_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i8mf2_m(...) __riscv_vloxseg2ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i8mf4(...) __riscv_vloxseg2ei64_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i8mf4_m(...) __riscv_vloxseg2ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i8mf8(...) __riscv_vloxseg2ei64_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg2ei64_v_i8mf8_m(...) __riscv_vloxseg2ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u16m1(...) __riscv_vloxseg2ei64_v_u16m1(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u16m1_m(...) __riscv_vloxseg2ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u16m2(...) __riscv_vloxseg2ei64_v_u16m2(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u16m2_m(...) __riscv_vloxseg2ei64_v_u16m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u16mf2(...) __riscv_vloxseg2ei64_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u16mf2_m(...) __riscv_vloxseg2ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u16mf4(...) __riscv_vloxseg2ei64_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u16mf4_m(...) __riscv_vloxseg2ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u32m1(...) __riscv_vloxseg2ei64_v_u32m1(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u32m1_m(...) __riscv_vloxseg2ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u32m2(...) __riscv_vloxseg2ei64_v_u32m2(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u32m2_m(...) __riscv_vloxseg2ei64_v_u32m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u32m4(...) __riscv_vloxseg2ei64_v_u32m4(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u32m4_m(...) __riscv_vloxseg2ei64_v_u32m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u32mf2(...) __riscv_vloxseg2ei64_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u32mf2_m(...) __riscv_vloxseg2ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u64m1(...) __riscv_vloxseg2ei64_v_u64m1(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u64m1_m(...) __riscv_vloxseg2ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u64m2(...) __riscv_vloxseg2ei64_v_u64m2(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u64m2_m(...) __riscv_vloxseg2ei64_v_u64m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u64m4(...) __riscv_vloxseg2ei64_v_u64m4(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u64m4_m(...) __riscv_vloxseg2ei64_v_u64m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u8m1(...) __riscv_vloxseg2ei64_v_u8m1(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u8m1_m(...) __riscv_vloxseg2ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u8mf2(...) __riscv_vloxseg2ei64_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u8mf2_m(...) __riscv_vloxseg2ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u8mf4(...) __riscv_vloxseg2ei64_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u8mf4_m(...) __riscv_vloxseg2ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u8mf8(...) __riscv_vloxseg2ei64_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg2ei64_v_u8mf8_m(...) __riscv_vloxseg2ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f16m1(...) __riscv_vloxseg2ei8_v_f16m1(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f16m1_m(...) __riscv_vloxseg2ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f16m2(...) __riscv_vloxseg2ei8_v_f16m2(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f16m2_m(...) __riscv_vloxseg2ei8_v_f16m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f16m4(...) __riscv_vloxseg2ei8_v_f16m4(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f16m4_m(...) __riscv_vloxseg2ei8_v_f16m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f16mf2(...) __riscv_vloxseg2ei8_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f16mf2_m(...) __riscv_vloxseg2ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f16mf4(...) __riscv_vloxseg2ei8_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f16mf4_m(...) __riscv_vloxseg2ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f32m1(...) __riscv_vloxseg2ei8_v_f32m1(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f32m1_m(...) __riscv_vloxseg2ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f32m2(...) __riscv_vloxseg2ei8_v_f32m2(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f32m2_m(...) __riscv_vloxseg2ei8_v_f32m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f32m4(...) __riscv_vloxseg2ei8_v_f32m4(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f32m4_m(...) __riscv_vloxseg2ei8_v_f32m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f32mf2(...) __riscv_vloxseg2ei8_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f32mf2_m(...) __riscv_vloxseg2ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f64m1(...) __riscv_vloxseg2ei8_v_f64m1(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f64m1_m(...) __riscv_vloxseg2ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f64m2(...) __riscv_vloxseg2ei8_v_f64m2(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f64m2_m(...) __riscv_vloxseg2ei8_v_f64m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f64m4(...) __riscv_vloxseg2ei8_v_f64m4(__VA_ARGS__) |
| #define | vloxseg2ei8_v_f64m4_m(...) __riscv_vloxseg2ei8_v_f64m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i16m1(...) __riscv_vloxseg2ei8_v_i16m1(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i16m1_m(...) __riscv_vloxseg2ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i16m2(...) __riscv_vloxseg2ei8_v_i16m2(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i16m2_m(...) __riscv_vloxseg2ei8_v_i16m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i16m4(...) __riscv_vloxseg2ei8_v_i16m4(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i16m4_m(...) __riscv_vloxseg2ei8_v_i16m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i16mf2(...) __riscv_vloxseg2ei8_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i16mf2_m(...) __riscv_vloxseg2ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i16mf4(...) __riscv_vloxseg2ei8_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i16mf4_m(...) __riscv_vloxseg2ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i32m1(...) __riscv_vloxseg2ei8_v_i32m1(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i32m1_m(...) __riscv_vloxseg2ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i32m2(...) __riscv_vloxseg2ei8_v_i32m2(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i32m2_m(...) __riscv_vloxseg2ei8_v_i32m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i32m4(...) __riscv_vloxseg2ei8_v_i32m4(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i32m4_m(...) __riscv_vloxseg2ei8_v_i32m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i32mf2(...) __riscv_vloxseg2ei8_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i32mf2_m(...) __riscv_vloxseg2ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i64m1(...) __riscv_vloxseg2ei8_v_i64m1(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i64m1_m(...) __riscv_vloxseg2ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i64m2(...) __riscv_vloxseg2ei8_v_i64m2(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i64m2_m(...) __riscv_vloxseg2ei8_v_i64m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i64m4(...) __riscv_vloxseg2ei8_v_i64m4(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i64m4_m(...) __riscv_vloxseg2ei8_v_i64m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i8m1(...) __riscv_vloxseg2ei8_v_i8m1(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i8m1_m(...) __riscv_vloxseg2ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i8m2(...) __riscv_vloxseg2ei8_v_i8m2(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i8m2_m(...) __riscv_vloxseg2ei8_v_i8m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i8m4(...) __riscv_vloxseg2ei8_v_i8m4(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i8m4_m(...) __riscv_vloxseg2ei8_v_i8m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i8mf2(...) __riscv_vloxseg2ei8_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i8mf2_m(...) __riscv_vloxseg2ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i8mf4(...) __riscv_vloxseg2ei8_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i8mf4_m(...) __riscv_vloxseg2ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i8mf8(...) __riscv_vloxseg2ei8_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg2ei8_v_i8mf8_m(...) __riscv_vloxseg2ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u16m1(...) __riscv_vloxseg2ei8_v_u16m1(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u16m1_m(...) __riscv_vloxseg2ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u16m2(...) __riscv_vloxseg2ei8_v_u16m2(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u16m2_m(...) __riscv_vloxseg2ei8_v_u16m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u16m4(...) __riscv_vloxseg2ei8_v_u16m4(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u16m4_m(...) __riscv_vloxseg2ei8_v_u16m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u16mf2(...) __riscv_vloxseg2ei8_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u16mf2_m(...) __riscv_vloxseg2ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u16mf4(...) __riscv_vloxseg2ei8_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u16mf4_m(...) __riscv_vloxseg2ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u32m1(...) __riscv_vloxseg2ei8_v_u32m1(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u32m1_m(...) __riscv_vloxseg2ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u32m2(...) __riscv_vloxseg2ei8_v_u32m2(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u32m2_m(...) __riscv_vloxseg2ei8_v_u32m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u32m4(...) __riscv_vloxseg2ei8_v_u32m4(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u32m4_m(...) __riscv_vloxseg2ei8_v_u32m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u32mf2(...) __riscv_vloxseg2ei8_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u32mf2_m(...) __riscv_vloxseg2ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u64m1(...) __riscv_vloxseg2ei8_v_u64m1(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u64m1_m(...) __riscv_vloxseg2ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u64m2(...) __riscv_vloxseg2ei8_v_u64m2(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u64m2_m(...) __riscv_vloxseg2ei8_v_u64m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u64m4(...) __riscv_vloxseg2ei8_v_u64m4(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u64m4_m(...) __riscv_vloxseg2ei8_v_u64m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u8m1(...) __riscv_vloxseg2ei8_v_u8m1(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u8m1_m(...) __riscv_vloxseg2ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u8m2(...) __riscv_vloxseg2ei8_v_u8m2(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u8m2_m(...) __riscv_vloxseg2ei8_v_u8m2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u8m4(...) __riscv_vloxseg2ei8_v_u8m4(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u8m4_m(...) __riscv_vloxseg2ei8_v_u8m4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u8mf2(...) __riscv_vloxseg2ei8_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u8mf2_m(...) __riscv_vloxseg2ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u8mf4(...) __riscv_vloxseg2ei8_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u8mf4_m(...) __riscv_vloxseg2ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u8mf8(...) __riscv_vloxseg2ei8_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg2ei8_v_u8mf8_m(...) __riscv_vloxseg2ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_f16m1(...) __riscv_vloxseg3ei16_v_f16m1(__VA_ARGS__) |
| #define | vloxseg3ei16_v_f16m1_m(...) __riscv_vloxseg3ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_f16m2(...) __riscv_vloxseg3ei16_v_f16m2(__VA_ARGS__) |
| #define | vloxseg3ei16_v_f16m2_m(...) __riscv_vloxseg3ei16_v_f16m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_f16mf2(...) __riscv_vloxseg3ei16_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg3ei16_v_f16mf2_m(...) __riscv_vloxseg3ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_f16mf4(...) __riscv_vloxseg3ei16_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg3ei16_v_f16mf4_m(...) __riscv_vloxseg3ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_f32m1(...) __riscv_vloxseg3ei16_v_f32m1(__VA_ARGS__) |
| #define | vloxseg3ei16_v_f32m1_m(...) __riscv_vloxseg3ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_f32m2(...) __riscv_vloxseg3ei16_v_f32m2(__VA_ARGS__) |
| #define | vloxseg3ei16_v_f32m2_m(...) __riscv_vloxseg3ei16_v_f32m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_f32mf2(...) __riscv_vloxseg3ei16_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg3ei16_v_f32mf2_m(...) __riscv_vloxseg3ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_f64m1(...) __riscv_vloxseg3ei16_v_f64m1(__VA_ARGS__) |
| #define | vloxseg3ei16_v_f64m1_m(...) __riscv_vloxseg3ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_f64m2(...) __riscv_vloxseg3ei16_v_f64m2(__VA_ARGS__) |
| #define | vloxseg3ei16_v_f64m2_m(...) __riscv_vloxseg3ei16_v_f64m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i16m1(...) __riscv_vloxseg3ei16_v_i16m1(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i16m1_m(...) __riscv_vloxseg3ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i16m2(...) __riscv_vloxseg3ei16_v_i16m2(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i16m2_m(...) __riscv_vloxseg3ei16_v_i16m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i16mf2(...) __riscv_vloxseg3ei16_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i16mf2_m(...) __riscv_vloxseg3ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i16mf4(...) __riscv_vloxseg3ei16_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i16mf4_m(...) __riscv_vloxseg3ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i32m1(...) __riscv_vloxseg3ei16_v_i32m1(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i32m1_m(...) __riscv_vloxseg3ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i32m2(...) __riscv_vloxseg3ei16_v_i32m2(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i32m2_m(...) __riscv_vloxseg3ei16_v_i32m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i32mf2(...) __riscv_vloxseg3ei16_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i32mf2_m(...) __riscv_vloxseg3ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i64m1(...) __riscv_vloxseg3ei16_v_i64m1(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i64m1_m(...) __riscv_vloxseg3ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i64m2(...) __riscv_vloxseg3ei16_v_i64m2(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i64m2_m(...) __riscv_vloxseg3ei16_v_i64m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i8m1(...) __riscv_vloxseg3ei16_v_i8m1(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i8m1_m(...) __riscv_vloxseg3ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i8m2(...) __riscv_vloxseg3ei16_v_i8m2(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i8m2_m(...) __riscv_vloxseg3ei16_v_i8m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i8mf2(...) __riscv_vloxseg3ei16_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i8mf2_m(...) __riscv_vloxseg3ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i8mf4(...) __riscv_vloxseg3ei16_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i8mf4_m(...) __riscv_vloxseg3ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i8mf8(...) __riscv_vloxseg3ei16_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg3ei16_v_i8mf8_m(...) __riscv_vloxseg3ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u16m1(...) __riscv_vloxseg3ei16_v_u16m1(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u16m1_m(...) __riscv_vloxseg3ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u16m2(...) __riscv_vloxseg3ei16_v_u16m2(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u16m2_m(...) __riscv_vloxseg3ei16_v_u16m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u16mf2(...) __riscv_vloxseg3ei16_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u16mf2_m(...) __riscv_vloxseg3ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u16mf4(...) __riscv_vloxseg3ei16_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u16mf4_m(...) __riscv_vloxseg3ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u32m1(...) __riscv_vloxseg3ei16_v_u32m1(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u32m1_m(...) __riscv_vloxseg3ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u32m2(...) __riscv_vloxseg3ei16_v_u32m2(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u32m2_m(...) __riscv_vloxseg3ei16_v_u32m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u32mf2(...) __riscv_vloxseg3ei16_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u32mf2_m(...) __riscv_vloxseg3ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u64m1(...) __riscv_vloxseg3ei16_v_u64m1(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u64m1_m(...) __riscv_vloxseg3ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u64m2(...) __riscv_vloxseg3ei16_v_u64m2(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u64m2_m(...) __riscv_vloxseg3ei16_v_u64m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u8m1(...) __riscv_vloxseg3ei16_v_u8m1(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u8m1_m(...) __riscv_vloxseg3ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u8m2(...) __riscv_vloxseg3ei16_v_u8m2(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u8m2_m(...) __riscv_vloxseg3ei16_v_u8m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u8mf2(...) __riscv_vloxseg3ei16_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u8mf2_m(...) __riscv_vloxseg3ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u8mf4(...) __riscv_vloxseg3ei16_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u8mf4_m(...) __riscv_vloxseg3ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u8mf8(...) __riscv_vloxseg3ei16_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg3ei16_v_u8mf8_m(...) __riscv_vloxseg3ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_f16m1(...) __riscv_vloxseg3ei32_v_f16m1(__VA_ARGS__) |
| #define | vloxseg3ei32_v_f16m1_m(...) __riscv_vloxseg3ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_f16m2(...) __riscv_vloxseg3ei32_v_f16m2(__VA_ARGS__) |
| #define | vloxseg3ei32_v_f16m2_m(...) __riscv_vloxseg3ei32_v_f16m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_f16mf2(...) __riscv_vloxseg3ei32_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg3ei32_v_f16mf2_m(...) __riscv_vloxseg3ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_f16mf4(...) __riscv_vloxseg3ei32_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg3ei32_v_f16mf4_m(...) __riscv_vloxseg3ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_f32m1(...) __riscv_vloxseg3ei32_v_f32m1(__VA_ARGS__) |
| #define | vloxseg3ei32_v_f32m1_m(...) __riscv_vloxseg3ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_f32m2(...) __riscv_vloxseg3ei32_v_f32m2(__VA_ARGS__) |
| #define | vloxseg3ei32_v_f32m2_m(...) __riscv_vloxseg3ei32_v_f32m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_f32mf2(...) __riscv_vloxseg3ei32_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg3ei32_v_f32mf2_m(...) __riscv_vloxseg3ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_f64m1(...) __riscv_vloxseg3ei32_v_f64m1(__VA_ARGS__) |
| #define | vloxseg3ei32_v_f64m1_m(...) __riscv_vloxseg3ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_f64m2(...) __riscv_vloxseg3ei32_v_f64m2(__VA_ARGS__) |
| #define | vloxseg3ei32_v_f64m2_m(...) __riscv_vloxseg3ei32_v_f64m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i16m1(...) __riscv_vloxseg3ei32_v_i16m1(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i16m1_m(...) __riscv_vloxseg3ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i16m2(...) __riscv_vloxseg3ei32_v_i16m2(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i16m2_m(...) __riscv_vloxseg3ei32_v_i16m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i16mf2(...) __riscv_vloxseg3ei32_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i16mf2_m(...) __riscv_vloxseg3ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i16mf4(...) __riscv_vloxseg3ei32_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i16mf4_m(...) __riscv_vloxseg3ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i32m1(...) __riscv_vloxseg3ei32_v_i32m1(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i32m1_m(...) __riscv_vloxseg3ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i32m2(...) __riscv_vloxseg3ei32_v_i32m2(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i32m2_m(...) __riscv_vloxseg3ei32_v_i32m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i32mf2(...) __riscv_vloxseg3ei32_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i32mf2_m(...) __riscv_vloxseg3ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i64m1(...) __riscv_vloxseg3ei32_v_i64m1(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i64m1_m(...) __riscv_vloxseg3ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i64m2(...) __riscv_vloxseg3ei32_v_i64m2(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i64m2_m(...) __riscv_vloxseg3ei32_v_i64m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i8m1(...) __riscv_vloxseg3ei32_v_i8m1(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i8m1_m(...) __riscv_vloxseg3ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i8m2(...) __riscv_vloxseg3ei32_v_i8m2(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i8m2_m(...) __riscv_vloxseg3ei32_v_i8m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i8mf2(...) __riscv_vloxseg3ei32_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i8mf2_m(...) __riscv_vloxseg3ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i8mf4(...) __riscv_vloxseg3ei32_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i8mf4_m(...) __riscv_vloxseg3ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i8mf8(...) __riscv_vloxseg3ei32_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg3ei32_v_i8mf8_m(...) __riscv_vloxseg3ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u16m1(...) __riscv_vloxseg3ei32_v_u16m1(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u16m1_m(...) __riscv_vloxseg3ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u16m2(...) __riscv_vloxseg3ei32_v_u16m2(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u16m2_m(...) __riscv_vloxseg3ei32_v_u16m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u16mf2(...) __riscv_vloxseg3ei32_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u16mf2_m(...) __riscv_vloxseg3ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u16mf4(...) __riscv_vloxseg3ei32_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u16mf4_m(...) __riscv_vloxseg3ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u32m1(...) __riscv_vloxseg3ei32_v_u32m1(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u32m1_m(...) __riscv_vloxseg3ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u32m2(...) __riscv_vloxseg3ei32_v_u32m2(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u32m2_m(...) __riscv_vloxseg3ei32_v_u32m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u32mf2(...) __riscv_vloxseg3ei32_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u32mf2_m(...) __riscv_vloxseg3ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u64m1(...) __riscv_vloxseg3ei32_v_u64m1(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u64m1_m(...) __riscv_vloxseg3ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u64m2(...) __riscv_vloxseg3ei32_v_u64m2(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u64m2_m(...) __riscv_vloxseg3ei32_v_u64m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u8m1(...) __riscv_vloxseg3ei32_v_u8m1(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u8m1_m(...) __riscv_vloxseg3ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u8m2(...) __riscv_vloxseg3ei32_v_u8m2(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u8m2_m(...) __riscv_vloxseg3ei32_v_u8m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u8mf2(...) __riscv_vloxseg3ei32_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u8mf2_m(...) __riscv_vloxseg3ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u8mf4(...) __riscv_vloxseg3ei32_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u8mf4_m(...) __riscv_vloxseg3ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u8mf8(...) __riscv_vloxseg3ei32_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg3ei32_v_u8mf8_m(...) __riscv_vloxseg3ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_f16m1(...) __riscv_vloxseg3ei64_v_f16m1(__VA_ARGS__) |
| #define | vloxseg3ei64_v_f16m1_m(...) __riscv_vloxseg3ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_f16m2(...) __riscv_vloxseg3ei64_v_f16m2(__VA_ARGS__) |
| #define | vloxseg3ei64_v_f16m2_m(...) __riscv_vloxseg3ei64_v_f16m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_f16mf2(...) __riscv_vloxseg3ei64_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg3ei64_v_f16mf2_m(...) __riscv_vloxseg3ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_f16mf4(...) __riscv_vloxseg3ei64_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg3ei64_v_f16mf4_m(...) __riscv_vloxseg3ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_f32m1(...) __riscv_vloxseg3ei64_v_f32m1(__VA_ARGS__) |
| #define | vloxseg3ei64_v_f32m1_m(...) __riscv_vloxseg3ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_f32m2(...) __riscv_vloxseg3ei64_v_f32m2(__VA_ARGS__) |
| #define | vloxseg3ei64_v_f32m2_m(...) __riscv_vloxseg3ei64_v_f32m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_f32mf2(...) __riscv_vloxseg3ei64_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg3ei64_v_f32mf2_m(...) __riscv_vloxseg3ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_f64m1(...) __riscv_vloxseg3ei64_v_f64m1(__VA_ARGS__) |
| #define | vloxseg3ei64_v_f64m1_m(...) __riscv_vloxseg3ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_f64m2(...) __riscv_vloxseg3ei64_v_f64m2(__VA_ARGS__) |
| #define | vloxseg3ei64_v_f64m2_m(...) __riscv_vloxseg3ei64_v_f64m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i16m1(...) __riscv_vloxseg3ei64_v_i16m1(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i16m1_m(...) __riscv_vloxseg3ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i16m2(...) __riscv_vloxseg3ei64_v_i16m2(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i16m2_m(...) __riscv_vloxseg3ei64_v_i16m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i16mf2(...) __riscv_vloxseg3ei64_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i16mf2_m(...) __riscv_vloxseg3ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i16mf4(...) __riscv_vloxseg3ei64_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i16mf4_m(...) __riscv_vloxseg3ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i32m1(...) __riscv_vloxseg3ei64_v_i32m1(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i32m1_m(...) __riscv_vloxseg3ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i32m2(...) __riscv_vloxseg3ei64_v_i32m2(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i32m2_m(...) __riscv_vloxseg3ei64_v_i32m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i32mf2(...) __riscv_vloxseg3ei64_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i32mf2_m(...) __riscv_vloxseg3ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i64m1(...) __riscv_vloxseg3ei64_v_i64m1(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i64m1_m(...) __riscv_vloxseg3ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i64m2(...) __riscv_vloxseg3ei64_v_i64m2(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i64m2_m(...) __riscv_vloxseg3ei64_v_i64m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i8m1(...) __riscv_vloxseg3ei64_v_i8m1(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i8m1_m(...) __riscv_vloxseg3ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i8mf2(...) __riscv_vloxseg3ei64_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i8mf2_m(...) __riscv_vloxseg3ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i8mf4(...) __riscv_vloxseg3ei64_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i8mf4_m(...) __riscv_vloxseg3ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i8mf8(...) __riscv_vloxseg3ei64_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg3ei64_v_i8mf8_m(...) __riscv_vloxseg3ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u16m1(...) __riscv_vloxseg3ei64_v_u16m1(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u16m1_m(...) __riscv_vloxseg3ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u16m2(...) __riscv_vloxseg3ei64_v_u16m2(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u16m2_m(...) __riscv_vloxseg3ei64_v_u16m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u16mf2(...) __riscv_vloxseg3ei64_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u16mf2_m(...) __riscv_vloxseg3ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u16mf4(...) __riscv_vloxseg3ei64_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u16mf4_m(...) __riscv_vloxseg3ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u32m1(...) __riscv_vloxseg3ei64_v_u32m1(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u32m1_m(...) __riscv_vloxseg3ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u32m2(...) __riscv_vloxseg3ei64_v_u32m2(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u32m2_m(...) __riscv_vloxseg3ei64_v_u32m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u32mf2(...) __riscv_vloxseg3ei64_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u32mf2_m(...) __riscv_vloxseg3ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u64m1(...) __riscv_vloxseg3ei64_v_u64m1(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u64m1_m(...) __riscv_vloxseg3ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u64m2(...) __riscv_vloxseg3ei64_v_u64m2(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u64m2_m(...) __riscv_vloxseg3ei64_v_u64m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u8m1(...) __riscv_vloxseg3ei64_v_u8m1(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u8m1_m(...) __riscv_vloxseg3ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u8mf2(...) __riscv_vloxseg3ei64_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u8mf2_m(...) __riscv_vloxseg3ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u8mf4(...) __riscv_vloxseg3ei64_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u8mf4_m(...) __riscv_vloxseg3ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u8mf8(...) __riscv_vloxseg3ei64_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg3ei64_v_u8mf8_m(...) __riscv_vloxseg3ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_f16m1(...) __riscv_vloxseg3ei8_v_f16m1(__VA_ARGS__) |
| #define | vloxseg3ei8_v_f16m1_m(...) __riscv_vloxseg3ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_f16m2(...) __riscv_vloxseg3ei8_v_f16m2(__VA_ARGS__) |
| #define | vloxseg3ei8_v_f16m2_m(...) __riscv_vloxseg3ei8_v_f16m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_f16mf2(...) __riscv_vloxseg3ei8_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg3ei8_v_f16mf2_m(...) __riscv_vloxseg3ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_f16mf4(...) __riscv_vloxseg3ei8_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg3ei8_v_f16mf4_m(...) __riscv_vloxseg3ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_f32m1(...) __riscv_vloxseg3ei8_v_f32m1(__VA_ARGS__) |
| #define | vloxseg3ei8_v_f32m1_m(...) __riscv_vloxseg3ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_f32m2(...) __riscv_vloxseg3ei8_v_f32m2(__VA_ARGS__) |
| #define | vloxseg3ei8_v_f32m2_m(...) __riscv_vloxseg3ei8_v_f32m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_f32mf2(...) __riscv_vloxseg3ei8_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg3ei8_v_f32mf2_m(...) __riscv_vloxseg3ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_f64m1(...) __riscv_vloxseg3ei8_v_f64m1(__VA_ARGS__) |
| #define | vloxseg3ei8_v_f64m1_m(...) __riscv_vloxseg3ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_f64m2(...) __riscv_vloxseg3ei8_v_f64m2(__VA_ARGS__) |
| #define | vloxseg3ei8_v_f64m2_m(...) __riscv_vloxseg3ei8_v_f64m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i16m1(...) __riscv_vloxseg3ei8_v_i16m1(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i16m1_m(...) __riscv_vloxseg3ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i16m2(...) __riscv_vloxseg3ei8_v_i16m2(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i16m2_m(...) __riscv_vloxseg3ei8_v_i16m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i16mf2(...) __riscv_vloxseg3ei8_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i16mf2_m(...) __riscv_vloxseg3ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i16mf4(...) __riscv_vloxseg3ei8_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i16mf4_m(...) __riscv_vloxseg3ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i32m1(...) __riscv_vloxseg3ei8_v_i32m1(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i32m1_m(...) __riscv_vloxseg3ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i32m2(...) __riscv_vloxseg3ei8_v_i32m2(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i32m2_m(...) __riscv_vloxseg3ei8_v_i32m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i32mf2(...) __riscv_vloxseg3ei8_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i32mf2_m(...) __riscv_vloxseg3ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i64m1(...) __riscv_vloxseg3ei8_v_i64m1(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i64m1_m(...) __riscv_vloxseg3ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i64m2(...) __riscv_vloxseg3ei8_v_i64m2(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i64m2_m(...) __riscv_vloxseg3ei8_v_i64m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i8m1(...) __riscv_vloxseg3ei8_v_i8m1(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i8m1_m(...) __riscv_vloxseg3ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i8m2(...) __riscv_vloxseg3ei8_v_i8m2(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i8m2_m(...) __riscv_vloxseg3ei8_v_i8m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i8mf2(...) __riscv_vloxseg3ei8_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i8mf2_m(...) __riscv_vloxseg3ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i8mf4(...) __riscv_vloxseg3ei8_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i8mf4_m(...) __riscv_vloxseg3ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i8mf8(...) __riscv_vloxseg3ei8_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg3ei8_v_i8mf8_m(...) __riscv_vloxseg3ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u16m1(...) __riscv_vloxseg3ei8_v_u16m1(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u16m1_m(...) __riscv_vloxseg3ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u16m2(...) __riscv_vloxseg3ei8_v_u16m2(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u16m2_m(...) __riscv_vloxseg3ei8_v_u16m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u16mf2(...) __riscv_vloxseg3ei8_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u16mf2_m(...) __riscv_vloxseg3ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u16mf4(...) __riscv_vloxseg3ei8_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u16mf4_m(...) __riscv_vloxseg3ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u32m1(...) __riscv_vloxseg3ei8_v_u32m1(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u32m1_m(...) __riscv_vloxseg3ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u32m2(...) __riscv_vloxseg3ei8_v_u32m2(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u32m2_m(...) __riscv_vloxseg3ei8_v_u32m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u32mf2(...) __riscv_vloxseg3ei8_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u32mf2_m(...) __riscv_vloxseg3ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u64m1(...) __riscv_vloxseg3ei8_v_u64m1(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u64m1_m(...) __riscv_vloxseg3ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u64m2(...) __riscv_vloxseg3ei8_v_u64m2(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u64m2_m(...) __riscv_vloxseg3ei8_v_u64m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u8m1(...) __riscv_vloxseg3ei8_v_u8m1(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u8m1_m(...) __riscv_vloxseg3ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u8m2(...) __riscv_vloxseg3ei8_v_u8m2(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u8m2_m(...) __riscv_vloxseg3ei8_v_u8m2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u8mf2(...) __riscv_vloxseg3ei8_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u8mf2_m(...) __riscv_vloxseg3ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u8mf4(...) __riscv_vloxseg3ei8_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u8mf4_m(...) __riscv_vloxseg3ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u8mf8(...) __riscv_vloxseg3ei8_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg3ei8_v_u8mf8_m(...) __riscv_vloxseg3ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_f16m1(...) __riscv_vloxseg4ei16_v_f16m1(__VA_ARGS__) |
| #define | vloxseg4ei16_v_f16m1_m(...) __riscv_vloxseg4ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_f16m2(...) __riscv_vloxseg4ei16_v_f16m2(__VA_ARGS__) |
| #define | vloxseg4ei16_v_f16m2_m(...) __riscv_vloxseg4ei16_v_f16m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_f16mf2(...) __riscv_vloxseg4ei16_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg4ei16_v_f16mf2_m(...) __riscv_vloxseg4ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_f16mf4(...) __riscv_vloxseg4ei16_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg4ei16_v_f16mf4_m(...) __riscv_vloxseg4ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_f32m1(...) __riscv_vloxseg4ei16_v_f32m1(__VA_ARGS__) |
| #define | vloxseg4ei16_v_f32m1_m(...) __riscv_vloxseg4ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_f32m2(...) __riscv_vloxseg4ei16_v_f32m2(__VA_ARGS__) |
| #define | vloxseg4ei16_v_f32m2_m(...) __riscv_vloxseg4ei16_v_f32m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_f32mf2(...) __riscv_vloxseg4ei16_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg4ei16_v_f32mf2_m(...) __riscv_vloxseg4ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_f64m1(...) __riscv_vloxseg4ei16_v_f64m1(__VA_ARGS__) |
| #define | vloxseg4ei16_v_f64m1_m(...) __riscv_vloxseg4ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_f64m2(...) __riscv_vloxseg4ei16_v_f64m2(__VA_ARGS__) |
| #define | vloxseg4ei16_v_f64m2_m(...) __riscv_vloxseg4ei16_v_f64m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i16m1(...) __riscv_vloxseg4ei16_v_i16m1(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i16m1_m(...) __riscv_vloxseg4ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i16m2(...) __riscv_vloxseg4ei16_v_i16m2(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i16m2_m(...) __riscv_vloxseg4ei16_v_i16m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i16mf2(...) __riscv_vloxseg4ei16_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i16mf2_m(...) __riscv_vloxseg4ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i16mf4(...) __riscv_vloxseg4ei16_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i16mf4_m(...) __riscv_vloxseg4ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i32m1(...) __riscv_vloxseg4ei16_v_i32m1(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i32m1_m(...) __riscv_vloxseg4ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i32m2(...) __riscv_vloxseg4ei16_v_i32m2(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i32m2_m(...) __riscv_vloxseg4ei16_v_i32m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i32mf2(...) __riscv_vloxseg4ei16_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i32mf2_m(...) __riscv_vloxseg4ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i64m1(...) __riscv_vloxseg4ei16_v_i64m1(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i64m1_m(...) __riscv_vloxseg4ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i64m2(...) __riscv_vloxseg4ei16_v_i64m2(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i64m2_m(...) __riscv_vloxseg4ei16_v_i64m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i8m1(...) __riscv_vloxseg4ei16_v_i8m1(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i8m1_m(...) __riscv_vloxseg4ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i8m2(...) __riscv_vloxseg4ei16_v_i8m2(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i8m2_m(...) __riscv_vloxseg4ei16_v_i8m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i8mf2(...) __riscv_vloxseg4ei16_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i8mf2_m(...) __riscv_vloxseg4ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i8mf4(...) __riscv_vloxseg4ei16_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i8mf4_m(...) __riscv_vloxseg4ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i8mf8(...) __riscv_vloxseg4ei16_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg4ei16_v_i8mf8_m(...) __riscv_vloxseg4ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u16m1(...) __riscv_vloxseg4ei16_v_u16m1(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u16m1_m(...) __riscv_vloxseg4ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u16m2(...) __riscv_vloxseg4ei16_v_u16m2(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u16m2_m(...) __riscv_vloxseg4ei16_v_u16m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u16mf2(...) __riscv_vloxseg4ei16_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u16mf2_m(...) __riscv_vloxseg4ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u16mf4(...) __riscv_vloxseg4ei16_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u16mf4_m(...) __riscv_vloxseg4ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u32m1(...) __riscv_vloxseg4ei16_v_u32m1(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u32m1_m(...) __riscv_vloxseg4ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u32m2(...) __riscv_vloxseg4ei16_v_u32m2(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u32m2_m(...) __riscv_vloxseg4ei16_v_u32m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u32mf2(...) __riscv_vloxseg4ei16_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u32mf2_m(...) __riscv_vloxseg4ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u64m1(...) __riscv_vloxseg4ei16_v_u64m1(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u64m1_m(...) __riscv_vloxseg4ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u64m2(...) __riscv_vloxseg4ei16_v_u64m2(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u64m2_m(...) __riscv_vloxseg4ei16_v_u64m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u8m1(...) __riscv_vloxseg4ei16_v_u8m1(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u8m1_m(...) __riscv_vloxseg4ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u8m2(...) __riscv_vloxseg4ei16_v_u8m2(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u8m2_m(...) __riscv_vloxseg4ei16_v_u8m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u8mf2(...) __riscv_vloxseg4ei16_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u8mf2_m(...) __riscv_vloxseg4ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u8mf4(...) __riscv_vloxseg4ei16_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u8mf4_m(...) __riscv_vloxseg4ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u8mf8(...) __riscv_vloxseg4ei16_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg4ei16_v_u8mf8_m(...) __riscv_vloxseg4ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_f16m1(...) __riscv_vloxseg4ei32_v_f16m1(__VA_ARGS__) |
| #define | vloxseg4ei32_v_f16m1_m(...) __riscv_vloxseg4ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_f16m2(...) __riscv_vloxseg4ei32_v_f16m2(__VA_ARGS__) |
| #define | vloxseg4ei32_v_f16m2_m(...) __riscv_vloxseg4ei32_v_f16m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_f16mf2(...) __riscv_vloxseg4ei32_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg4ei32_v_f16mf2_m(...) __riscv_vloxseg4ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_f16mf4(...) __riscv_vloxseg4ei32_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg4ei32_v_f16mf4_m(...) __riscv_vloxseg4ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_f32m1(...) __riscv_vloxseg4ei32_v_f32m1(__VA_ARGS__) |
| #define | vloxseg4ei32_v_f32m1_m(...) __riscv_vloxseg4ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_f32m2(...) __riscv_vloxseg4ei32_v_f32m2(__VA_ARGS__) |
| #define | vloxseg4ei32_v_f32m2_m(...) __riscv_vloxseg4ei32_v_f32m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_f32mf2(...) __riscv_vloxseg4ei32_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg4ei32_v_f32mf2_m(...) __riscv_vloxseg4ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_f64m1(...) __riscv_vloxseg4ei32_v_f64m1(__VA_ARGS__) |
| #define | vloxseg4ei32_v_f64m1_m(...) __riscv_vloxseg4ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_f64m2(...) __riscv_vloxseg4ei32_v_f64m2(__VA_ARGS__) |
| #define | vloxseg4ei32_v_f64m2_m(...) __riscv_vloxseg4ei32_v_f64m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i16m1(...) __riscv_vloxseg4ei32_v_i16m1(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i16m1_m(...) __riscv_vloxseg4ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i16m2(...) __riscv_vloxseg4ei32_v_i16m2(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i16m2_m(...) __riscv_vloxseg4ei32_v_i16m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i16mf2(...) __riscv_vloxseg4ei32_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i16mf2_m(...) __riscv_vloxseg4ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i16mf4(...) __riscv_vloxseg4ei32_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i16mf4_m(...) __riscv_vloxseg4ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i32m1(...) __riscv_vloxseg4ei32_v_i32m1(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i32m1_m(...) __riscv_vloxseg4ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i32m2(...) __riscv_vloxseg4ei32_v_i32m2(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i32m2_m(...) __riscv_vloxseg4ei32_v_i32m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i32mf2(...) __riscv_vloxseg4ei32_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i32mf2_m(...) __riscv_vloxseg4ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i64m1(...) __riscv_vloxseg4ei32_v_i64m1(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i64m1_m(...) __riscv_vloxseg4ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i64m2(...) __riscv_vloxseg4ei32_v_i64m2(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i64m2_m(...) __riscv_vloxseg4ei32_v_i64m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i8m1(...) __riscv_vloxseg4ei32_v_i8m1(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i8m1_m(...) __riscv_vloxseg4ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i8m2(...) __riscv_vloxseg4ei32_v_i8m2(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i8m2_m(...) __riscv_vloxseg4ei32_v_i8m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i8mf2(...) __riscv_vloxseg4ei32_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i8mf2_m(...) __riscv_vloxseg4ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i8mf4(...) __riscv_vloxseg4ei32_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i8mf4_m(...) __riscv_vloxseg4ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i8mf8(...) __riscv_vloxseg4ei32_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg4ei32_v_i8mf8_m(...) __riscv_vloxseg4ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u16m1(...) __riscv_vloxseg4ei32_v_u16m1(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u16m1_m(...) __riscv_vloxseg4ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u16m2(...) __riscv_vloxseg4ei32_v_u16m2(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u16m2_m(...) __riscv_vloxseg4ei32_v_u16m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u16mf2(...) __riscv_vloxseg4ei32_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u16mf2_m(...) __riscv_vloxseg4ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u16mf4(...) __riscv_vloxseg4ei32_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u16mf4_m(...) __riscv_vloxseg4ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u32m1(...) __riscv_vloxseg4ei32_v_u32m1(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u32m1_m(...) __riscv_vloxseg4ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u32m2(...) __riscv_vloxseg4ei32_v_u32m2(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u32m2_m(...) __riscv_vloxseg4ei32_v_u32m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u32mf2(...) __riscv_vloxseg4ei32_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u32mf2_m(...) __riscv_vloxseg4ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u64m1(...) __riscv_vloxseg4ei32_v_u64m1(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u64m1_m(...) __riscv_vloxseg4ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u64m2(...) __riscv_vloxseg4ei32_v_u64m2(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u64m2_m(...) __riscv_vloxseg4ei32_v_u64m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u8m1(...) __riscv_vloxseg4ei32_v_u8m1(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u8m1_m(...) __riscv_vloxseg4ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u8m2(...) __riscv_vloxseg4ei32_v_u8m2(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u8m2_m(...) __riscv_vloxseg4ei32_v_u8m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u8mf2(...) __riscv_vloxseg4ei32_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u8mf2_m(...) __riscv_vloxseg4ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u8mf4(...) __riscv_vloxseg4ei32_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u8mf4_m(...) __riscv_vloxseg4ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u8mf8(...) __riscv_vloxseg4ei32_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg4ei32_v_u8mf8_m(...) __riscv_vloxseg4ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_f16m1(...) __riscv_vloxseg4ei64_v_f16m1(__VA_ARGS__) |
| #define | vloxseg4ei64_v_f16m1_m(...) __riscv_vloxseg4ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_f16m2(...) __riscv_vloxseg4ei64_v_f16m2(__VA_ARGS__) |
| #define | vloxseg4ei64_v_f16m2_m(...) __riscv_vloxseg4ei64_v_f16m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_f16mf2(...) __riscv_vloxseg4ei64_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg4ei64_v_f16mf2_m(...) __riscv_vloxseg4ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_f16mf4(...) __riscv_vloxseg4ei64_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg4ei64_v_f16mf4_m(...) __riscv_vloxseg4ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_f32m1(...) __riscv_vloxseg4ei64_v_f32m1(__VA_ARGS__) |
| #define | vloxseg4ei64_v_f32m1_m(...) __riscv_vloxseg4ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_f32m2(...) __riscv_vloxseg4ei64_v_f32m2(__VA_ARGS__) |
| #define | vloxseg4ei64_v_f32m2_m(...) __riscv_vloxseg4ei64_v_f32m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_f32mf2(...) __riscv_vloxseg4ei64_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg4ei64_v_f32mf2_m(...) __riscv_vloxseg4ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_f64m1(...) __riscv_vloxseg4ei64_v_f64m1(__VA_ARGS__) |
| #define | vloxseg4ei64_v_f64m1_m(...) __riscv_vloxseg4ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_f64m2(...) __riscv_vloxseg4ei64_v_f64m2(__VA_ARGS__) |
| #define | vloxseg4ei64_v_f64m2_m(...) __riscv_vloxseg4ei64_v_f64m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i16m1(...) __riscv_vloxseg4ei64_v_i16m1(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i16m1_m(...) __riscv_vloxseg4ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i16m2(...) __riscv_vloxseg4ei64_v_i16m2(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i16m2_m(...) __riscv_vloxseg4ei64_v_i16m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i16mf2(...) __riscv_vloxseg4ei64_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i16mf2_m(...) __riscv_vloxseg4ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i16mf4(...) __riscv_vloxseg4ei64_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i16mf4_m(...) __riscv_vloxseg4ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i32m1(...) __riscv_vloxseg4ei64_v_i32m1(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i32m1_m(...) __riscv_vloxseg4ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i32m2(...) __riscv_vloxseg4ei64_v_i32m2(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i32m2_m(...) __riscv_vloxseg4ei64_v_i32m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i32mf2(...) __riscv_vloxseg4ei64_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i32mf2_m(...) __riscv_vloxseg4ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i64m1(...) __riscv_vloxseg4ei64_v_i64m1(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i64m1_m(...) __riscv_vloxseg4ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i64m2(...) __riscv_vloxseg4ei64_v_i64m2(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i64m2_m(...) __riscv_vloxseg4ei64_v_i64m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i8m1(...) __riscv_vloxseg4ei64_v_i8m1(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i8m1_m(...) __riscv_vloxseg4ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i8mf2(...) __riscv_vloxseg4ei64_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i8mf2_m(...) __riscv_vloxseg4ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i8mf4(...) __riscv_vloxseg4ei64_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i8mf4_m(...) __riscv_vloxseg4ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i8mf8(...) __riscv_vloxseg4ei64_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg4ei64_v_i8mf8_m(...) __riscv_vloxseg4ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u16m1(...) __riscv_vloxseg4ei64_v_u16m1(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u16m1_m(...) __riscv_vloxseg4ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u16m2(...) __riscv_vloxseg4ei64_v_u16m2(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u16m2_m(...) __riscv_vloxseg4ei64_v_u16m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u16mf2(...) __riscv_vloxseg4ei64_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u16mf2_m(...) __riscv_vloxseg4ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u16mf4(...) __riscv_vloxseg4ei64_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u16mf4_m(...) __riscv_vloxseg4ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u32m1(...) __riscv_vloxseg4ei64_v_u32m1(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u32m1_m(...) __riscv_vloxseg4ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u32m2(...) __riscv_vloxseg4ei64_v_u32m2(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u32m2_m(...) __riscv_vloxseg4ei64_v_u32m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u32mf2(...) __riscv_vloxseg4ei64_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u32mf2_m(...) __riscv_vloxseg4ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u64m1(...) __riscv_vloxseg4ei64_v_u64m1(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u64m1_m(...) __riscv_vloxseg4ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u64m2(...) __riscv_vloxseg4ei64_v_u64m2(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u64m2_m(...) __riscv_vloxseg4ei64_v_u64m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u8m1(...) __riscv_vloxseg4ei64_v_u8m1(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u8m1_m(...) __riscv_vloxseg4ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u8mf2(...) __riscv_vloxseg4ei64_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u8mf2_m(...) __riscv_vloxseg4ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u8mf4(...) __riscv_vloxseg4ei64_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u8mf4_m(...) __riscv_vloxseg4ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u8mf8(...) __riscv_vloxseg4ei64_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg4ei64_v_u8mf8_m(...) __riscv_vloxseg4ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_f16m1(...) __riscv_vloxseg4ei8_v_f16m1(__VA_ARGS__) |
| #define | vloxseg4ei8_v_f16m1_m(...) __riscv_vloxseg4ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_f16m2(...) __riscv_vloxseg4ei8_v_f16m2(__VA_ARGS__) |
| #define | vloxseg4ei8_v_f16m2_m(...) __riscv_vloxseg4ei8_v_f16m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_f16mf2(...) __riscv_vloxseg4ei8_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg4ei8_v_f16mf2_m(...) __riscv_vloxseg4ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_f16mf4(...) __riscv_vloxseg4ei8_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg4ei8_v_f16mf4_m(...) __riscv_vloxseg4ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_f32m1(...) __riscv_vloxseg4ei8_v_f32m1(__VA_ARGS__) |
| #define | vloxseg4ei8_v_f32m1_m(...) __riscv_vloxseg4ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_f32m2(...) __riscv_vloxseg4ei8_v_f32m2(__VA_ARGS__) |
| #define | vloxseg4ei8_v_f32m2_m(...) __riscv_vloxseg4ei8_v_f32m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_f32mf2(...) __riscv_vloxseg4ei8_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg4ei8_v_f32mf2_m(...) __riscv_vloxseg4ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_f64m1(...) __riscv_vloxseg4ei8_v_f64m1(__VA_ARGS__) |
| #define | vloxseg4ei8_v_f64m1_m(...) __riscv_vloxseg4ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_f64m2(...) __riscv_vloxseg4ei8_v_f64m2(__VA_ARGS__) |
| #define | vloxseg4ei8_v_f64m2_m(...) __riscv_vloxseg4ei8_v_f64m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i16m1(...) __riscv_vloxseg4ei8_v_i16m1(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i16m1_m(...) __riscv_vloxseg4ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i16m2(...) __riscv_vloxseg4ei8_v_i16m2(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i16m2_m(...) __riscv_vloxseg4ei8_v_i16m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i16mf2(...) __riscv_vloxseg4ei8_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i16mf2_m(...) __riscv_vloxseg4ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i16mf4(...) __riscv_vloxseg4ei8_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i16mf4_m(...) __riscv_vloxseg4ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i32m1(...) __riscv_vloxseg4ei8_v_i32m1(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i32m1_m(...) __riscv_vloxseg4ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i32m2(...) __riscv_vloxseg4ei8_v_i32m2(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i32m2_m(...) __riscv_vloxseg4ei8_v_i32m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i32mf2(...) __riscv_vloxseg4ei8_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i32mf2_m(...) __riscv_vloxseg4ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i64m1(...) __riscv_vloxseg4ei8_v_i64m1(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i64m1_m(...) __riscv_vloxseg4ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i64m2(...) __riscv_vloxseg4ei8_v_i64m2(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i64m2_m(...) __riscv_vloxseg4ei8_v_i64m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i8m1(...) __riscv_vloxseg4ei8_v_i8m1(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i8m1_m(...) __riscv_vloxseg4ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i8m2(...) __riscv_vloxseg4ei8_v_i8m2(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i8m2_m(...) __riscv_vloxseg4ei8_v_i8m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i8mf2(...) __riscv_vloxseg4ei8_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i8mf2_m(...) __riscv_vloxseg4ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i8mf4(...) __riscv_vloxseg4ei8_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i8mf4_m(...) __riscv_vloxseg4ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i8mf8(...) __riscv_vloxseg4ei8_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg4ei8_v_i8mf8_m(...) __riscv_vloxseg4ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u16m1(...) __riscv_vloxseg4ei8_v_u16m1(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u16m1_m(...) __riscv_vloxseg4ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u16m2(...) __riscv_vloxseg4ei8_v_u16m2(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u16m2_m(...) __riscv_vloxseg4ei8_v_u16m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u16mf2(...) __riscv_vloxseg4ei8_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u16mf2_m(...) __riscv_vloxseg4ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u16mf4(...) __riscv_vloxseg4ei8_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u16mf4_m(...) __riscv_vloxseg4ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u32m1(...) __riscv_vloxseg4ei8_v_u32m1(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u32m1_m(...) __riscv_vloxseg4ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u32m2(...) __riscv_vloxseg4ei8_v_u32m2(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u32m2_m(...) __riscv_vloxseg4ei8_v_u32m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u32mf2(...) __riscv_vloxseg4ei8_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u32mf2_m(...) __riscv_vloxseg4ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u64m1(...) __riscv_vloxseg4ei8_v_u64m1(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u64m1_m(...) __riscv_vloxseg4ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u64m2(...) __riscv_vloxseg4ei8_v_u64m2(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u64m2_m(...) __riscv_vloxseg4ei8_v_u64m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u8m1(...) __riscv_vloxseg4ei8_v_u8m1(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u8m1_m(...) __riscv_vloxseg4ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u8m2(...) __riscv_vloxseg4ei8_v_u8m2(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u8m2_m(...) __riscv_vloxseg4ei8_v_u8m2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u8mf2(...) __riscv_vloxseg4ei8_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u8mf2_m(...) __riscv_vloxseg4ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u8mf4(...) __riscv_vloxseg4ei8_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u8mf4_m(...) __riscv_vloxseg4ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u8mf8(...) __riscv_vloxseg4ei8_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg4ei8_v_u8mf8_m(...) __riscv_vloxseg4ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_f16m1(...) __riscv_vloxseg5ei16_v_f16m1(__VA_ARGS__) |
| #define | vloxseg5ei16_v_f16m1_m(...) __riscv_vloxseg5ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_f16mf2(...) __riscv_vloxseg5ei16_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg5ei16_v_f16mf2_m(...) __riscv_vloxseg5ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_f16mf4(...) __riscv_vloxseg5ei16_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg5ei16_v_f16mf4_m(...) __riscv_vloxseg5ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_f32m1(...) __riscv_vloxseg5ei16_v_f32m1(__VA_ARGS__) |
| #define | vloxseg5ei16_v_f32m1_m(...) __riscv_vloxseg5ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_f32mf2(...) __riscv_vloxseg5ei16_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg5ei16_v_f32mf2_m(...) __riscv_vloxseg5ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_f64m1(...) __riscv_vloxseg5ei16_v_f64m1(__VA_ARGS__) |
| #define | vloxseg5ei16_v_f64m1_m(...) __riscv_vloxseg5ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_i16m1(...) __riscv_vloxseg5ei16_v_i16m1(__VA_ARGS__) |
| #define | vloxseg5ei16_v_i16m1_m(...) __riscv_vloxseg5ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_i16mf2(...) __riscv_vloxseg5ei16_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg5ei16_v_i16mf2_m(...) __riscv_vloxseg5ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_i16mf4(...) __riscv_vloxseg5ei16_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg5ei16_v_i16mf4_m(...) __riscv_vloxseg5ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_i32m1(...) __riscv_vloxseg5ei16_v_i32m1(__VA_ARGS__) |
| #define | vloxseg5ei16_v_i32m1_m(...) __riscv_vloxseg5ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_i32mf2(...) __riscv_vloxseg5ei16_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg5ei16_v_i32mf2_m(...) __riscv_vloxseg5ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_i64m1(...) __riscv_vloxseg5ei16_v_i64m1(__VA_ARGS__) |
| #define | vloxseg5ei16_v_i64m1_m(...) __riscv_vloxseg5ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_i8m1(...) __riscv_vloxseg5ei16_v_i8m1(__VA_ARGS__) |
| #define | vloxseg5ei16_v_i8m1_m(...) __riscv_vloxseg5ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_i8mf2(...) __riscv_vloxseg5ei16_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg5ei16_v_i8mf2_m(...) __riscv_vloxseg5ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_i8mf4(...) __riscv_vloxseg5ei16_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg5ei16_v_i8mf4_m(...) __riscv_vloxseg5ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_i8mf8(...) __riscv_vloxseg5ei16_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg5ei16_v_i8mf8_m(...) __riscv_vloxseg5ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_u16m1(...) __riscv_vloxseg5ei16_v_u16m1(__VA_ARGS__) |
| #define | vloxseg5ei16_v_u16m1_m(...) __riscv_vloxseg5ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_u16mf2(...) __riscv_vloxseg5ei16_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg5ei16_v_u16mf2_m(...) __riscv_vloxseg5ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_u16mf4(...) __riscv_vloxseg5ei16_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg5ei16_v_u16mf4_m(...) __riscv_vloxseg5ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_u32m1(...) __riscv_vloxseg5ei16_v_u32m1(__VA_ARGS__) |
| #define | vloxseg5ei16_v_u32m1_m(...) __riscv_vloxseg5ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_u32mf2(...) __riscv_vloxseg5ei16_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg5ei16_v_u32mf2_m(...) __riscv_vloxseg5ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_u64m1(...) __riscv_vloxseg5ei16_v_u64m1(__VA_ARGS__) |
| #define | vloxseg5ei16_v_u64m1_m(...) __riscv_vloxseg5ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_u8m1(...) __riscv_vloxseg5ei16_v_u8m1(__VA_ARGS__) |
| #define | vloxseg5ei16_v_u8m1_m(...) __riscv_vloxseg5ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_u8mf2(...) __riscv_vloxseg5ei16_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg5ei16_v_u8mf2_m(...) __riscv_vloxseg5ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_u8mf4(...) __riscv_vloxseg5ei16_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg5ei16_v_u8mf4_m(...) __riscv_vloxseg5ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg5ei16_v_u8mf8(...) __riscv_vloxseg5ei16_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg5ei16_v_u8mf8_m(...) __riscv_vloxseg5ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_f16m1(...) __riscv_vloxseg5ei32_v_f16m1(__VA_ARGS__) |
| #define | vloxseg5ei32_v_f16m1_m(...) __riscv_vloxseg5ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_f16mf2(...) __riscv_vloxseg5ei32_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg5ei32_v_f16mf2_m(...) __riscv_vloxseg5ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_f16mf4(...) __riscv_vloxseg5ei32_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg5ei32_v_f16mf4_m(...) __riscv_vloxseg5ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_f32m1(...) __riscv_vloxseg5ei32_v_f32m1(__VA_ARGS__) |
| #define | vloxseg5ei32_v_f32m1_m(...) __riscv_vloxseg5ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_f32mf2(...) __riscv_vloxseg5ei32_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg5ei32_v_f32mf2_m(...) __riscv_vloxseg5ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_f64m1(...) __riscv_vloxseg5ei32_v_f64m1(__VA_ARGS__) |
| #define | vloxseg5ei32_v_f64m1_m(...) __riscv_vloxseg5ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_i16m1(...) __riscv_vloxseg5ei32_v_i16m1(__VA_ARGS__) |
| #define | vloxseg5ei32_v_i16m1_m(...) __riscv_vloxseg5ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_i16mf2(...) __riscv_vloxseg5ei32_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg5ei32_v_i16mf2_m(...) __riscv_vloxseg5ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_i16mf4(...) __riscv_vloxseg5ei32_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg5ei32_v_i16mf4_m(...) __riscv_vloxseg5ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_i32m1(...) __riscv_vloxseg5ei32_v_i32m1(__VA_ARGS__) |
| #define | vloxseg5ei32_v_i32m1_m(...) __riscv_vloxseg5ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_i32mf2(...) __riscv_vloxseg5ei32_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg5ei32_v_i32mf2_m(...) __riscv_vloxseg5ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_i64m1(...) __riscv_vloxseg5ei32_v_i64m1(__VA_ARGS__) |
| #define | vloxseg5ei32_v_i64m1_m(...) __riscv_vloxseg5ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_i8m1(...) __riscv_vloxseg5ei32_v_i8m1(__VA_ARGS__) |
| #define | vloxseg5ei32_v_i8m1_m(...) __riscv_vloxseg5ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_i8mf2(...) __riscv_vloxseg5ei32_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg5ei32_v_i8mf2_m(...) __riscv_vloxseg5ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_i8mf4(...) __riscv_vloxseg5ei32_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg5ei32_v_i8mf4_m(...) __riscv_vloxseg5ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_i8mf8(...) __riscv_vloxseg5ei32_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg5ei32_v_i8mf8_m(...) __riscv_vloxseg5ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_u16m1(...) __riscv_vloxseg5ei32_v_u16m1(__VA_ARGS__) |
| #define | vloxseg5ei32_v_u16m1_m(...) __riscv_vloxseg5ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_u16mf2(...) __riscv_vloxseg5ei32_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg5ei32_v_u16mf2_m(...) __riscv_vloxseg5ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_u16mf4(...) __riscv_vloxseg5ei32_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg5ei32_v_u16mf4_m(...) __riscv_vloxseg5ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_u32m1(...) __riscv_vloxseg5ei32_v_u32m1(__VA_ARGS__) |
| #define | vloxseg5ei32_v_u32m1_m(...) __riscv_vloxseg5ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_u32mf2(...) __riscv_vloxseg5ei32_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg5ei32_v_u32mf2_m(...) __riscv_vloxseg5ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_u64m1(...) __riscv_vloxseg5ei32_v_u64m1(__VA_ARGS__) |
| #define | vloxseg5ei32_v_u64m1_m(...) __riscv_vloxseg5ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_u8m1(...) __riscv_vloxseg5ei32_v_u8m1(__VA_ARGS__) |
| #define | vloxseg5ei32_v_u8m1_m(...) __riscv_vloxseg5ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_u8mf2(...) __riscv_vloxseg5ei32_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg5ei32_v_u8mf2_m(...) __riscv_vloxseg5ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_u8mf4(...) __riscv_vloxseg5ei32_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg5ei32_v_u8mf4_m(...) __riscv_vloxseg5ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg5ei32_v_u8mf8(...) __riscv_vloxseg5ei32_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg5ei32_v_u8mf8_m(...) __riscv_vloxseg5ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_f16m1(...) __riscv_vloxseg5ei64_v_f16m1(__VA_ARGS__) |
| #define | vloxseg5ei64_v_f16m1_m(...) __riscv_vloxseg5ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_f16mf2(...) __riscv_vloxseg5ei64_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg5ei64_v_f16mf2_m(...) __riscv_vloxseg5ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_f16mf4(...) __riscv_vloxseg5ei64_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg5ei64_v_f16mf4_m(...) __riscv_vloxseg5ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_f32m1(...) __riscv_vloxseg5ei64_v_f32m1(__VA_ARGS__) |
| #define | vloxseg5ei64_v_f32m1_m(...) __riscv_vloxseg5ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_f32mf2(...) __riscv_vloxseg5ei64_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg5ei64_v_f32mf2_m(...) __riscv_vloxseg5ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_f64m1(...) __riscv_vloxseg5ei64_v_f64m1(__VA_ARGS__) |
| #define | vloxseg5ei64_v_f64m1_m(...) __riscv_vloxseg5ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_i16m1(...) __riscv_vloxseg5ei64_v_i16m1(__VA_ARGS__) |
| #define | vloxseg5ei64_v_i16m1_m(...) __riscv_vloxseg5ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_i16mf2(...) __riscv_vloxseg5ei64_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg5ei64_v_i16mf2_m(...) __riscv_vloxseg5ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_i16mf4(...) __riscv_vloxseg5ei64_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg5ei64_v_i16mf4_m(...) __riscv_vloxseg5ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_i32m1(...) __riscv_vloxseg5ei64_v_i32m1(__VA_ARGS__) |
| #define | vloxseg5ei64_v_i32m1_m(...) __riscv_vloxseg5ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_i32mf2(...) __riscv_vloxseg5ei64_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg5ei64_v_i32mf2_m(...) __riscv_vloxseg5ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_i64m1(...) __riscv_vloxseg5ei64_v_i64m1(__VA_ARGS__) |
| #define | vloxseg5ei64_v_i64m1_m(...) __riscv_vloxseg5ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_i8m1(...) __riscv_vloxseg5ei64_v_i8m1(__VA_ARGS__) |
| #define | vloxseg5ei64_v_i8m1_m(...) __riscv_vloxseg5ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_i8mf2(...) __riscv_vloxseg5ei64_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg5ei64_v_i8mf2_m(...) __riscv_vloxseg5ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_i8mf4(...) __riscv_vloxseg5ei64_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg5ei64_v_i8mf4_m(...) __riscv_vloxseg5ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_i8mf8(...) __riscv_vloxseg5ei64_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg5ei64_v_i8mf8_m(...) __riscv_vloxseg5ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_u16m1(...) __riscv_vloxseg5ei64_v_u16m1(__VA_ARGS__) |
| #define | vloxseg5ei64_v_u16m1_m(...) __riscv_vloxseg5ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_u16mf2(...) __riscv_vloxseg5ei64_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg5ei64_v_u16mf2_m(...) __riscv_vloxseg5ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_u16mf4(...) __riscv_vloxseg5ei64_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg5ei64_v_u16mf4_m(...) __riscv_vloxseg5ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_u32m1(...) __riscv_vloxseg5ei64_v_u32m1(__VA_ARGS__) |
| #define | vloxseg5ei64_v_u32m1_m(...) __riscv_vloxseg5ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_u32mf2(...) __riscv_vloxseg5ei64_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg5ei64_v_u32mf2_m(...) __riscv_vloxseg5ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_u64m1(...) __riscv_vloxseg5ei64_v_u64m1(__VA_ARGS__) |
| #define | vloxseg5ei64_v_u64m1_m(...) __riscv_vloxseg5ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_u8m1(...) __riscv_vloxseg5ei64_v_u8m1(__VA_ARGS__) |
| #define | vloxseg5ei64_v_u8m1_m(...) __riscv_vloxseg5ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_u8mf2(...) __riscv_vloxseg5ei64_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg5ei64_v_u8mf2_m(...) __riscv_vloxseg5ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_u8mf4(...) __riscv_vloxseg5ei64_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg5ei64_v_u8mf4_m(...) __riscv_vloxseg5ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg5ei64_v_u8mf8(...) __riscv_vloxseg5ei64_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg5ei64_v_u8mf8_m(...) __riscv_vloxseg5ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_f16m1(...) __riscv_vloxseg5ei8_v_f16m1(__VA_ARGS__) |
| #define | vloxseg5ei8_v_f16m1_m(...) __riscv_vloxseg5ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_f16mf2(...) __riscv_vloxseg5ei8_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg5ei8_v_f16mf2_m(...) __riscv_vloxseg5ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_f16mf4(...) __riscv_vloxseg5ei8_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg5ei8_v_f16mf4_m(...) __riscv_vloxseg5ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_f32m1(...) __riscv_vloxseg5ei8_v_f32m1(__VA_ARGS__) |
| #define | vloxseg5ei8_v_f32m1_m(...) __riscv_vloxseg5ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_f32mf2(...) __riscv_vloxseg5ei8_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg5ei8_v_f32mf2_m(...) __riscv_vloxseg5ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_f64m1(...) __riscv_vloxseg5ei8_v_f64m1(__VA_ARGS__) |
| #define | vloxseg5ei8_v_f64m1_m(...) __riscv_vloxseg5ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_i16m1(...) __riscv_vloxseg5ei8_v_i16m1(__VA_ARGS__) |
| #define | vloxseg5ei8_v_i16m1_m(...) __riscv_vloxseg5ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_i16mf2(...) __riscv_vloxseg5ei8_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg5ei8_v_i16mf2_m(...) __riscv_vloxseg5ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_i16mf4(...) __riscv_vloxseg5ei8_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg5ei8_v_i16mf4_m(...) __riscv_vloxseg5ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_i32m1(...) __riscv_vloxseg5ei8_v_i32m1(__VA_ARGS__) |
| #define | vloxseg5ei8_v_i32m1_m(...) __riscv_vloxseg5ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_i32mf2(...) __riscv_vloxseg5ei8_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg5ei8_v_i32mf2_m(...) __riscv_vloxseg5ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_i64m1(...) __riscv_vloxseg5ei8_v_i64m1(__VA_ARGS__) |
| #define | vloxseg5ei8_v_i64m1_m(...) __riscv_vloxseg5ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_i8m1(...) __riscv_vloxseg5ei8_v_i8m1(__VA_ARGS__) |
| #define | vloxseg5ei8_v_i8m1_m(...) __riscv_vloxseg5ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_i8mf2(...) __riscv_vloxseg5ei8_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg5ei8_v_i8mf2_m(...) __riscv_vloxseg5ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_i8mf4(...) __riscv_vloxseg5ei8_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg5ei8_v_i8mf4_m(...) __riscv_vloxseg5ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_i8mf8(...) __riscv_vloxseg5ei8_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg5ei8_v_i8mf8_m(...) __riscv_vloxseg5ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_u16m1(...) __riscv_vloxseg5ei8_v_u16m1(__VA_ARGS__) |
| #define | vloxseg5ei8_v_u16m1_m(...) __riscv_vloxseg5ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_u16mf2(...) __riscv_vloxseg5ei8_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg5ei8_v_u16mf2_m(...) __riscv_vloxseg5ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_u16mf4(...) __riscv_vloxseg5ei8_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg5ei8_v_u16mf4_m(...) __riscv_vloxseg5ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_u32m1(...) __riscv_vloxseg5ei8_v_u32m1(__VA_ARGS__) |
| #define | vloxseg5ei8_v_u32m1_m(...) __riscv_vloxseg5ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_u32mf2(...) __riscv_vloxseg5ei8_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg5ei8_v_u32mf2_m(...) __riscv_vloxseg5ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_u64m1(...) __riscv_vloxseg5ei8_v_u64m1(__VA_ARGS__) |
| #define | vloxseg5ei8_v_u64m1_m(...) __riscv_vloxseg5ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_u8m1(...) __riscv_vloxseg5ei8_v_u8m1(__VA_ARGS__) |
| #define | vloxseg5ei8_v_u8m1_m(...) __riscv_vloxseg5ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_u8mf2(...) __riscv_vloxseg5ei8_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg5ei8_v_u8mf2_m(...) __riscv_vloxseg5ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_u8mf4(...) __riscv_vloxseg5ei8_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg5ei8_v_u8mf4_m(...) __riscv_vloxseg5ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg5ei8_v_u8mf8(...) __riscv_vloxseg5ei8_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg5ei8_v_u8mf8_m(...) __riscv_vloxseg5ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_f16m1(...) __riscv_vloxseg6ei16_v_f16m1(__VA_ARGS__) |
| #define | vloxseg6ei16_v_f16m1_m(...) __riscv_vloxseg6ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_f16mf2(...) __riscv_vloxseg6ei16_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg6ei16_v_f16mf2_m(...) __riscv_vloxseg6ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_f16mf4(...) __riscv_vloxseg6ei16_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg6ei16_v_f16mf4_m(...) __riscv_vloxseg6ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_f32m1(...) __riscv_vloxseg6ei16_v_f32m1(__VA_ARGS__) |
| #define | vloxseg6ei16_v_f32m1_m(...) __riscv_vloxseg6ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_f32mf2(...) __riscv_vloxseg6ei16_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg6ei16_v_f32mf2_m(...) __riscv_vloxseg6ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_f64m1(...) __riscv_vloxseg6ei16_v_f64m1(__VA_ARGS__) |
| #define | vloxseg6ei16_v_f64m1_m(...) __riscv_vloxseg6ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_i16m1(...) __riscv_vloxseg6ei16_v_i16m1(__VA_ARGS__) |
| #define | vloxseg6ei16_v_i16m1_m(...) __riscv_vloxseg6ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_i16mf2(...) __riscv_vloxseg6ei16_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg6ei16_v_i16mf2_m(...) __riscv_vloxseg6ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_i16mf4(...) __riscv_vloxseg6ei16_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg6ei16_v_i16mf4_m(...) __riscv_vloxseg6ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_i32m1(...) __riscv_vloxseg6ei16_v_i32m1(__VA_ARGS__) |
| #define | vloxseg6ei16_v_i32m1_m(...) __riscv_vloxseg6ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_i32mf2(...) __riscv_vloxseg6ei16_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg6ei16_v_i32mf2_m(...) __riscv_vloxseg6ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_i64m1(...) __riscv_vloxseg6ei16_v_i64m1(__VA_ARGS__) |
| #define | vloxseg6ei16_v_i64m1_m(...) __riscv_vloxseg6ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_i8m1(...) __riscv_vloxseg6ei16_v_i8m1(__VA_ARGS__) |
| #define | vloxseg6ei16_v_i8m1_m(...) __riscv_vloxseg6ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_i8mf2(...) __riscv_vloxseg6ei16_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg6ei16_v_i8mf2_m(...) __riscv_vloxseg6ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_i8mf4(...) __riscv_vloxseg6ei16_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg6ei16_v_i8mf4_m(...) __riscv_vloxseg6ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_i8mf8(...) __riscv_vloxseg6ei16_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg6ei16_v_i8mf8_m(...) __riscv_vloxseg6ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_u16m1(...) __riscv_vloxseg6ei16_v_u16m1(__VA_ARGS__) |
| #define | vloxseg6ei16_v_u16m1_m(...) __riscv_vloxseg6ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_u16mf2(...) __riscv_vloxseg6ei16_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg6ei16_v_u16mf2_m(...) __riscv_vloxseg6ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_u16mf4(...) __riscv_vloxseg6ei16_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg6ei16_v_u16mf4_m(...) __riscv_vloxseg6ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_u32m1(...) __riscv_vloxseg6ei16_v_u32m1(__VA_ARGS__) |
| #define | vloxseg6ei16_v_u32m1_m(...) __riscv_vloxseg6ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_u32mf2(...) __riscv_vloxseg6ei16_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg6ei16_v_u32mf2_m(...) __riscv_vloxseg6ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_u64m1(...) __riscv_vloxseg6ei16_v_u64m1(__VA_ARGS__) |
| #define | vloxseg6ei16_v_u64m1_m(...) __riscv_vloxseg6ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_u8m1(...) __riscv_vloxseg6ei16_v_u8m1(__VA_ARGS__) |
| #define | vloxseg6ei16_v_u8m1_m(...) __riscv_vloxseg6ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_u8mf2(...) __riscv_vloxseg6ei16_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg6ei16_v_u8mf2_m(...) __riscv_vloxseg6ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_u8mf4(...) __riscv_vloxseg6ei16_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg6ei16_v_u8mf4_m(...) __riscv_vloxseg6ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg6ei16_v_u8mf8(...) __riscv_vloxseg6ei16_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg6ei16_v_u8mf8_m(...) __riscv_vloxseg6ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_f16m1(...) __riscv_vloxseg6ei32_v_f16m1(__VA_ARGS__) |
| #define | vloxseg6ei32_v_f16m1_m(...) __riscv_vloxseg6ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_f16mf2(...) __riscv_vloxseg6ei32_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg6ei32_v_f16mf2_m(...) __riscv_vloxseg6ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_f16mf4(...) __riscv_vloxseg6ei32_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg6ei32_v_f16mf4_m(...) __riscv_vloxseg6ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_f32m1(...) __riscv_vloxseg6ei32_v_f32m1(__VA_ARGS__) |
| #define | vloxseg6ei32_v_f32m1_m(...) __riscv_vloxseg6ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_f32mf2(...) __riscv_vloxseg6ei32_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg6ei32_v_f32mf2_m(...) __riscv_vloxseg6ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_f64m1(...) __riscv_vloxseg6ei32_v_f64m1(__VA_ARGS__) |
| #define | vloxseg6ei32_v_f64m1_m(...) __riscv_vloxseg6ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_i16m1(...) __riscv_vloxseg6ei32_v_i16m1(__VA_ARGS__) |
| #define | vloxseg6ei32_v_i16m1_m(...) __riscv_vloxseg6ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_i16mf2(...) __riscv_vloxseg6ei32_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg6ei32_v_i16mf2_m(...) __riscv_vloxseg6ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_i16mf4(...) __riscv_vloxseg6ei32_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg6ei32_v_i16mf4_m(...) __riscv_vloxseg6ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_i32m1(...) __riscv_vloxseg6ei32_v_i32m1(__VA_ARGS__) |
| #define | vloxseg6ei32_v_i32m1_m(...) __riscv_vloxseg6ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_i32mf2(...) __riscv_vloxseg6ei32_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg6ei32_v_i32mf2_m(...) __riscv_vloxseg6ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_i64m1(...) __riscv_vloxseg6ei32_v_i64m1(__VA_ARGS__) |
| #define | vloxseg6ei32_v_i64m1_m(...) __riscv_vloxseg6ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_i8m1(...) __riscv_vloxseg6ei32_v_i8m1(__VA_ARGS__) |
| #define | vloxseg6ei32_v_i8m1_m(...) __riscv_vloxseg6ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_i8mf2(...) __riscv_vloxseg6ei32_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg6ei32_v_i8mf2_m(...) __riscv_vloxseg6ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_i8mf4(...) __riscv_vloxseg6ei32_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg6ei32_v_i8mf4_m(...) __riscv_vloxseg6ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_i8mf8(...) __riscv_vloxseg6ei32_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg6ei32_v_i8mf8_m(...) __riscv_vloxseg6ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_u16m1(...) __riscv_vloxseg6ei32_v_u16m1(__VA_ARGS__) |
| #define | vloxseg6ei32_v_u16m1_m(...) __riscv_vloxseg6ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_u16mf2(...) __riscv_vloxseg6ei32_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg6ei32_v_u16mf2_m(...) __riscv_vloxseg6ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_u16mf4(...) __riscv_vloxseg6ei32_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg6ei32_v_u16mf4_m(...) __riscv_vloxseg6ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_u32m1(...) __riscv_vloxseg6ei32_v_u32m1(__VA_ARGS__) |
| #define | vloxseg6ei32_v_u32m1_m(...) __riscv_vloxseg6ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_u32mf2(...) __riscv_vloxseg6ei32_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg6ei32_v_u32mf2_m(...) __riscv_vloxseg6ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_u64m1(...) __riscv_vloxseg6ei32_v_u64m1(__VA_ARGS__) |
| #define | vloxseg6ei32_v_u64m1_m(...) __riscv_vloxseg6ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_u8m1(...) __riscv_vloxseg6ei32_v_u8m1(__VA_ARGS__) |
| #define | vloxseg6ei32_v_u8m1_m(...) __riscv_vloxseg6ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_u8mf2(...) __riscv_vloxseg6ei32_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg6ei32_v_u8mf2_m(...) __riscv_vloxseg6ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_u8mf4(...) __riscv_vloxseg6ei32_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg6ei32_v_u8mf4_m(...) __riscv_vloxseg6ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg6ei32_v_u8mf8(...) __riscv_vloxseg6ei32_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg6ei32_v_u8mf8_m(...) __riscv_vloxseg6ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_f16m1(...) __riscv_vloxseg6ei64_v_f16m1(__VA_ARGS__) |
| #define | vloxseg6ei64_v_f16m1_m(...) __riscv_vloxseg6ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_f16mf2(...) __riscv_vloxseg6ei64_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg6ei64_v_f16mf2_m(...) __riscv_vloxseg6ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_f16mf4(...) __riscv_vloxseg6ei64_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg6ei64_v_f16mf4_m(...) __riscv_vloxseg6ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_f32m1(...) __riscv_vloxseg6ei64_v_f32m1(__VA_ARGS__) |
| #define | vloxseg6ei64_v_f32m1_m(...) __riscv_vloxseg6ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_f32mf2(...) __riscv_vloxseg6ei64_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg6ei64_v_f32mf2_m(...) __riscv_vloxseg6ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_f64m1(...) __riscv_vloxseg6ei64_v_f64m1(__VA_ARGS__) |
| #define | vloxseg6ei64_v_f64m1_m(...) __riscv_vloxseg6ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_i16m1(...) __riscv_vloxseg6ei64_v_i16m1(__VA_ARGS__) |
| #define | vloxseg6ei64_v_i16m1_m(...) __riscv_vloxseg6ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_i16mf2(...) __riscv_vloxseg6ei64_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg6ei64_v_i16mf2_m(...) __riscv_vloxseg6ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_i16mf4(...) __riscv_vloxseg6ei64_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg6ei64_v_i16mf4_m(...) __riscv_vloxseg6ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_i32m1(...) __riscv_vloxseg6ei64_v_i32m1(__VA_ARGS__) |
| #define | vloxseg6ei64_v_i32m1_m(...) __riscv_vloxseg6ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_i32mf2(...) __riscv_vloxseg6ei64_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg6ei64_v_i32mf2_m(...) __riscv_vloxseg6ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_i64m1(...) __riscv_vloxseg6ei64_v_i64m1(__VA_ARGS__) |
| #define | vloxseg6ei64_v_i64m1_m(...) __riscv_vloxseg6ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_i8m1(...) __riscv_vloxseg6ei64_v_i8m1(__VA_ARGS__) |
| #define | vloxseg6ei64_v_i8m1_m(...) __riscv_vloxseg6ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_i8mf2(...) __riscv_vloxseg6ei64_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg6ei64_v_i8mf2_m(...) __riscv_vloxseg6ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_i8mf4(...) __riscv_vloxseg6ei64_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg6ei64_v_i8mf4_m(...) __riscv_vloxseg6ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_i8mf8(...) __riscv_vloxseg6ei64_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg6ei64_v_i8mf8_m(...) __riscv_vloxseg6ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_u16m1(...) __riscv_vloxseg6ei64_v_u16m1(__VA_ARGS__) |
| #define | vloxseg6ei64_v_u16m1_m(...) __riscv_vloxseg6ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_u16mf2(...) __riscv_vloxseg6ei64_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg6ei64_v_u16mf2_m(...) __riscv_vloxseg6ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_u16mf4(...) __riscv_vloxseg6ei64_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg6ei64_v_u16mf4_m(...) __riscv_vloxseg6ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_u32m1(...) __riscv_vloxseg6ei64_v_u32m1(__VA_ARGS__) |
| #define | vloxseg6ei64_v_u32m1_m(...) __riscv_vloxseg6ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_u32mf2(...) __riscv_vloxseg6ei64_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg6ei64_v_u32mf2_m(...) __riscv_vloxseg6ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_u64m1(...) __riscv_vloxseg6ei64_v_u64m1(__VA_ARGS__) |
| #define | vloxseg6ei64_v_u64m1_m(...) __riscv_vloxseg6ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_u8m1(...) __riscv_vloxseg6ei64_v_u8m1(__VA_ARGS__) |
| #define | vloxseg6ei64_v_u8m1_m(...) __riscv_vloxseg6ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_u8mf2(...) __riscv_vloxseg6ei64_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg6ei64_v_u8mf2_m(...) __riscv_vloxseg6ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_u8mf4(...) __riscv_vloxseg6ei64_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg6ei64_v_u8mf4_m(...) __riscv_vloxseg6ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg6ei64_v_u8mf8(...) __riscv_vloxseg6ei64_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg6ei64_v_u8mf8_m(...) __riscv_vloxseg6ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_f16m1(...) __riscv_vloxseg6ei8_v_f16m1(__VA_ARGS__) |
| #define | vloxseg6ei8_v_f16m1_m(...) __riscv_vloxseg6ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_f16mf2(...) __riscv_vloxseg6ei8_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg6ei8_v_f16mf2_m(...) __riscv_vloxseg6ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_f16mf4(...) __riscv_vloxseg6ei8_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg6ei8_v_f16mf4_m(...) __riscv_vloxseg6ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_f32m1(...) __riscv_vloxseg6ei8_v_f32m1(__VA_ARGS__) |
| #define | vloxseg6ei8_v_f32m1_m(...) __riscv_vloxseg6ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_f32mf2(...) __riscv_vloxseg6ei8_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg6ei8_v_f32mf2_m(...) __riscv_vloxseg6ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_f64m1(...) __riscv_vloxseg6ei8_v_f64m1(__VA_ARGS__) |
| #define | vloxseg6ei8_v_f64m1_m(...) __riscv_vloxseg6ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_i16m1(...) __riscv_vloxseg6ei8_v_i16m1(__VA_ARGS__) |
| #define | vloxseg6ei8_v_i16m1_m(...) __riscv_vloxseg6ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_i16mf2(...) __riscv_vloxseg6ei8_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg6ei8_v_i16mf2_m(...) __riscv_vloxseg6ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_i16mf4(...) __riscv_vloxseg6ei8_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg6ei8_v_i16mf4_m(...) __riscv_vloxseg6ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_i32m1(...) __riscv_vloxseg6ei8_v_i32m1(__VA_ARGS__) |
| #define | vloxseg6ei8_v_i32m1_m(...) __riscv_vloxseg6ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_i32mf2(...) __riscv_vloxseg6ei8_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg6ei8_v_i32mf2_m(...) __riscv_vloxseg6ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_i64m1(...) __riscv_vloxseg6ei8_v_i64m1(__VA_ARGS__) |
| #define | vloxseg6ei8_v_i64m1_m(...) __riscv_vloxseg6ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_i8m1(...) __riscv_vloxseg6ei8_v_i8m1(__VA_ARGS__) |
| #define | vloxseg6ei8_v_i8m1_m(...) __riscv_vloxseg6ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_i8mf2(...) __riscv_vloxseg6ei8_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg6ei8_v_i8mf2_m(...) __riscv_vloxseg6ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_i8mf4(...) __riscv_vloxseg6ei8_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg6ei8_v_i8mf4_m(...) __riscv_vloxseg6ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_i8mf8(...) __riscv_vloxseg6ei8_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg6ei8_v_i8mf8_m(...) __riscv_vloxseg6ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_u16m1(...) __riscv_vloxseg6ei8_v_u16m1(__VA_ARGS__) |
| #define | vloxseg6ei8_v_u16m1_m(...) __riscv_vloxseg6ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_u16mf2(...) __riscv_vloxseg6ei8_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg6ei8_v_u16mf2_m(...) __riscv_vloxseg6ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_u16mf4(...) __riscv_vloxseg6ei8_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg6ei8_v_u16mf4_m(...) __riscv_vloxseg6ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_u32m1(...) __riscv_vloxseg6ei8_v_u32m1(__VA_ARGS__) |
| #define | vloxseg6ei8_v_u32m1_m(...) __riscv_vloxseg6ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_u32mf2(...) __riscv_vloxseg6ei8_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg6ei8_v_u32mf2_m(...) __riscv_vloxseg6ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_u64m1(...) __riscv_vloxseg6ei8_v_u64m1(__VA_ARGS__) |
| #define | vloxseg6ei8_v_u64m1_m(...) __riscv_vloxseg6ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_u8m1(...) __riscv_vloxseg6ei8_v_u8m1(__VA_ARGS__) |
| #define | vloxseg6ei8_v_u8m1_m(...) __riscv_vloxseg6ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_u8mf2(...) __riscv_vloxseg6ei8_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg6ei8_v_u8mf2_m(...) __riscv_vloxseg6ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_u8mf4(...) __riscv_vloxseg6ei8_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg6ei8_v_u8mf4_m(...) __riscv_vloxseg6ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg6ei8_v_u8mf8(...) __riscv_vloxseg6ei8_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg6ei8_v_u8mf8_m(...) __riscv_vloxseg6ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_f16m1(...) __riscv_vloxseg7ei16_v_f16m1(__VA_ARGS__) |
| #define | vloxseg7ei16_v_f16m1_m(...) __riscv_vloxseg7ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_f16mf2(...) __riscv_vloxseg7ei16_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg7ei16_v_f16mf2_m(...) __riscv_vloxseg7ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_f16mf4(...) __riscv_vloxseg7ei16_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg7ei16_v_f16mf4_m(...) __riscv_vloxseg7ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_f32m1(...) __riscv_vloxseg7ei16_v_f32m1(__VA_ARGS__) |
| #define | vloxseg7ei16_v_f32m1_m(...) __riscv_vloxseg7ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_f32mf2(...) __riscv_vloxseg7ei16_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg7ei16_v_f32mf2_m(...) __riscv_vloxseg7ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_f64m1(...) __riscv_vloxseg7ei16_v_f64m1(__VA_ARGS__) |
| #define | vloxseg7ei16_v_f64m1_m(...) __riscv_vloxseg7ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_i16m1(...) __riscv_vloxseg7ei16_v_i16m1(__VA_ARGS__) |
| #define | vloxseg7ei16_v_i16m1_m(...) __riscv_vloxseg7ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_i16mf2(...) __riscv_vloxseg7ei16_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg7ei16_v_i16mf2_m(...) __riscv_vloxseg7ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_i16mf4(...) __riscv_vloxseg7ei16_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg7ei16_v_i16mf4_m(...) __riscv_vloxseg7ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_i32m1(...) __riscv_vloxseg7ei16_v_i32m1(__VA_ARGS__) |
| #define | vloxseg7ei16_v_i32m1_m(...) __riscv_vloxseg7ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_i32mf2(...) __riscv_vloxseg7ei16_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg7ei16_v_i32mf2_m(...) __riscv_vloxseg7ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_i64m1(...) __riscv_vloxseg7ei16_v_i64m1(__VA_ARGS__) |
| #define | vloxseg7ei16_v_i64m1_m(...) __riscv_vloxseg7ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_i8m1(...) __riscv_vloxseg7ei16_v_i8m1(__VA_ARGS__) |
| #define | vloxseg7ei16_v_i8m1_m(...) __riscv_vloxseg7ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_i8mf2(...) __riscv_vloxseg7ei16_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg7ei16_v_i8mf2_m(...) __riscv_vloxseg7ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_i8mf4(...) __riscv_vloxseg7ei16_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg7ei16_v_i8mf4_m(...) __riscv_vloxseg7ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_i8mf8(...) __riscv_vloxseg7ei16_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg7ei16_v_i8mf8_m(...) __riscv_vloxseg7ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_u16m1(...) __riscv_vloxseg7ei16_v_u16m1(__VA_ARGS__) |
| #define | vloxseg7ei16_v_u16m1_m(...) __riscv_vloxseg7ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_u16mf2(...) __riscv_vloxseg7ei16_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg7ei16_v_u16mf2_m(...) __riscv_vloxseg7ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_u16mf4(...) __riscv_vloxseg7ei16_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg7ei16_v_u16mf4_m(...) __riscv_vloxseg7ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_u32m1(...) __riscv_vloxseg7ei16_v_u32m1(__VA_ARGS__) |
| #define | vloxseg7ei16_v_u32m1_m(...) __riscv_vloxseg7ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_u32mf2(...) __riscv_vloxseg7ei16_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg7ei16_v_u32mf2_m(...) __riscv_vloxseg7ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_u64m1(...) __riscv_vloxseg7ei16_v_u64m1(__VA_ARGS__) |
| #define | vloxseg7ei16_v_u64m1_m(...) __riscv_vloxseg7ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_u8m1(...) __riscv_vloxseg7ei16_v_u8m1(__VA_ARGS__) |
| #define | vloxseg7ei16_v_u8m1_m(...) __riscv_vloxseg7ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_u8mf2(...) __riscv_vloxseg7ei16_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg7ei16_v_u8mf2_m(...) __riscv_vloxseg7ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_u8mf4(...) __riscv_vloxseg7ei16_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg7ei16_v_u8mf4_m(...) __riscv_vloxseg7ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg7ei16_v_u8mf8(...) __riscv_vloxseg7ei16_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg7ei16_v_u8mf8_m(...) __riscv_vloxseg7ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_f16m1(...) __riscv_vloxseg7ei32_v_f16m1(__VA_ARGS__) |
| #define | vloxseg7ei32_v_f16m1_m(...) __riscv_vloxseg7ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_f16mf2(...) __riscv_vloxseg7ei32_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg7ei32_v_f16mf2_m(...) __riscv_vloxseg7ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_f16mf4(...) __riscv_vloxseg7ei32_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg7ei32_v_f16mf4_m(...) __riscv_vloxseg7ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_f32m1(...) __riscv_vloxseg7ei32_v_f32m1(__VA_ARGS__) |
| #define | vloxseg7ei32_v_f32m1_m(...) __riscv_vloxseg7ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_f32mf2(...) __riscv_vloxseg7ei32_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg7ei32_v_f32mf2_m(...) __riscv_vloxseg7ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_f64m1(...) __riscv_vloxseg7ei32_v_f64m1(__VA_ARGS__) |
| #define | vloxseg7ei32_v_f64m1_m(...) __riscv_vloxseg7ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_i16m1(...) __riscv_vloxseg7ei32_v_i16m1(__VA_ARGS__) |
| #define | vloxseg7ei32_v_i16m1_m(...) __riscv_vloxseg7ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_i16mf2(...) __riscv_vloxseg7ei32_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg7ei32_v_i16mf2_m(...) __riscv_vloxseg7ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_i16mf4(...) __riscv_vloxseg7ei32_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg7ei32_v_i16mf4_m(...) __riscv_vloxseg7ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_i32m1(...) __riscv_vloxseg7ei32_v_i32m1(__VA_ARGS__) |
| #define | vloxseg7ei32_v_i32m1_m(...) __riscv_vloxseg7ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_i32mf2(...) __riscv_vloxseg7ei32_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg7ei32_v_i32mf2_m(...) __riscv_vloxseg7ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_i64m1(...) __riscv_vloxseg7ei32_v_i64m1(__VA_ARGS__) |
| #define | vloxseg7ei32_v_i64m1_m(...) __riscv_vloxseg7ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_i8m1(...) __riscv_vloxseg7ei32_v_i8m1(__VA_ARGS__) |
| #define | vloxseg7ei32_v_i8m1_m(...) __riscv_vloxseg7ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_i8mf2(...) __riscv_vloxseg7ei32_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg7ei32_v_i8mf2_m(...) __riscv_vloxseg7ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_i8mf4(...) __riscv_vloxseg7ei32_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg7ei32_v_i8mf4_m(...) __riscv_vloxseg7ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_i8mf8(...) __riscv_vloxseg7ei32_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg7ei32_v_i8mf8_m(...) __riscv_vloxseg7ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_u16m1(...) __riscv_vloxseg7ei32_v_u16m1(__VA_ARGS__) |
| #define | vloxseg7ei32_v_u16m1_m(...) __riscv_vloxseg7ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_u16mf2(...) __riscv_vloxseg7ei32_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg7ei32_v_u16mf2_m(...) __riscv_vloxseg7ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_u16mf4(...) __riscv_vloxseg7ei32_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg7ei32_v_u16mf4_m(...) __riscv_vloxseg7ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_u32m1(...) __riscv_vloxseg7ei32_v_u32m1(__VA_ARGS__) |
| #define | vloxseg7ei32_v_u32m1_m(...) __riscv_vloxseg7ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_u32mf2(...) __riscv_vloxseg7ei32_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg7ei32_v_u32mf2_m(...) __riscv_vloxseg7ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_u64m1(...) __riscv_vloxseg7ei32_v_u64m1(__VA_ARGS__) |
| #define | vloxseg7ei32_v_u64m1_m(...) __riscv_vloxseg7ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_u8m1(...) __riscv_vloxseg7ei32_v_u8m1(__VA_ARGS__) |
| #define | vloxseg7ei32_v_u8m1_m(...) __riscv_vloxseg7ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_u8mf2(...) __riscv_vloxseg7ei32_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg7ei32_v_u8mf2_m(...) __riscv_vloxseg7ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_u8mf4(...) __riscv_vloxseg7ei32_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg7ei32_v_u8mf4_m(...) __riscv_vloxseg7ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg7ei32_v_u8mf8(...) __riscv_vloxseg7ei32_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg7ei32_v_u8mf8_m(...) __riscv_vloxseg7ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_f16m1(...) __riscv_vloxseg7ei64_v_f16m1(__VA_ARGS__) |
| #define | vloxseg7ei64_v_f16m1_m(...) __riscv_vloxseg7ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_f16mf2(...) __riscv_vloxseg7ei64_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg7ei64_v_f16mf2_m(...) __riscv_vloxseg7ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_f16mf4(...) __riscv_vloxseg7ei64_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg7ei64_v_f16mf4_m(...) __riscv_vloxseg7ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_f32m1(...) __riscv_vloxseg7ei64_v_f32m1(__VA_ARGS__) |
| #define | vloxseg7ei64_v_f32m1_m(...) __riscv_vloxseg7ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_f32mf2(...) __riscv_vloxseg7ei64_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg7ei64_v_f32mf2_m(...) __riscv_vloxseg7ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_f64m1(...) __riscv_vloxseg7ei64_v_f64m1(__VA_ARGS__) |
| #define | vloxseg7ei64_v_f64m1_m(...) __riscv_vloxseg7ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_i16m1(...) __riscv_vloxseg7ei64_v_i16m1(__VA_ARGS__) |
| #define | vloxseg7ei64_v_i16m1_m(...) __riscv_vloxseg7ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_i16mf2(...) __riscv_vloxseg7ei64_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg7ei64_v_i16mf2_m(...) __riscv_vloxseg7ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_i16mf4(...) __riscv_vloxseg7ei64_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg7ei64_v_i16mf4_m(...) __riscv_vloxseg7ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_i32m1(...) __riscv_vloxseg7ei64_v_i32m1(__VA_ARGS__) |
| #define | vloxseg7ei64_v_i32m1_m(...) __riscv_vloxseg7ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_i32mf2(...) __riscv_vloxseg7ei64_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg7ei64_v_i32mf2_m(...) __riscv_vloxseg7ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_i64m1(...) __riscv_vloxseg7ei64_v_i64m1(__VA_ARGS__) |
| #define | vloxseg7ei64_v_i64m1_m(...) __riscv_vloxseg7ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_i8m1(...) __riscv_vloxseg7ei64_v_i8m1(__VA_ARGS__) |
| #define | vloxseg7ei64_v_i8m1_m(...) __riscv_vloxseg7ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_i8mf2(...) __riscv_vloxseg7ei64_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg7ei64_v_i8mf2_m(...) __riscv_vloxseg7ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_i8mf4(...) __riscv_vloxseg7ei64_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg7ei64_v_i8mf4_m(...) __riscv_vloxseg7ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_i8mf8(...) __riscv_vloxseg7ei64_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg7ei64_v_i8mf8_m(...) __riscv_vloxseg7ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_u16m1(...) __riscv_vloxseg7ei64_v_u16m1(__VA_ARGS__) |
| #define | vloxseg7ei64_v_u16m1_m(...) __riscv_vloxseg7ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_u16mf2(...) __riscv_vloxseg7ei64_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg7ei64_v_u16mf2_m(...) __riscv_vloxseg7ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_u16mf4(...) __riscv_vloxseg7ei64_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg7ei64_v_u16mf4_m(...) __riscv_vloxseg7ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_u32m1(...) __riscv_vloxseg7ei64_v_u32m1(__VA_ARGS__) |
| #define | vloxseg7ei64_v_u32m1_m(...) __riscv_vloxseg7ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_u32mf2(...) __riscv_vloxseg7ei64_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg7ei64_v_u32mf2_m(...) __riscv_vloxseg7ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_u64m1(...) __riscv_vloxseg7ei64_v_u64m1(__VA_ARGS__) |
| #define | vloxseg7ei64_v_u64m1_m(...) __riscv_vloxseg7ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_u8m1(...) __riscv_vloxseg7ei64_v_u8m1(__VA_ARGS__) |
| #define | vloxseg7ei64_v_u8m1_m(...) __riscv_vloxseg7ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_u8mf2(...) __riscv_vloxseg7ei64_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg7ei64_v_u8mf2_m(...) __riscv_vloxseg7ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_u8mf4(...) __riscv_vloxseg7ei64_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg7ei64_v_u8mf4_m(...) __riscv_vloxseg7ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg7ei64_v_u8mf8(...) __riscv_vloxseg7ei64_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg7ei64_v_u8mf8_m(...) __riscv_vloxseg7ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_f16m1(...) __riscv_vloxseg7ei8_v_f16m1(__VA_ARGS__) |
| #define | vloxseg7ei8_v_f16m1_m(...) __riscv_vloxseg7ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_f16mf2(...) __riscv_vloxseg7ei8_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg7ei8_v_f16mf2_m(...) __riscv_vloxseg7ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_f16mf4(...) __riscv_vloxseg7ei8_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg7ei8_v_f16mf4_m(...) __riscv_vloxseg7ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_f32m1(...) __riscv_vloxseg7ei8_v_f32m1(__VA_ARGS__) |
| #define | vloxseg7ei8_v_f32m1_m(...) __riscv_vloxseg7ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_f32mf2(...) __riscv_vloxseg7ei8_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg7ei8_v_f32mf2_m(...) __riscv_vloxseg7ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_f64m1(...) __riscv_vloxseg7ei8_v_f64m1(__VA_ARGS__) |
| #define | vloxseg7ei8_v_f64m1_m(...) __riscv_vloxseg7ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_i16m1(...) __riscv_vloxseg7ei8_v_i16m1(__VA_ARGS__) |
| #define | vloxseg7ei8_v_i16m1_m(...) __riscv_vloxseg7ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_i16mf2(...) __riscv_vloxseg7ei8_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg7ei8_v_i16mf2_m(...) __riscv_vloxseg7ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_i16mf4(...) __riscv_vloxseg7ei8_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg7ei8_v_i16mf4_m(...) __riscv_vloxseg7ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_i32m1(...) __riscv_vloxseg7ei8_v_i32m1(__VA_ARGS__) |
| #define | vloxseg7ei8_v_i32m1_m(...) __riscv_vloxseg7ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_i32mf2(...) __riscv_vloxseg7ei8_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg7ei8_v_i32mf2_m(...) __riscv_vloxseg7ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_i64m1(...) __riscv_vloxseg7ei8_v_i64m1(__VA_ARGS__) |
| #define | vloxseg7ei8_v_i64m1_m(...) __riscv_vloxseg7ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_i8m1(...) __riscv_vloxseg7ei8_v_i8m1(__VA_ARGS__) |
| #define | vloxseg7ei8_v_i8m1_m(...) __riscv_vloxseg7ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_i8mf2(...) __riscv_vloxseg7ei8_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg7ei8_v_i8mf2_m(...) __riscv_vloxseg7ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_i8mf4(...) __riscv_vloxseg7ei8_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg7ei8_v_i8mf4_m(...) __riscv_vloxseg7ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_i8mf8(...) __riscv_vloxseg7ei8_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg7ei8_v_i8mf8_m(...) __riscv_vloxseg7ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_u16m1(...) __riscv_vloxseg7ei8_v_u16m1(__VA_ARGS__) |
| #define | vloxseg7ei8_v_u16m1_m(...) __riscv_vloxseg7ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_u16mf2(...) __riscv_vloxseg7ei8_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg7ei8_v_u16mf2_m(...) __riscv_vloxseg7ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_u16mf4(...) __riscv_vloxseg7ei8_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg7ei8_v_u16mf4_m(...) __riscv_vloxseg7ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_u32m1(...) __riscv_vloxseg7ei8_v_u32m1(__VA_ARGS__) |
| #define | vloxseg7ei8_v_u32m1_m(...) __riscv_vloxseg7ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_u32mf2(...) __riscv_vloxseg7ei8_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg7ei8_v_u32mf2_m(...) __riscv_vloxseg7ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_u64m1(...) __riscv_vloxseg7ei8_v_u64m1(__VA_ARGS__) |
| #define | vloxseg7ei8_v_u64m1_m(...) __riscv_vloxseg7ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_u8m1(...) __riscv_vloxseg7ei8_v_u8m1(__VA_ARGS__) |
| #define | vloxseg7ei8_v_u8m1_m(...) __riscv_vloxseg7ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_u8mf2(...) __riscv_vloxseg7ei8_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg7ei8_v_u8mf2_m(...) __riscv_vloxseg7ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_u8mf4(...) __riscv_vloxseg7ei8_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg7ei8_v_u8mf4_m(...) __riscv_vloxseg7ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg7ei8_v_u8mf8(...) __riscv_vloxseg7ei8_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg7ei8_v_u8mf8_m(...) __riscv_vloxseg7ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_f16m1(...) __riscv_vloxseg8ei16_v_f16m1(__VA_ARGS__) |
| #define | vloxseg8ei16_v_f16m1_m(...) __riscv_vloxseg8ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_f16mf2(...) __riscv_vloxseg8ei16_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg8ei16_v_f16mf2_m(...) __riscv_vloxseg8ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_f16mf4(...) __riscv_vloxseg8ei16_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg8ei16_v_f16mf4_m(...) __riscv_vloxseg8ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_f32m1(...) __riscv_vloxseg8ei16_v_f32m1(__VA_ARGS__) |
| #define | vloxseg8ei16_v_f32m1_m(...) __riscv_vloxseg8ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_f32mf2(...) __riscv_vloxseg8ei16_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg8ei16_v_f32mf2_m(...) __riscv_vloxseg8ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_f64m1(...) __riscv_vloxseg8ei16_v_f64m1(__VA_ARGS__) |
| #define | vloxseg8ei16_v_f64m1_m(...) __riscv_vloxseg8ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_i16m1(...) __riscv_vloxseg8ei16_v_i16m1(__VA_ARGS__) |
| #define | vloxseg8ei16_v_i16m1_m(...) __riscv_vloxseg8ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_i16mf2(...) __riscv_vloxseg8ei16_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg8ei16_v_i16mf2_m(...) __riscv_vloxseg8ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_i16mf4(...) __riscv_vloxseg8ei16_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg8ei16_v_i16mf4_m(...) __riscv_vloxseg8ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_i32m1(...) __riscv_vloxseg8ei16_v_i32m1(__VA_ARGS__) |
| #define | vloxseg8ei16_v_i32m1_m(...) __riscv_vloxseg8ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_i32mf2(...) __riscv_vloxseg8ei16_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg8ei16_v_i32mf2_m(...) __riscv_vloxseg8ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_i64m1(...) __riscv_vloxseg8ei16_v_i64m1(__VA_ARGS__) |
| #define | vloxseg8ei16_v_i64m1_m(...) __riscv_vloxseg8ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_i8m1(...) __riscv_vloxseg8ei16_v_i8m1(__VA_ARGS__) |
| #define | vloxseg8ei16_v_i8m1_m(...) __riscv_vloxseg8ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_i8mf2(...) __riscv_vloxseg8ei16_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg8ei16_v_i8mf2_m(...) __riscv_vloxseg8ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_i8mf4(...) __riscv_vloxseg8ei16_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg8ei16_v_i8mf4_m(...) __riscv_vloxseg8ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_i8mf8(...) __riscv_vloxseg8ei16_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg8ei16_v_i8mf8_m(...) __riscv_vloxseg8ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_u16m1(...) __riscv_vloxseg8ei16_v_u16m1(__VA_ARGS__) |
| #define | vloxseg8ei16_v_u16m1_m(...) __riscv_vloxseg8ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_u16mf2(...) __riscv_vloxseg8ei16_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg8ei16_v_u16mf2_m(...) __riscv_vloxseg8ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_u16mf4(...) __riscv_vloxseg8ei16_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg8ei16_v_u16mf4_m(...) __riscv_vloxseg8ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_u32m1(...) __riscv_vloxseg8ei16_v_u32m1(__VA_ARGS__) |
| #define | vloxseg8ei16_v_u32m1_m(...) __riscv_vloxseg8ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_u32mf2(...) __riscv_vloxseg8ei16_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg8ei16_v_u32mf2_m(...) __riscv_vloxseg8ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_u64m1(...) __riscv_vloxseg8ei16_v_u64m1(__VA_ARGS__) |
| #define | vloxseg8ei16_v_u64m1_m(...) __riscv_vloxseg8ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_u8m1(...) __riscv_vloxseg8ei16_v_u8m1(__VA_ARGS__) |
| #define | vloxseg8ei16_v_u8m1_m(...) __riscv_vloxseg8ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_u8mf2(...) __riscv_vloxseg8ei16_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg8ei16_v_u8mf2_m(...) __riscv_vloxseg8ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_u8mf4(...) __riscv_vloxseg8ei16_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg8ei16_v_u8mf4_m(...) __riscv_vloxseg8ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg8ei16_v_u8mf8(...) __riscv_vloxseg8ei16_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg8ei16_v_u8mf8_m(...) __riscv_vloxseg8ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_f16m1(...) __riscv_vloxseg8ei32_v_f16m1(__VA_ARGS__) |
| #define | vloxseg8ei32_v_f16m1_m(...) __riscv_vloxseg8ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_f16mf2(...) __riscv_vloxseg8ei32_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg8ei32_v_f16mf2_m(...) __riscv_vloxseg8ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_f16mf4(...) __riscv_vloxseg8ei32_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg8ei32_v_f16mf4_m(...) __riscv_vloxseg8ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_f32m1(...) __riscv_vloxseg8ei32_v_f32m1(__VA_ARGS__) |
| #define | vloxseg8ei32_v_f32m1_m(...) __riscv_vloxseg8ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_f32mf2(...) __riscv_vloxseg8ei32_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg8ei32_v_f32mf2_m(...) __riscv_vloxseg8ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_f64m1(...) __riscv_vloxseg8ei32_v_f64m1(__VA_ARGS__) |
| #define | vloxseg8ei32_v_f64m1_m(...) __riscv_vloxseg8ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_i16m1(...) __riscv_vloxseg8ei32_v_i16m1(__VA_ARGS__) |
| #define | vloxseg8ei32_v_i16m1_m(...) __riscv_vloxseg8ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_i16mf2(...) __riscv_vloxseg8ei32_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg8ei32_v_i16mf2_m(...) __riscv_vloxseg8ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_i16mf4(...) __riscv_vloxseg8ei32_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg8ei32_v_i16mf4_m(...) __riscv_vloxseg8ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_i32m1(...) __riscv_vloxseg8ei32_v_i32m1(__VA_ARGS__) |
| #define | vloxseg8ei32_v_i32m1_m(...) __riscv_vloxseg8ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_i32mf2(...) __riscv_vloxseg8ei32_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg8ei32_v_i32mf2_m(...) __riscv_vloxseg8ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_i64m1(...) __riscv_vloxseg8ei32_v_i64m1(__VA_ARGS__) |
| #define | vloxseg8ei32_v_i64m1_m(...) __riscv_vloxseg8ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_i8m1(...) __riscv_vloxseg8ei32_v_i8m1(__VA_ARGS__) |
| #define | vloxseg8ei32_v_i8m1_m(...) __riscv_vloxseg8ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_i8mf2(...) __riscv_vloxseg8ei32_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg8ei32_v_i8mf2_m(...) __riscv_vloxseg8ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_i8mf4(...) __riscv_vloxseg8ei32_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg8ei32_v_i8mf4_m(...) __riscv_vloxseg8ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_i8mf8(...) __riscv_vloxseg8ei32_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg8ei32_v_i8mf8_m(...) __riscv_vloxseg8ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_u16m1(...) __riscv_vloxseg8ei32_v_u16m1(__VA_ARGS__) |
| #define | vloxseg8ei32_v_u16m1_m(...) __riscv_vloxseg8ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_u16mf2(...) __riscv_vloxseg8ei32_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg8ei32_v_u16mf2_m(...) __riscv_vloxseg8ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_u16mf4(...) __riscv_vloxseg8ei32_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg8ei32_v_u16mf4_m(...) __riscv_vloxseg8ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_u32m1(...) __riscv_vloxseg8ei32_v_u32m1(__VA_ARGS__) |
| #define | vloxseg8ei32_v_u32m1_m(...) __riscv_vloxseg8ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_u32mf2(...) __riscv_vloxseg8ei32_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg8ei32_v_u32mf2_m(...) __riscv_vloxseg8ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_u64m1(...) __riscv_vloxseg8ei32_v_u64m1(__VA_ARGS__) |
| #define | vloxseg8ei32_v_u64m1_m(...) __riscv_vloxseg8ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_u8m1(...) __riscv_vloxseg8ei32_v_u8m1(__VA_ARGS__) |
| #define | vloxseg8ei32_v_u8m1_m(...) __riscv_vloxseg8ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_u8mf2(...) __riscv_vloxseg8ei32_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg8ei32_v_u8mf2_m(...) __riscv_vloxseg8ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_u8mf4(...) __riscv_vloxseg8ei32_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg8ei32_v_u8mf4_m(...) __riscv_vloxseg8ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg8ei32_v_u8mf8(...) __riscv_vloxseg8ei32_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg8ei32_v_u8mf8_m(...) __riscv_vloxseg8ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_f16m1(...) __riscv_vloxseg8ei64_v_f16m1(__VA_ARGS__) |
| #define | vloxseg8ei64_v_f16m1_m(...) __riscv_vloxseg8ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_f16mf2(...) __riscv_vloxseg8ei64_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg8ei64_v_f16mf2_m(...) __riscv_vloxseg8ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_f16mf4(...) __riscv_vloxseg8ei64_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg8ei64_v_f16mf4_m(...) __riscv_vloxseg8ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_f32m1(...) __riscv_vloxseg8ei64_v_f32m1(__VA_ARGS__) |
| #define | vloxseg8ei64_v_f32m1_m(...) __riscv_vloxseg8ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_f32mf2(...) __riscv_vloxseg8ei64_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg8ei64_v_f32mf2_m(...) __riscv_vloxseg8ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_f64m1(...) __riscv_vloxseg8ei64_v_f64m1(__VA_ARGS__) |
| #define | vloxseg8ei64_v_f64m1_m(...) __riscv_vloxseg8ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_i16m1(...) __riscv_vloxseg8ei64_v_i16m1(__VA_ARGS__) |
| #define | vloxseg8ei64_v_i16m1_m(...) __riscv_vloxseg8ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_i16mf2(...) __riscv_vloxseg8ei64_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg8ei64_v_i16mf2_m(...) __riscv_vloxseg8ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_i16mf4(...) __riscv_vloxseg8ei64_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg8ei64_v_i16mf4_m(...) __riscv_vloxseg8ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_i32m1(...) __riscv_vloxseg8ei64_v_i32m1(__VA_ARGS__) |
| #define | vloxseg8ei64_v_i32m1_m(...) __riscv_vloxseg8ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_i32mf2(...) __riscv_vloxseg8ei64_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg8ei64_v_i32mf2_m(...) __riscv_vloxseg8ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_i64m1(...) __riscv_vloxseg8ei64_v_i64m1(__VA_ARGS__) |
| #define | vloxseg8ei64_v_i64m1_m(...) __riscv_vloxseg8ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_i8m1(...) __riscv_vloxseg8ei64_v_i8m1(__VA_ARGS__) |
| #define | vloxseg8ei64_v_i8m1_m(...) __riscv_vloxseg8ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_i8mf2(...) __riscv_vloxseg8ei64_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg8ei64_v_i8mf2_m(...) __riscv_vloxseg8ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_i8mf4(...) __riscv_vloxseg8ei64_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg8ei64_v_i8mf4_m(...) __riscv_vloxseg8ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_i8mf8(...) __riscv_vloxseg8ei64_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg8ei64_v_i8mf8_m(...) __riscv_vloxseg8ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_u16m1(...) __riscv_vloxseg8ei64_v_u16m1(__VA_ARGS__) |
| #define | vloxseg8ei64_v_u16m1_m(...) __riscv_vloxseg8ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_u16mf2(...) __riscv_vloxseg8ei64_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg8ei64_v_u16mf2_m(...) __riscv_vloxseg8ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_u16mf4(...) __riscv_vloxseg8ei64_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg8ei64_v_u16mf4_m(...) __riscv_vloxseg8ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_u32m1(...) __riscv_vloxseg8ei64_v_u32m1(__VA_ARGS__) |
| #define | vloxseg8ei64_v_u32m1_m(...) __riscv_vloxseg8ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_u32mf2(...) __riscv_vloxseg8ei64_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg8ei64_v_u32mf2_m(...) __riscv_vloxseg8ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_u64m1(...) __riscv_vloxseg8ei64_v_u64m1(__VA_ARGS__) |
| #define | vloxseg8ei64_v_u64m1_m(...) __riscv_vloxseg8ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_u8m1(...) __riscv_vloxseg8ei64_v_u8m1(__VA_ARGS__) |
| #define | vloxseg8ei64_v_u8m1_m(...) __riscv_vloxseg8ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_u8mf2(...) __riscv_vloxseg8ei64_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg8ei64_v_u8mf2_m(...) __riscv_vloxseg8ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_u8mf4(...) __riscv_vloxseg8ei64_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg8ei64_v_u8mf4_m(...) __riscv_vloxseg8ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg8ei64_v_u8mf8(...) __riscv_vloxseg8ei64_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg8ei64_v_u8mf8_m(...) __riscv_vloxseg8ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_f16m1(...) __riscv_vloxseg8ei8_v_f16m1(__VA_ARGS__) |
| #define | vloxseg8ei8_v_f16m1_m(...) __riscv_vloxseg8ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_f16mf2(...) __riscv_vloxseg8ei8_v_f16mf2(__VA_ARGS__) |
| #define | vloxseg8ei8_v_f16mf2_m(...) __riscv_vloxseg8ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_f16mf4(...) __riscv_vloxseg8ei8_v_f16mf4(__VA_ARGS__) |
| #define | vloxseg8ei8_v_f16mf4_m(...) __riscv_vloxseg8ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_f32m1(...) __riscv_vloxseg8ei8_v_f32m1(__VA_ARGS__) |
| #define | vloxseg8ei8_v_f32m1_m(...) __riscv_vloxseg8ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_f32mf2(...) __riscv_vloxseg8ei8_v_f32mf2(__VA_ARGS__) |
| #define | vloxseg8ei8_v_f32mf2_m(...) __riscv_vloxseg8ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_f64m1(...) __riscv_vloxseg8ei8_v_f64m1(__VA_ARGS__) |
| #define | vloxseg8ei8_v_f64m1_m(...) __riscv_vloxseg8ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_i16m1(...) __riscv_vloxseg8ei8_v_i16m1(__VA_ARGS__) |
| #define | vloxseg8ei8_v_i16m1_m(...) __riscv_vloxseg8ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_i16mf2(...) __riscv_vloxseg8ei8_v_i16mf2(__VA_ARGS__) |
| #define | vloxseg8ei8_v_i16mf2_m(...) __riscv_vloxseg8ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_i16mf4(...) __riscv_vloxseg8ei8_v_i16mf4(__VA_ARGS__) |
| #define | vloxseg8ei8_v_i16mf4_m(...) __riscv_vloxseg8ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_i32m1(...) __riscv_vloxseg8ei8_v_i32m1(__VA_ARGS__) |
| #define | vloxseg8ei8_v_i32m1_m(...) __riscv_vloxseg8ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_i32mf2(...) __riscv_vloxseg8ei8_v_i32mf2(__VA_ARGS__) |
| #define | vloxseg8ei8_v_i32mf2_m(...) __riscv_vloxseg8ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_i64m1(...) __riscv_vloxseg8ei8_v_i64m1(__VA_ARGS__) |
| #define | vloxseg8ei8_v_i64m1_m(...) __riscv_vloxseg8ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_i8m1(...) __riscv_vloxseg8ei8_v_i8m1(__VA_ARGS__) |
| #define | vloxseg8ei8_v_i8m1_m(...) __riscv_vloxseg8ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_i8mf2(...) __riscv_vloxseg8ei8_v_i8mf2(__VA_ARGS__) |
| #define | vloxseg8ei8_v_i8mf2_m(...) __riscv_vloxseg8ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_i8mf4(...) __riscv_vloxseg8ei8_v_i8mf4(__VA_ARGS__) |
| #define | vloxseg8ei8_v_i8mf4_m(...) __riscv_vloxseg8ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_i8mf8(...) __riscv_vloxseg8ei8_v_i8mf8(__VA_ARGS__) |
| #define | vloxseg8ei8_v_i8mf8_m(...) __riscv_vloxseg8ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_u16m1(...) __riscv_vloxseg8ei8_v_u16m1(__VA_ARGS__) |
| #define | vloxseg8ei8_v_u16m1_m(...) __riscv_vloxseg8ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_u16mf2(...) __riscv_vloxseg8ei8_v_u16mf2(__VA_ARGS__) |
| #define | vloxseg8ei8_v_u16mf2_m(...) __riscv_vloxseg8ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_u16mf4(...) __riscv_vloxseg8ei8_v_u16mf4(__VA_ARGS__) |
| #define | vloxseg8ei8_v_u16mf4_m(...) __riscv_vloxseg8ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_u32m1(...) __riscv_vloxseg8ei8_v_u32m1(__VA_ARGS__) |
| #define | vloxseg8ei8_v_u32m1_m(...) __riscv_vloxseg8ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_u32mf2(...) __riscv_vloxseg8ei8_v_u32mf2(__VA_ARGS__) |
| #define | vloxseg8ei8_v_u32mf2_m(...) __riscv_vloxseg8ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_u64m1(...) __riscv_vloxseg8ei8_v_u64m1(__VA_ARGS__) |
| #define | vloxseg8ei8_v_u64m1_m(...) __riscv_vloxseg8ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_u8m1(...) __riscv_vloxseg8ei8_v_u8m1(__VA_ARGS__) |
| #define | vloxseg8ei8_v_u8m1_m(...) __riscv_vloxseg8ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_u8mf2(...) __riscv_vloxseg8ei8_v_u8mf2(__VA_ARGS__) |
| #define | vloxseg8ei8_v_u8mf2_m(...) __riscv_vloxseg8ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_u8mf4(...) __riscv_vloxseg8ei8_v_u8mf4(__VA_ARGS__) |
| #define | vloxseg8ei8_v_u8mf4_m(...) __riscv_vloxseg8ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vloxseg8ei8_v_u8mf8(...) __riscv_vloxseg8ei8_v_u8mf8(__VA_ARGS__) |
| #define | vloxseg8ei8_v_u8mf8_m(...) __riscv_vloxseg8ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlse16_v_f16m1(...) __riscv_vlse16_v_f16m1(__VA_ARGS__) |
| #define | vlse16_v_f16m1_m(...) __riscv_vlse16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlse16_v_f16m2(...) __riscv_vlse16_v_f16m2(__VA_ARGS__) |
| #define | vlse16_v_f16m2_m(...) __riscv_vlse16_v_f16m2_tumu(__VA_ARGS__) |
| #define | vlse16_v_f16m4(...) __riscv_vlse16_v_f16m4(__VA_ARGS__) |
| #define | vlse16_v_f16m4_m(...) __riscv_vlse16_v_f16m4_tumu(__VA_ARGS__) |
| #define | vlse16_v_f16m8(...) __riscv_vlse16_v_f16m8(__VA_ARGS__) |
| #define | vlse16_v_f16m8_m(...) __riscv_vlse16_v_f16m8_tumu(__VA_ARGS__) |
| #define | vlse16_v_f16mf2(...) __riscv_vlse16_v_f16mf2(__VA_ARGS__) |
| #define | vlse16_v_f16mf2_m(...) __riscv_vlse16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlse16_v_f16mf4(...) __riscv_vlse16_v_f16mf4(__VA_ARGS__) |
| #define | vlse16_v_f16mf4_m(...) __riscv_vlse16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlse16_v_i16m1(...) __riscv_vlse16_v_i16m1(__VA_ARGS__) |
| #define | vlse16_v_i16m1_m(...) __riscv_vlse16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlse16_v_i16m2(...) __riscv_vlse16_v_i16m2(__VA_ARGS__) |
| #define | vlse16_v_i16m2_m(...) __riscv_vlse16_v_i16m2_tumu(__VA_ARGS__) |
| #define | vlse16_v_i16m4(...) __riscv_vlse16_v_i16m4(__VA_ARGS__) |
| #define | vlse16_v_i16m4_m(...) __riscv_vlse16_v_i16m4_tumu(__VA_ARGS__) |
| #define | vlse16_v_i16m8(...) __riscv_vlse16_v_i16m8(__VA_ARGS__) |
| #define | vlse16_v_i16m8_m(...) __riscv_vlse16_v_i16m8_tumu(__VA_ARGS__) |
| #define | vlse16_v_i16mf2(...) __riscv_vlse16_v_i16mf2(__VA_ARGS__) |
| #define | vlse16_v_i16mf2_m(...) __riscv_vlse16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlse16_v_i16mf4(...) __riscv_vlse16_v_i16mf4(__VA_ARGS__) |
| #define | vlse16_v_i16mf4_m(...) __riscv_vlse16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlse16_v_u16m1(...) __riscv_vlse16_v_u16m1(__VA_ARGS__) |
| #define | vlse16_v_u16m1_m(...) __riscv_vlse16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlse16_v_u16m2(...) __riscv_vlse16_v_u16m2(__VA_ARGS__) |
| #define | vlse16_v_u16m2_m(...) __riscv_vlse16_v_u16m2_tumu(__VA_ARGS__) |
| #define | vlse16_v_u16m4(...) __riscv_vlse16_v_u16m4(__VA_ARGS__) |
| #define | vlse16_v_u16m4_m(...) __riscv_vlse16_v_u16m4_tumu(__VA_ARGS__) |
| #define | vlse16_v_u16m8(...) __riscv_vlse16_v_u16m8(__VA_ARGS__) |
| #define | vlse16_v_u16m8_m(...) __riscv_vlse16_v_u16m8_tumu(__VA_ARGS__) |
| #define | vlse16_v_u16mf2(...) __riscv_vlse16_v_u16mf2(__VA_ARGS__) |
| #define | vlse16_v_u16mf2_m(...) __riscv_vlse16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlse16_v_u16mf4(...) __riscv_vlse16_v_u16mf4(__VA_ARGS__) |
| #define | vlse16_v_u16mf4_m(...) __riscv_vlse16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlse32_v_f32m1(...) __riscv_vlse32_v_f32m1(__VA_ARGS__) |
| #define | vlse32_v_f32m1_m(...) __riscv_vlse32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlse32_v_f32m2(...) __riscv_vlse32_v_f32m2(__VA_ARGS__) |
| #define | vlse32_v_f32m2_m(...) __riscv_vlse32_v_f32m2_tumu(__VA_ARGS__) |
| #define | vlse32_v_f32m4(...) __riscv_vlse32_v_f32m4(__VA_ARGS__) |
| #define | vlse32_v_f32m4_m(...) __riscv_vlse32_v_f32m4_tumu(__VA_ARGS__) |
| #define | vlse32_v_f32m8(...) __riscv_vlse32_v_f32m8(__VA_ARGS__) |
| #define | vlse32_v_f32m8_m(...) __riscv_vlse32_v_f32m8_tumu(__VA_ARGS__) |
| #define | vlse32_v_f32mf2(...) __riscv_vlse32_v_f32mf2(__VA_ARGS__) |
| #define | vlse32_v_f32mf2_m(...) __riscv_vlse32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlse32_v_i32m1(...) __riscv_vlse32_v_i32m1(__VA_ARGS__) |
| #define | vlse32_v_i32m1_m(...) __riscv_vlse32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlse32_v_i32m2(...) __riscv_vlse32_v_i32m2(__VA_ARGS__) |
| #define | vlse32_v_i32m2_m(...) __riscv_vlse32_v_i32m2_tumu(__VA_ARGS__) |
| #define | vlse32_v_i32m4(...) __riscv_vlse32_v_i32m4(__VA_ARGS__) |
| #define | vlse32_v_i32m4_m(...) __riscv_vlse32_v_i32m4_tumu(__VA_ARGS__) |
| #define | vlse32_v_i32m8(...) __riscv_vlse32_v_i32m8(__VA_ARGS__) |
| #define | vlse32_v_i32m8_m(...) __riscv_vlse32_v_i32m8_tumu(__VA_ARGS__) |
| #define | vlse32_v_i32mf2(...) __riscv_vlse32_v_i32mf2(__VA_ARGS__) |
| #define | vlse32_v_i32mf2_m(...) __riscv_vlse32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlse32_v_u32m1(...) __riscv_vlse32_v_u32m1(__VA_ARGS__) |
| #define | vlse32_v_u32m1_m(...) __riscv_vlse32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlse32_v_u32m2(...) __riscv_vlse32_v_u32m2(__VA_ARGS__) |
| #define | vlse32_v_u32m2_m(...) __riscv_vlse32_v_u32m2_tumu(__VA_ARGS__) |
| #define | vlse32_v_u32m4(...) __riscv_vlse32_v_u32m4(__VA_ARGS__) |
| #define | vlse32_v_u32m4_m(...) __riscv_vlse32_v_u32m4_tumu(__VA_ARGS__) |
| #define | vlse32_v_u32m8(...) __riscv_vlse32_v_u32m8(__VA_ARGS__) |
| #define | vlse32_v_u32m8_m(...) __riscv_vlse32_v_u32m8_tumu(__VA_ARGS__) |
| #define | vlse32_v_u32mf2(...) __riscv_vlse32_v_u32mf2(__VA_ARGS__) |
| #define | vlse32_v_u32mf2_m(...) __riscv_vlse32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlse64_v_f64m1(...) __riscv_vlse64_v_f64m1(__VA_ARGS__) |
| #define | vlse64_v_f64m1_m(...) __riscv_vlse64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlse64_v_f64m2(...) __riscv_vlse64_v_f64m2(__VA_ARGS__) |
| #define | vlse64_v_f64m2_m(...) __riscv_vlse64_v_f64m2_tumu(__VA_ARGS__) |
| #define | vlse64_v_f64m4(...) __riscv_vlse64_v_f64m4(__VA_ARGS__) |
| #define | vlse64_v_f64m4_m(...) __riscv_vlse64_v_f64m4_tumu(__VA_ARGS__) |
| #define | vlse64_v_f64m8(...) __riscv_vlse64_v_f64m8(__VA_ARGS__) |
| #define | vlse64_v_f64m8_m(...) __riscv_vlse64_v_f64m8_tumu(__VA_ARGS__) |
| #define | vlse64_v_i64m1(...) __riscv_vlse64_v_i64m1(__VA_ARGS__) |
| #define | vlse64_v_i64m1_m(...) __riscv_vlse64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlse64_v_i64m2(...) __riscv_vlse64_v_i64m2(__VA_ARGS__) |
| #define | vlse64_v_i64m2_m(...) __riscv_vlse64_v_i64m2_tumu(__VA_ARGS__) |
| #define | vlse64_v_i64m4(...) __riscv_vlse64_v_i64m4(__VA_ARGS__) |
| #define | vlse64_v_i64m4_m(...) __riscv_vlse64_v_i64m4_tumu(__VA_ARGS__) |
| #define | vlse64_v_i64m8(...) __riscv_vlse64_v_i64m8(__VA_ARGS__) |
| #define | vlse64_v_i64m8_m(...) __riscv_vlse64_v_i64m8_tumu(__VA_ARGS__) |
| #define | vlse64_v_u64m1(...) __riscv_vlse64_v_u64m1(__VA_ARGS__) |
| #define | vlse64_v_u64m1_m(...) __riscv_vlse64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlse64_v_u64m2(...) __riscv_vlse64_v_u64m2(__VA_ARGS__) |
| #define | vlse64_v_u64m2_m(...) __riscv_vlse64_v_u64m2_tumu(__VA_ARGS__) |
| #define | vlse64_v_u64m4(...) __riscv_vlse64_v_u64m4(__VA_ARGS__) |
| #define | vlse64_v_u64m4_m(...) __riscv_vlse64_v_u64m4_tumu(__VA_ARGS__) |
| #define | vlse64_v_u64m8(...) __riscv_vlse64_v_u64m8(__VA_ARGS__) |
| #define | vlse64_v_u64m8_m(...) __riscv_vlse64_v_u64m8_tumu(__VA_ARGS__) |
| #define | vlse8_v_i8m1(...) __riscv_vlse8_v_i8m1(__VA_ARGS__) |
| #define | vlse8_v_i8m1_m(...) __riscv_vlse8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlse8_v_i8m2(...) __riscv_vlse8_v_i8m2(__VA_ARGS__) |
| #define | vlse8_v_i8m2_m(...) __riscv_vlse8_v_i8m2_tumu(__VA_ARGS__) |
| #define | vlse8_v_i8m4(...) __riscv_vlse8_v_i8m4(__VA_ARGS__) |
| #define | vlse8_v_i8m4_m(...) __riscv_vlse8_v_i8m4_tumu(__VA_ARGS__) |
| #define | vlse8_v_i8m8(...) __riscv_vlse8_v_i8m8(__VA_ARGS__) |
| #define | vlse8_v_i8m8_m(...) __riscv_vlse8_v_i8m8_tumu(__VA_ARGS__) |
| #define | vlse8_v_i8mf2(...) __riscv_vlse8_v_i8mf2(__VA_ARGS__) |
| #define | vlse8_v_i8mf2_m(...) __riscv_vlse8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlse8_v_i8mf4(...) __riscv_vlse8_v_i8mf4(__VA_ARGS__) |
| #define | vlse8_v_i8mf4_m(...) __riscv_vlse8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlse8_v_i8mf8(...) __riscv_vlse8_v_i8mf8(__VA_ARGS__) |
| #define | vlse8_v_i8mf8_m(...) __riscv_vlse8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlse8_v_u8m1(...) __riscv_vlse8_v_u8m1(__VA_ARGS__) |
| #define | vlse8_v_u8m1_m(...) __riscv_vlse8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlse8_v_u8m2(...) __riscv_vlse8_v_u8m2(__VA_ARGS__) |
| #define | vlse8_v_u8m2_m(...) __riscv_vlse8_v_u8m2_tumu(__VA_ARGS__) |
| #define | vlse8_v_u8m4(...) __riscv_vlse8_v_u8m4(__VA_ARGS__) |
| #define | vlse8_v_u8m4_m(...) __riscv_vlse8_v_u8m4_tumu(__VA_ARGS__) |
| #define | vlse8_v_u8m8(...) __riscv_vlse8_v_u8m8(__VA_ARGS__) |
| #define | vlse8_v_u8m8_m(...) __riscv_vlse8_v_u8m8_tumu(__VA_ARGS__) |
| #define | vlse8_v_u8mf2(...) __riscv_vlse8_v_u8mf2(__VA_ARGS__) |
| #define | vlse8_v_u8mf2_m(...) __riscv_vlse8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlse8_v_u8mf4(...) __riscv_vlse8_v_u8mf4(__VA_ARGS__) |
| #define | vlse8_v_u8mf4_m(...) __riscv_vlse8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlse8_v_u8mf8(...) __riscv_vlse8_v_u8mf8(__VA_ARGS__) |
| #define | vlse8_v_u8mf8_m(...) __riscv_vlse8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlseg2e16_v_f16m1(...) __riscv_vlseg2e16_v_f16m1(__VA_ARGS__) |
| #define | vlseg2e16_v_f16m1_m(...) __riscv_vlseg2e16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlseg2e16_v_f16m2(...) __riscv_vlseg2e16_v_f16m2(__VA_ARGS__) |
| #define | vlseg2e16_v_f16m2_m(...) __riscv_vlseg2e16_v_f16m2_tumu(__VA_ARGS__) |
| #define | vlseg2e16_v_f16m4(...) __riscv_vlseg2e16_v_f16m4(__VA_ARGS__) |
| #define | vlseg2e16_v_f16m4_m(...) __riscv_vlseg2e16_v_f16m4_tumu(__VA_ARGS__) |
| #define | vlseg2e16_v_f16mf2(...) __riscv_vlseg2e16_v_f16mf2(__VA_ARGS__) |
| #define | vlseg2e16_v_f16mf2_m(...) __riscv_vlseg2e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlseg2e16_v_f16mf4(...) __riscv_vlseg2e16_v_f16mf4(__VA_ARGS__) |
| #define | vlseg2e16_v_f16mf4_m(...) __riscv_vlseg2e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlseg2e16_v_i16m1(...) __riscv_vlseg2e16_v_i16m1(__VA_ARGS__) |
| #define | vlseg2e16_v_i16m1_m(...) __riscv_vlseg2e16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlseg2e16_v_i16m2(...) __riscv_vlseg2e16_v_i16m2(__VA_ARGS__) |
| #define | vlseg2e16_v_i16m2_m(...) __riscv_vlseg2e16_v_i16m2_tumu(__VA_ARGS__) |
| #define | vlseg2e16_v_i16m4(...) __riscv_vlseg2e16_v_i16m4(__VA_ARGS__) |
| #define | vlseg2e16_v_i16m4_m(...) __riscv_vlseg2e16_v_i16m4_tumu(__VA_ARGS__) |
| #define | vlseg2e16_v_i16mf2(...) __riscv_vlseg2e16_v_i16mf2(__VA_ARGS__) |
| #define | vlseg2e16_v_i16mf2_m(...) __riscv_vlseg2e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlseg2e16_v_i16mf4(...) __riscv_vlseg2e16_v_i16mf4(__VA_ARGS__) |
| #define | vlseg2e16_v_i16mf4_m(...) __riscv_vlseg2e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlseg2e16_v_u16m1(...) __riscv_vlseg2e16_v_u16m1(__VA_ARGS__) |
| #define | vlseg2e16_v_u16m1_m(...) __riscv_vlseg2e16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlseg2e16_v_u16m2(...) __riscv_vlseg2e16_v_u16m2(__VA_ARGS__) |
| #define | vlseg2e16_v_u16m2_m(...) __riscv_vlseg2e16_v_u16m2_tumu(__VA_ARGS__) |
| #define | vlseg2e16_v_u16m4(...) __riscv_vlseg2e16_v_u16m4(__VA_ARGS__) |
| #define | vlseg2e16_v_u16m4_m(...) __riscv_vlseg2e16_v_u16m4_tumu(__VA_ARGS__) |
| #define | vlseg2e16_v_u16mf2(...) __riscv_vlseg2e16_v_u16mf2(__VA_ARGS__) |
| #define | vlseg2e16_v_u16mf2_m(...) __riscv_vlseg2e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlseg2e16_v_u16mf4(...) __riscv_vlseg2e16_v_u16mf4(__VA_ARGS__) |
| #define | vlseg2e16_v_u16mf4_m(...) __riscv_vlseg2e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlseg2e16ff_v_f16m1(...) __riscv_vlseg2e16ff_v_f16m1(__VA_ARGS__) |
| #define | vlseg2e16ff_v_f16m1_m(...) __riscv_vlseg2e16ff_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlseg2e16ff_v_f16m2(...) __riscv_vlseg2e16ff_v_f16m2(__VA_ARGS__) |
| #define | vlseg2e16ff_v_f16m2_m(...) __riscv_vlseg2e16ff_v_f16m2_tumu(__VA_ARGS__) |
| #define | vlseg2e16ff_v_f16m4(...) __riscv_vlseg2e16ff_v_f16m4(__VA_ARGS__) |
| #define | vlseg2e16ff_v_f16m4_m(...) __riscv_vlseg2e16ff_v_f16m4_tumu(__VA_ARGS__) |
| #define | vlseg2e16ff_v_f16mf2(...) __riscv_vlseg2e16ff_v_f16mf2(__VA_ARGS__) |
| #define | vlseg2e16ff_v_f16mf2_m(...) __riscv_vlseg2e16ff_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlseg2e16ff_v_f16mf4(...) __riscv_vlseg2e16ff_v_f16mf4(__VA_ARGS__) |
| #define | vlseg2e16ff_v_f16mf4_m(...) __riscv_vlseg2e16ff_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlseg2e16ff_v_i16m1(...) __riscv_vlseg2e16ff_v_i16m1(__VA_ARGS__) |
| #define | vlseg2e16ff_v_i16m1_m(...) __riscv_vlseg2e16ff_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlseg2e16ff_v_i16m2(...) __riscv_vlseg2e16ff_v_i16m2(__VA_ARGS__) |
| #define | vlseg2e16ff_v_i16m2_m(...) __riscv_vlseg2e16ff_v_i16m2_tumu(__VA_ARGS__) |
| #define | vlseg2e16ff_v_i16m4(...) __riscv_vlseg2e16ff_v_i16m4(__VA_ARGS__) |
| #define | vlseg2e16ff_v_i16m4_m(...) __riscv_vlseg2e16ff_v_i16m4_tumu(__VA_ARGS__) |
| #define | vlseg2e16ff_v_i16mf2(...) __riscv_vlseg2e16ff_v_i16mf2(__VA_ARGS__) |
| #define | vlseg2e16ff_v_i16mf2_m(...) __riscv_vlseg2e16ff_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlseg2e16ff_v_i16mf4(...) __riscv_vlseg2e16ff_v_i16mf4(__VA_ARGS__) |
| #define | vlseg2e16ff_v_i16mf4_m(...) __riscv_vlseg2e16ff_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlseg2e16ff_v_u16m1(...) __riscv_vlseg2e16ff_v_u16m1(__VA_ARGS__) |
| #define | vlseg2e16ff_v_u16m1_m(...) __riscv_vlseg2e16ff_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlseg2e16ff_v_u16m2(...) __riscv_vlseg2e16ff_v_u16m2(__VA_ARGS__) |
| #define | vlseg2e16ff_v_u16m2_m(...) __riscv_vlseg2e16ff_v_u16m2_tumu(__VA_ARGS__) |
| #define | vlseg2e16ff_v_u16m4(...) __riscv_vlseg2e16ff_v_u16m4(__VA_ARGS__) |
| #define | vlseg2e16ff_v_u16m4_m(...) __riscv_vlseg2e16ff_v_u16m4_tumu(__VA_ARGS__) |
| #define | vlseg2e16ff_v_u16mf2(...) __riscv_vlseg2e16ff_v_u16mf2(__VA_ARGS__) |
| #define | vlseg2e16ff_v_u16mf2_m(...) __riscv_vlseg2e16ff_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlseg2e16ff_v_u16mf4(...) __riscv_vlseg2e16ff_v_u16mf4(__VA_ARGS__) |
| #define | vlseg2e16ff_v_u16mf4_m(...) __riscv_vlseg2e16ff_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlseg2e32_v_f32m1(...) __riscv_vlseg2e32_v_f32m1(__VA_ARGS__) |
| #define | vlseg2e32_v_f32m1_m(...) __riscv_vlseg2e32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlseg2e32_v_f32m2(...) __riscv_vlseg2e32_v_f32m2(__VA_ARGS__) |
| #define | vlseg2e32_v_f32m2_m(...) __riscv_vlseg2e32_v_f32m2_tumu(__VA_ARGS__) |
| #define | vlseg2e32_v_f32m4(...) __riscv_vlseg2e32_v_f32m4(__VA_ARGS__) |
| #define | vlseg2e32_v_f32m4_m(...) __riscv_vlseg2e32_v_f32m4_tumu(__VA_ARGS__) |
| #define | vlseg2e32_v_f32mf2(...) __riscv_vlseg2e32_v_f32mf2(__VA_ARGS__) |
| #define | vlseg2e32_v_f32mf2_m(...) __riscv_vlseg2e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlseg2e32_v_i32m1(...) __riscv_vlseg2e32_v_i32m1(__VA_ARGS__) |
| #define | vlseg2e32_v_i32m1_m(...) __riscv_vlseg2e32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlseg2e32_v_i32m2(...) __riscv_vlseg2e32_v_i32m2(__VA_ARGS__) |
| #define | vlseg2e32_v_i32m2_m(...) __riscv_vlseg2e32_v_i32m2_tumu(__VA_ARGS__) |
| #define | vlseg2e32_v_i32m4(...) __riscv_vlseg2e32_v_i32m4(__VA_ARGS__) |
| #define | vlseg2e32_v_i32m4_m(...) __riscv_vlseg2e32_v_i32m4_tumu(__VA_ARGS__) |
| #define | vlseg2e32_v_i32mf2(...) __riscv_vlseg2e32_v_i32mf2(__VA_ARGS__) |
| #define | vlseg2e32_v_i32mf2_m(...) __riscv_vlseg2e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlseg2e32_v_u32m1(...) __riscv_vlseg2e32_v_u32m1(__VA_ARGS__) |
| #define | vlseg2e32_v_u32m1_m(...) __riscv_vlseg2e32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlseg2e32_v_u32m2(...) __riscv_vlseg2e32_v_u32m2(__VA_ARGS__) |
| #define | vlseg2e32_v_u32m2_m(...) __riscv_vlseg2e32_v_u32m2_tumu(__VA_ARGS__) |
| #define | vlseg2e32_v_u32m4(...) __riscv_vlseg2e32_v_u32m4(__VA_ARGS__) |
| #define | vlseg2e32_v_u32m4_m(...) __riscv_vlseg2e32_v_u32m4_tumu(__VA_ARGS__) |
| #define | vlseg2e32_v_u32mf2(...) __riscv_vlseg2e32_v_u32mf2(__VA_ARGS__) |
| #define | vlseg2e32_v_u32mf2_m(...) __riscv_vlseg2e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlseg2e32ff_v_f32m1(...) __riscv_vlseg2e32ff_v_f32m1(__VA_ARGS__) |
| #define | vlseg2e32ff_v_f32m1_m(...) __riscv_vlseg2e32ff_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlseg2e32ff_v_f32m2(...) __riscv_vlseg2e32ff_v_f32m2(__VA_ARGS__) |
| #define | vlseg2e32ff_v_f32m2_m(...) __riscv_vlseg2e32ff_v_f32m2_tumu(__VA_ARGS__) |
| #define | vlseg2e32ff_v_f32m4(...) __riscv_vlseg2e32ff_v_f32m4(__VA_ARGS__) |
| #define | vlseg2e32ff_v_f32m4_m(...) __riscv_vlseg2e32ff_v_f32m4_tumu(__VA_ARGS__) |
| #define | vlseg2e32ff_v_f32mf2(...) __riscv_vlseg2e32ff_v_f32mf2(__VA_ARGS__) |
| #define | vlseg2e32ff_v_f32mf2_m(...) __riscv_vlseg2e32ff_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlseg2e32ff_v_i32m1(...) __riscv_vlseg2e32ff_v_i32m1(__VA_ARGS__) |
| #define | vlseg2e32ff_v_i32m1_m(...) __riscv_vlseg2e32ff_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlseg2e32ff_v_i32m2(...) __riscv_vlseg2e32ff_v_i32m2(__VA_ARGS__) |
| #define | vlseg2e32ff_v_i32m2_m(...) __riscv_vlseg2e32ff_v_i32m2_tumu(__VA_ARGS__) |
| #define | vlseg2e32ff_v_i32m4(...) __riscv_vlseg2e32ff_v_i32m4(__VA_ARGS__) |
| #define | vlseg2e32ff_v_i32m4_m(...) __riscv_vlseg2e32ff_v_i32m4_tumu(__VA_ARGS__) |
| #define | vlseg2e32ff_v_i32mf2(...) __riscv_vlseg2e32ff_v_i32mf2(__VA_ARGS__) |
| #define | vlseg2e32ff_v_i32mf2_m(...) __riscv_vlseg2e32ff_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlseg2e32ff_v_u32m1(...) __riscv_vlseg2e32ff_v_u32m1(__VA_ARGS__) |
| #define | vlseg2e32ff_v_u32m1_m(...) __riscv_vlseg2e32ff_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlseg2e32ff_v_u32m2(...) __riscv_vlseg2e32ff_v_u32m2(__VA_ARGS__) |
| #define | vlseg2e32ff_v_u32m2_m(...) __riscv_vlseg2e32ff_v_u32m2_tumu(__VA_ARGS__) |
| #define | vlseg2e32ff_v_u32m4(...) __riscv_vlseg2e32ff_v_u32m4(__VA_ARGS__) |
| #define | vlseg2e32ff_v_u32m4_m(...) __riscv_vlseg2e32ff_v_u32m4_tumu(__VA_ARGS__) |
| #define | vlseg2e32ff_v_u32mf2(...) __riscv_vlseg2e32ff_v_u32mf2(__VA_ARGS__) |
| #define | vlseg2e32ff_v_u32mf2_m(...) __riscv_vlseg2e32ff_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlseg2e64_v_f64m1(...) __riscv_vlseg2e64_v_f64m1(__VA_ARGS__) |
| #define | vlseg2e64_v_f64m1_m(...) __riscv_vlseg2e64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlseg2e64_v_f64m2(...) __riscv_vlseg2e64_v_f64m2(__VA_ARGS__) |
| #define | vlseg2e64_v_f64m2_m(...) __riscv_vlseg2e64_v_f64m2_tumu(__VA_ARGS__) |
| #define | vlseg2e64_v_f64m4(...) __riscv_vlseg2e64_v_f64m4(__VA_ARGS__) |
| #define | vlseg2e64_v_f64m4_m(...) __riscv_vlseg2e64_v_f64m4_tumu(__VA_ARGS__) |
| #define | vlseg2e64_v_i64m1(...) __riscv_vlseg2e64_v_i64m1(__VA_ARGS__) |
| #define | vlseg2e64_v_i64m1_m(...) __riscv_vlseg2e64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlseg2e64_v_i64m2(...) __riscv_vlseg2e64_v_i64m2(__VA_ARGS__) |
| #define | vlseg2e64_v_i64m2_m(...) __riscv_vlseg2e64_v_i64m2_tumu(__VA_ARGS__) |
| #define | vlseg2e64_v_i64m4(...) __riscv_vlseg2e64_v_i64m4(__VA_ARGS__) |
| #define | vlseg2e64_v_i64m4_m(...) __riscv_vlseg2e64_v_i64m4_tumu(__VA_ARGS__) |
| #define | vlseg2e64_v_u64m1(...) __riscv_vlseg2e64_v_u64m1(__VA_ARGS__) |
| #define | vlseg2e64_v_u64m1_m(...) __riscv_vlseg2e64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlseg2e64_v_u64m2(...) __riscv_vlseg2e64_v_u64m2(__VA_ARGS__) |
| #define | vlseg2e64_v_u64m2_m(...) __riscv_vlseg2e64_v_u64m2_tumu(__VA_ARGS__) |
| #define | vlseg2e64_v_u64m4(...) __riscv_vlseg2e64_v_u64m4(__VA_ARGS__) |
| #define | vlseg2e64_v_u64m4_m(...) __riscv_vlseg2e64_v_u64m4_tumu(__VA_ARGS__) |
| #define | vlseg2e64ff_v_f64m1(...) __riscv_vlseg2e64ff_v_f64m1(__VA_ARGS__) |
| #define | vlseg2e64ff_v_f64m1_m(...) __riscv_vlseg2e64ff_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlseg2e64ff_v_f64m2(...) __riscv_vlseg2e64ff_v_f64m2(__VA_ARGS__) |
| #define | vlseg2e64ff_v_f64m2_m(...) __riscv_vlseg2e64ff_v_f64m2_tumu(__VA_ARGS__) |
| #define | vlseg2e64ff_v_f64m4(...) __riscv_vlseg2e64ff_v_f64m4(__VA_ARGS__) |
| #define | vlseg2e64ff_v_f64m4_m(...) __riscv_vlseg2e64ff_v_f64m4_tumu(__VA_ARGS__) |
| #define | vlseg2e64ff_v_i64m1(...) __riscv_vlseg2e64ff_v_i64m1(__VA_ARGS__) |
| #define | vlseg2e64ff_v_i64m1_m(...) __riscv_vlseg2e64ff_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlseg2e64ff_v_i64m2(...) __riscv_vlseg2e64ff_v_i64m2(__VA_ARGS__) |
| #define | vlseg2e64ff_v_i64m2_m(...) __riscv_vlseg2e64ff_v_i64m2_tumu(__VA_ARGS__) |
| #define | vlseg2e64ff_v_i64m4(...) __riscv_vlseg2e64ff_v_i64m4(__VA_ARGS__) |
| #define | vlseg2e64ff_v_i64m4_m(...) __riscv_vlseg2e64ff_v_i64m4_tumu(__VA_ARGS__) |
| #define | vlseg2e64ff_v_u64m1(...) __riscv_vlseg2e64ff_v_u64m1(__VA_ARGS__) |
| #define | vlseg2e64ff_v_u64m1_m(...) __riscv_vlseg2e64ff_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlseg2e64ff_v_u64m2(...) __riscv_vlseg2e64ff_v_u64m2(__VA_ARGS__) |
| #define | vlseg2e64ff_v_u64m2_m(...) __riscv_vlseg2e64ff_v_u64m2_tumu(__VA_ARGS__) |
| #define | vlseg2e64ff_v_u64m4(...) __riscv_vlseg2e64ff_v_u64m4(__VA_ARGS__) |
| #define | vlseg2e64ff_v_u64m4_m(...) __riscv_vlseg2e64ff_v_u64m4_tumu(__VA_ARGS__) |
| #define | vlseg2e8_v_i8m1(...) __riscv_vlseg2e8_v_i8m1(__VA_ARGS__) |
| #define | vlseg2e8_v_i8m1_m(...) __riscv_vlseg2e8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlseg2e8_v_i8m2(...) __riscv_vlseg2e8_v_i8m2(__VA_ARGS__) |
| #define | vlseg2e8_v_i8m2_m(...) __riscv_vlseg2e8_v_i8m2_tumu(__VA_ARGS__) |
| #define | vlseg2e8_v_i8m4(...) __riscv_vlseg2e8_v_i8m4(__VA_ARGS__) |
| #define | vlseg2e8_v_i8m4_m(...) __riscv_vlseg2e8_v_i8m4_tumu(__VA_ARGS__) |
| #define | vlseg2e8_v_i8mf2(...) __riscv_vlseg2e8_v_i8mf2(__VA_ARGS__) |
| #define | vlseg2e8_v_i8mf2_m(...) __riscv_vlseg2e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlseg2e8_v_i8mf4(...) __riscv_vlseg2e8_v_i8mf4(__VA_ARGS__) |
| #define | vlseg2e8_v_i8mf4_m(...) __riscv_vlseg2e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlseg2e8_v_i8mf8(...) __riscv_vlseg2e8_v_i8mf8(__VA_ARGS__) |
| #define | vlseg2e8_v_i8mf8_m(...) __riscv_vlseg2e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlseg2e8_v_u8m1(...) __riscv_vlseg2e8_v_u8m1(__VA_ARGS__) |
| #define | vlseg2e8_v_u8m1_m(...) __riscv_vlseg2e8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlseg2e8_v_u8m2(...) __riscv_vlseg2e8_v_u8m2(__VA_ARGS__) |
| #define | vlseg2e8_v_u8m2_m(...) __riscv_vlseg2e8_v_u8m2_tumu(__VA_ARGS__) |
| #define | vlseg2e8_v_u8m4(...) __riscv_vlseg2e8_v_u8m4(__VA_ARGS__) |
| #define | vlseg2e8_v_u8m4_m(...) __riscv_vlseg2e8_v_u8m4_tumu(__VA_ARGS__) |
| #define | vlseg2e8_v_u8mf2(...) __riscv_vlseg2e8_v_u8mf2(__VA_ARGS__) |
| #define | vlseg2e8_v_u8mf2_m(...) __riscv_vlseg2e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlseg2e8_v_u8mf4(...) __riscv_vlseg2e8_v_u8mf4(__VA_ARGS__) |
| #define | vlseg2e8_v_u8mf4_m(...) __riscv_vlseg2e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlseg2e8_v_u8mf8(...) __riscv_vlseg2e8_v_u8mf8(__VA_ARGS__) |
| #define | vlseg2e8_v_u8mf8_m(...) __riscv_vlseg2e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlseg2e8ff_v_i8m1(...) __riscv_vlseg2e8ff_v_i8m1(__VA_ARGS__) |
| #define | vlseg2e8ff_v_i8m1_m(...) __riscv_vlseg2e8ff_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlseg2e8ff_v_i8m2(...) __riscv_vlseg2e8ff_v_i8m2(__VA_ARGS__) |
| #define | vlseg2e8ff_v_i8m2_m(...) __riscv_vlseg2e8ff_v_i8m2_tumu(__VA_ARGS__) |
| #define | vlseg2e8ff_v_i8m4(...) __riscv_vlseg2e8ff_v_i8m4(__VA_ARGS__) |
| #define | vlseg2e8ff_v_i8m4_m(...) __riscv_vlseg2e8ff_v_i8m4_tumu(__VA_ARGS__) |
| #define | vlseg2e8ff_v_i8mf2(...) __riscv_vlseg2e8ff_v_i8mf2(__VA_ARGS__) |
| #define | vlseg2e8ff_v_i8mf2_m(...) __riscv_vlseg2e8ff_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlseg2e8ff_v_i8mf4(...) __riscv_vlseg2e8ff_v_i8mf4(__VA_ARGS__) |
| #define | vlseg2e8ff_v_i8mf4_m(...) __riscv_vlseg2e8ff_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlseg2e8ff_v_i8mf8(...) __riscv_vlseg2e8ff_v_i8mf8(__VA_ARGS__) |
| #define | vlseg2e8ff_v_i8mf8_m(...) __riscv_vlseg2e8ff_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlseg2e8ff_v_u8m1(...) __riscv_vlseg2e8ff_v_u8m1(__VA_ARGS__) |
| #define | vlseg2e8ff_v_u8m1_m(...) __riscv_vlseg2e8ff_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlseg2e8ff_v_u8m2(...) __riscv_vlseg2e8ff_v_u8m2(__VA_ARGS__) |
| #define | vlseg2e8ff_v_u8m2_m(...) __riscv_vlseg2e8ff_v_u8m2_tumu(__VA_ARGS__) |
| #define | vlseg2e8ff_v_u8m4(...) __riscv_vlseg2e8ff_v_u8m4(__VA_ARGS__) |
| #define | vlseg2e8ff_v_u8m4_m(...) __riscv_vlseg2e8ff_v_u8m4_tumu(__VA_ARGS__) |
| #define | vlseg2e8ff_v_u8mf2(...) __riscv_vlseg2e8ff_v_u8mf2(__VA_ARGS__) |
| #define | vlseg2e8ff_v_u8mf2_m(...) __riscv_vlseg2e8ff_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlseg2e8ff_v_u8mf4(...) __riscv_vlseg2e8ff_v_u8mf4(__VA_ARGS__) |
| #define | vlseg2e8ff_v_u8mf4_m(...) __riscv_vlseg2e8ff_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlseg2e8ff_v_u8mf8(...) __riscv_vlseg2e8ff_v_u8mf8(__VA_ARGS__) |
| #define | vlseg2e8ff_v_u8mf8_m(...) __riscv_vlseg2e8ff_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlseg3e16_v_f16m1(...) __riscv_vlseg3e16_v_f16m1(__VA_ARGS__) |
| #define | vlseg3e16_v_f16m1_m(...) __riscv_vlseg3e16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlseg3e16_v_f16m2(...) __riscv_vlseg3e16_v_f16m2(__VA_ARGS__) |
| #define | vlseg3e16_v_f16m2_m(...) __riscv_vlseg3e16_v_f16m2_tumu(__VA_ARGS__) |
| #define | vlseg3e16_v_f16mf2(...) __riscv_vlseg3e16_v_f16mf2(__VA_ARGS__) |
| #define | vlseg3e16_v_f16mf2_m(...) __riscv_vlseg3e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlseg3e16_v_f16mf4(...) __riscv_vlseg3e16_v_f16mf4(__VA_ARGS__) |
| #define | vlseg3e16_v_f16mf4_m(...) __riscv_vlseg3e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlseg3e16_v_i16m1(...) __riscv_vlseg3e16_v_i16m1(__VA_ARGS__) |
| #define | vlseg3e16_v_i16m1_m(...) __riscv_vlseg3e16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlseg3e16_v_i16m2(...) __riscv_vlseg3e16_v_i16m2(__VA_ARGS__) |
| #define | vlseg3e16_v_i16m2_m(...) __riscv_vlseg3e16_v_i16m2_tumu(__VA_ARGS__) |
| #define | vlseg3e16_v_i16mf2(...) __riscv_vlseg3e16_v_i16mf2(__VA_ARGS__) |
| #define | vlseg3e16_v_i16mf2_m(...) __riscv_vlseg3e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlseg3e16_v_i16mf4(...) __riscv_vlseg3e16_v_i16mf4(__VA_ARGS__) |
| #define | vlseg3e16_v_i16mf4_m(...) __riscv_vlseg3e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlseg3e16_v_u16m1(...) __riscv_vlseg3e16_v_u16m1(__VA_ARGS__) |
| #define | vlseg3e16_v_u16m1_m(...) __riscv_vlseg3e16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlseg3e16_v_u16m2(...) __riscv_vlseg3e16_v_u16m2(__VA_ARGS__) |
| #define | vlseg3e16_v_u16m2_m(...) __riscv_vlseg3e16_v_u16m2_tumu(__VA_ARGS__) |
| #define | vlseg3e16_v_u16mf2(...) __riscv_vlseg3e16_v_u16mf2(__VA_ARGS__) |
| #define | vlseg3e16_v_u16mf2_m(...) __riscv_vlseg3e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlseg3e16_v_u16mf4(...) __riscv_vlseg3e16_v_u16mf4(__VA_ARGS__) |
| #define | vlseg3e16_v_u16mf4_m(...) __riscv_vlseg3e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlseg3e16ff_v_f16m1(...) __riscv_vlseg3e16ff_v_f16m1(__VA_ARGS__) |
| #define | vlseg3e16ff_v_f16m1_m(...) __riscv_vlseg3e16ff_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlseg3e16ff_v_f16m2(...) __riscv_vlseg3e16ff_v_f16m2(__VA_ARGS__) |
| #define | vlseg3e16ff_v_f16m2_m(...) __riscv_vlseg3e16ff_v_f16m2_tumu(__VA_ARGS__) |
| #define | vlseg3e16ff_v_f16mf2(...) __riscv_vlseg3e16ff_v_f16mf2(__VA_ARGS__) |
| #define | vlseg3e16ff_v_f16mf2_m(...) __riscv_vlseg3e16ff_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlseg3e16ff_v_f16mf4(...) __riscv_vlseg3e16ff_v_f16mf4(__VA_ARGS__) |
| #define | vlseg3e16ff_v_f16mf4_m(...) __riscv_vlseg3e16ff_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlseg3e16ff_v_i16m1(...) __riscv_vlseg3e16ff_v_i16m1(__VA_ARGS__) |
| #define | vlseg3e16ff_v_i16m1_m(...) __riscv_vlseg3e16ff_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlseg3e16ff_v_i16m2(...) __riscv_vlseg3e16ff_v_i16m2(__VA_ARGS__) |
| #define | vlseg3e16ff_v_i16m2_m(...) __riscv_vlseg3e16ff_v_i16m2_tumu(__VA_ARGS__) |
| #define | vlseg3e16ff_v_i16mf2(...) __riscv_vlseg3e16ff_v_i16mf2(__VA_ARGS__) |
| #define | vlseg3e16ff_v_i16mf2_m(...) __riscv_vlseg3e16ff_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlseg3e16ff_v_i16mf4(...) __riscv_vlseg3e16ff_v_i16mf4(__VA_ARGS__) |
| #define | vlseg3e16ff_v_i16mf4_m(...) __riscv_vlseg3e16ff_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlseg3e16ff_v_u16m1(...) __riscv_vlseg3e16ff_v_u16m1(__VA_ARGS__) |
| #define | vlseg3e16ff_v_u16m1_m(...) __riscv_vlseg3e16ff_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlseg3e16ff_v_u16m2(...) __riscv_vlseg3e16ff_v_u16m2(__VA_ARGS__) |
| #define | vlseg3e16ff_v_u16m2_m(...) __riscv_vlseg3e16ff_v_u16m2_tumu(__VA_ARGS__) |
| #define | vlseg3e16ff_v_u16mf2(...) __riscv_vlseg3e16ff_v_u16mf2(__VA_ARGS__) |
| #define | vlseg3e16ff_v_u16mf2_m(...) __riscv_vlseg3e16ff_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlseg3e16ff_v_u16mf4(...) __riscv_vlseg3e16ff_v_u16mf4(__VA_ARGS__) |
| #define | vlseg3e16ff_v_u16mf4_m(...) __riscv_vlseg3e16ff_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlseg3e32_v_f32m1(...) __riscv_vlseg3e32_v_f32m1(__VA_ARGS__) |
| #define | vlseg3e32_v_f32m1_m(...) __riscv_vlseg3e32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlseg3e32_v_f32m2(...) __riscv_vlseg3e32_v_f32m2(__VA_ARGS__) |
| #define | vlseg3e32_v_f32m2_m(...) __riscv_vlseg3e32_v_f32m2_tumu(__VA_ARGS__) |
| #define | vlseg3e32_v_f32mf2(...) __riscv_vlseg3e32_v_f32mf2(__VA_ARGS__) |
| #define | vlseg3e32_v_f32mf2_m(...) __riscv_vlseg3e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlseg3e32_v_i32m1(...) __riscv_vlseg3e32_v_i32m1(__VA_ARGS__) |
| #define | vlseg3e32_v_i32m1_m(...) __riscv_vlseg3e32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlseg3e32_v_i32m2(...) __riscv_vlseg3e32_v_i32m2(__VA_ARGS__) |
| #define | vlseg3e32_v_i32m2_m(...) __riscv_vlseg3e32_v_i32m2_tumu(__VA_ARGS__) |
| #define | vlseg3e32_v_i32mf2(...) __riscv_vlseg3e32_v_i32mf2(__VA_ARGS__) |
| #define | vlseg3e32_v_i32mf2_m(...) __riscv_vlseg3e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlseg3e32_v_u32m1(...) __riscv_vlseg3e32_v_u32m1(__VA_ARGS__) |
| #define | vlseg3e32_v_u32m1_m(...) __riscv_vlseg3e32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlseg3e32_v_u32m2(...) __riscv_vlseg3e32_v_u32m2(__VA_ARGS__) |
| #define | vlseg3e32_v_u32m2_m(...) __riscv_vlseg3e32_v_u32m2_tumu(__VA_ARGS__) |
| #define | vlseg3e32_v_u32mf2(...) __riscv_vlseg3e32_v_u32mf2(__VA_ARGS__) |
| #define | vlseg3e32_v_u32mf2_m(...) __riscv_vlseg3e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlseg3e32ff_v_f32m1(...) __riscv_vlseg3e32ff_v_f32m1(__VA_ARGS__) |
| #define | vlseg3e32ff_v_f32m1_m(...) __riscv_vlseg3e32ff_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlseg3e32ff_v_f32m2(...) __riscv_vlseg3e32ff_v_f32m2(__VA_ARGS__) |
| #define | vlseg3e32ff_v_f32m2_m(...) __riscv_vlseg3e32ff_v_f32m2_tumu(__VA_ARGS__) |
| #define | vlseg3e32ff_v_f32mf2(...) __riscv_vlseg3e32ff_v_f32mf2(__VA_ARGS__) |
| #define | vlseg3e32ff_v_f32mf2_m(...) __riscv_vlseg3e32ff_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlseg3e32ff_v_i32m1(...) __riscv_vlseg3e32ff_v_i32m1(__VA_ARGS__) |
| #define | vlseg3e32ff_v_i32m1_m(...) __riscv_vlseg3e32ff_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlseg3e32ff_v_i32m2(...) __riscv_vlseg3e32ff_v_i32m2(__VA_ARGS__) |
| #define | vlseg3e32ff_v_i32m2_m(...) __riscv_vlseg3e32ff_v_i32m2_tumu(__VA_ARGS__) |
| #define | vlseg3e32ff_v_i32mf2(...) __riscv_vlseg3e32ff_v_i32mf2(__VA_ARGS__) |
| #define | vlseg3e32ff_v_i32mf2_m(...) __riscv_vlseg3e32ff_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlseg3e32ff_v_u32m1(...) __riscv_vlseg3e32ff_v_u32m1(__VA_ARGS__) |
| #define | vlseg3e32ff_v_u32m1_m(...) __riscv_vlseg3e32ff_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlseg3e32ff_v_u32m2(...) __riscv_vlseg3e32ff_v_u32m2(__VA_ARGS__) |
| #define | vlseg3e32ff_v_u32m2_m(...) __riscv_vlseg3e32ff_v_u32m2_tumu(__VA_ARGS__) |
| #define | vlseg3e32ff_v_u32mf2(...) __riscv_vlseg3e32ff_v_u32mf2(__VA_ARGS__) |
| #define | vlseg3e32ff_v_u32mf2_m(...) __riscv_vlseg3e32ff_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlseg3e64_v_f64m1(...) __riscv_vlseg3e64_v_f64m1(__VA_ARGS__) |
| #define | vlseg3e64_v_f64m1_m(...) __riscv_vlseg3e64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlseg3e64_v_f64m2(...) __riscv_vlseg3e64_v_f64m2(__VA_ARGS__) |
| #define | vlseg3e64_v_f64m2_m(...) __riscv_vlseg3e64_v_f64m2_tumu(__VA_ARGS__) |
| #define | vlseg3e64_v_i64m1(...) __riscv_vlseg3e64_v_i64m1(__VA_ARGS__) |
| #define | vlseg3e64_v_i64m1_m(...) __riscv_vlseg3e64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlseg3e64_v_i64m2(...) __riscv_vlseg3e64_v_i64m2(__VA_ARGS__) |
| #define | vlseg3e64_v_i64m2_m(...) __riscv_vlseg3e64_v_i64m2_tumu(__VA_ARGS__) |
| #define | vlseg3e64_v_u64m1(...) __riscv_vlseg3e64_v_u64m1(__VA_ARGS__) |
| #define | vlseg3e64_v_u64m1_m(...) __riscv_vlseg3e64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlseg3e64_v_u64m2(...) __riscv_vlseg3e64_v_u64m2(__VA_ARGS__) |
| #define | vlseg3e64_v_u64m2_m(...) __riscv_vlseg3e64_v_u64m2_tumu(__VA_ARGS__) |
| #define | vlseg3e64ff_v_f64m1(...) __riscv_vlseg3e64ff_v_f64m1(__VA_ARGS__) |
| #define | vlseg3e64ff_v_f64m1_m(...) __riscv_vlseg3e64ff_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlseg3e64ff_v_f64m2(...) __riscv_vlseg3e64ff_v_f64m2(__VA_ARGS__) |
| #define | vlseg3e64ff_v_f64m2_m(...) __riscv_vlseg3e64ff_v_f64m2_tumu(__VA_ARGS__) |
| #define | vlseg3e64ff_v_i64m1(...) __riscv_vlseg3e64ff_v_i64m1(__VA_ARGS__) |
| #define | vlseg3e64ff_v_i64m1_m(...) __riscv_vlseg3e64ff_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlseg3e64ff_v_i64m2(...) __riscv_vlseg3e64ff_v_i64m2(__VA_ARGS__) |
| #define | vlseg3e64ff_v_i64m2_m(...) __riscv_vlseg3e64ff_v_i64m2_tumu(__VA_ARGS__) |
| #define | vlseg3e64ff_v_u64m1(...) __riscv_vlseg3e64ff_v_u64m1(__VA_ARGS__) |
| #define | vlseg3e64ff_v_u64m1_m(...) __riscv_vlseg3e64ff_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlseg3e64ff_v_u64m2(...) __riscv_vlseg3e64ff_v_u64m2(__VA_ARGS__) |
| #define | vlseg3e64ff_v_u64m2_m(...) __riscv_vlseg3e64ff_v_u64m2_tumu(__VA_ARGS__) |
| #define | vlseg3e8_v_i8m1(...) __riscv_vlseg3e8_v_i8m1(__VA_ARGS__) |
| #define | vlseg3e8_v_i8m1_m(...) __riscv_vlseg3e8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlseg3e8_v_i8m2(...) __riscv_vlseg3e8_v_i8m2(__VA_ARGS__) |
| #define | vlseg3e8_v_i8m2_m(...) __riscv_vlseg3e8_v_i8m2_tumu(__VA_ARGS__) |
| #define | vlseg3e8_v_i8mf2(...) __riscv_vlseg3e8_v_i8mf2(__VA_ARGS__) |
| #define | vlseg3e8_v_i8mf2_m(...) __riscv_vlseg3e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlseg3e8_v_i8mf4(...) __riscv_vlseg3e8_v_i8mf4(__VA_ARGS__) |
| #define | vlseg3e8_v_i8mf4_m(...) __riscv_vlseg3e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlseg3e8_v_i8mf8(...) __riscv_vlseg3e8_v_i8mf8(__VA_ARGS__) |
| #define | vlseg3e8_v_i8mf8_m(...) __riscv_vlseg3e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlseg3e8_v_u8m1(...) __riscv_vlseg3e8_v_u8m1(__VA_ARGS__) |
| #define | vlseg3e8_v_u8m1_m(...) __riscv_vlseg3e8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlseg3e8_v_u8m2(...) __riscv_vlseg3e8_v_u8m2(__VA_ARGS__) |
| #define | vlseg3e8_v_u8m2_m(...) __riscv_vlseg3e8_v_u8m2_tumu(__VA_ARGS__) |
| #define | vlseg3e8_v_u8mf2(...) __riscv_vlseg3e8_v_u8mf2(__VA_ARGS__) |
| #define | vlseg3e8_v_u8mf2_m(...) __riscv_vlseg3e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlseg3e8_v_u8mf4(...) __riscv_vlseg3e8_v_u8mf4(__VA_ARGS__) |
| #define | vlseg3e8_v_u8mf4_m(...) __riscv_vlseg3e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlseg3e8_v_u8mf8(...) __riscv_vlseg3e8_v_u8mf8(__VA_ARGS__) |
| #define | vlseg3e8_v_u8mf8_m(...) __riscv_vlseg3e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlseg3e8ff_v_i8m1(...) __riscv_vlseg3e8ff_v_i8m1(__VA_ARGS__) |
| #define | vlseg3e8ff_v_i8m1_m(...) __riscv_vlseg3e8ff_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlseg3e8ff_v_i8m2(...) __riscv_vlseg3e8ff_v_i8m2(__VA_ARGS__) |
| #define | vlseg3e8ff_v_i8m2_m(...) __riscv_vlseg3e8ff_v_i8m2_tumu(__VA_ARGS__) |
| #define | vlseg3e8ff_v_i8mf2(...) __riscv_vlseg3e8ff_v_i8mf2(__VA_ARGS__) |
| #define | vlseg3e8ff_v_i8mf2_m(...) __riscv_vlseg3e8ff_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlseg3e8ff_v_i8mf4(...) __riscv_vlseg3e8ff_v_i8mf4(__VA_ARGS__) |
| #define | vlseg3e8ff_v_i8mf4_m(...) __riscv_vlseg3e8ff_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlseg3e8ff_v_i8mf8(...) __riscv_vlseg3e8ff_v_i8mf8(__VA_ARGS__) |
| #define | vlseg3e8ff_v_i8mf8_m(...) __riscv_vlseg3e8ff_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlseg3e8ff_v_u8m1(...) __riscv_vlseg3e8ff_v_u8m1(__VA_ARGS__) |
| #define | vlseg3e8ff_v_u8m1_m(...) __riscv_vlseg3e8ff_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlseg3e8ff_v_u8m2(...) __riscv_vlseg3e8ff_v_u8m2(__VA_ARGS__) |
| #define | vlseg3e8ff_v_u8m2_m(...) __riscv_vlseg3e8ff_v_u8m2_tumu(__VA_ARGS__) |
| #define | vlseg3e8ff_v_u8mf2(...) __riscv_vlseg3e8ff_v_u8mf2(__VA_ARGS__) |
| #define | vlseg3e8ff_v_u8mf2_m(...) __riscv_vlseg3e8ff_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlseg3e8ff_v_u8mf4(...) __riscv_vlseg3e8ff_v_u8mf4(__VA_ARGS__) |
| #define | vlseg3e8ff_v_u8mf4_m(...) __riscv_vlseg3e8ff_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlseg3e8ff_v_u8mf8(...) __riscv_vlseg3e8ff_v_u8mf8(__VA_ARGS__) |
| #define | vlseg3e8ff_v_u8mf8_m(...) __riscv_vlseg3e8ff_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlseg4e16_v_f16m1(...) __riscv_vlseg4e16_v_f16m1(__VA_ARGS__) |
| #define | vlseg4e16_v_f16m1_m(...) __riscv_vlseg4e16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlseg4e16_v_f16m2(...) __riscv_vlseg4e16_v_f16m2(__VA_ARGS__) |
| #define | vlseg4e16_v_f16m2_m(...) __riscv_vlseg4e16_v_f16m2_tumu(__VA_ARGS__) |
| #define | vlseg4e16_v_f16mf2(...) __riscv_vlseg4e16_v_f16mf2(__VA_ARGS__) |
| #define | vlseg4e16_v_f16mf2_m(...) __riscv_vlseg4e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlseg4e16_v_f16mf4(...) __riscv_vlseg4e16_v_f16mf4(__VA_ARGS__) |
| #define | vlseg4e16_v_f16mf4_m(...) __riscv_vlseg4e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlseg4e16_v_i16m1(...) __riscv_vlseg4e16_v_i16m1(__VA_ARGS__) |
| #define | vlseg4e16_v_i16m1_m(...) __riscv_vlseg4e16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlseg4e16_v_i16m2(...) __riscv_vlseg4e16_v_i16m2(__VA_ARGS__) |
| #define | vlseg4e16_v_i16m2_m(...) __riscv_vlseg4e16_v_i16m2_tumu(__VA_ARGS__) |
| #define | vlseg4e16_v_i16mf2(...) __riscv_vlseg4e16_v_i16mf2(__VA_ARGS__) |
| #define | vlseg4e16_v_i16mf2_m(...) __riscv_vlseg4e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlseg4e16_v_i16mf4(...) __riscv_vlseg4e16_v_i16mf4(__VA_ARGS__) |
| #define | vlseg4e16_v_i16mf4_m(...) __riscv_vlseg4e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlseg4e16_v_u16m1(...) __riscv_vlseg4e16_v_u16m1(__VA_ARGS__) |
| #define | vlseg4e16_v_u16m1_m(...) __riscv_vlseg4e16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlseg4e16_v_u16m2(...) __riscv_vlseg4e16_v_u16m2(__VA_ARGS__) |
| #define | vlseg4e16_v_u16m2_m(...) __riscv_vlseg4e16_v_u16m2_tumu(__VA_ARGS__) |
| #define | vlseg4e16_v_u16mf2(...) __riscv_vlseg4e16_v_u16mf2(__VA_ARGS__) |
| #define | vlseg4e16_v_u16mf2_m(...) __riscv_vlseg4e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlseg4e16_v_u16mf4(...) __riscv_vlseg4e16_v_u16mf4(__VA_ARGS__) |
| #define | vlseg4e16_v_u16mf4_m(...) __riscv_vlseg4e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlseg4e16ff_v_f16m1(...) __riscv_vlseg4e16ff_v_f16m1(__VA_ARGS__) |
| #define | vlseg4e16ff_v_f16m1_m(...) __riscv_vlseg4e16ff_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlseg4e16ff_v_f16m2(...) __riscv_vlseg4e16ff_v_f16m2(__VA_ARGS__) |
| #define | vlseg4e16ff_v_f16m2_m(...) __riscv_vlseg4e16ff_v_f16m2_tumu(__VA_ARGS__) |
| #define | vlseg4e16ff_v_f16mf2(...) __riscv_vlseg4e16ff_v_f16mf2(__VA_ARGS__) |
| #define | vlseg4e16ff_v_f16mf2_m(...) __riscv_vlseg4e16ff_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlseg4e16ff_v_f16mf4(...) __riscv_vlseg4e16ff_v_f16mf4(__VA_ARGS__) |
| #define | vlseg4e16ff_v_f16mf4_m(...) __riscv_vlseg4e16ff_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlseg4e16ff_v_i16m1(...) __riscv_vlseg4e16ff_v_i16m1(__VA_ARGS__) |
| #define | vlseg4e16ff_v_i16m1_m(...) __riscv_vlseg4e16ff_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlseg4e16ff_v_i16m2(...) __riscv_vlseg4e16ff_v_i16m2(__VA_ARGS__) |
| #define | vlseg4e16ff_v_i16m2_m(...) __riscv_vlseg4e16ff_v_i16m2_tumu(__VA_ARGS__) |
| #define | vlseg4e16ff_v_i16mf2(...) __riscv_vlseg4e16ff_v_i16mf2(__VA_ARGS__) |
| #define | vlseg4e16ff_v_i16mf2_m(...) __riscv_vlseg4e16ff_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlseg4e16ff_v_i16mf4(...) __riscv_vlseg4e16ff_v_i16mf4(__VA_ARGS__) |
| #define | vlseg4e16ff_v_i16mf4_m(...) __riscv_vlseg4e16ff_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlseg4e16ff_v_u16m1(...) __riscv_vlseg4e16ff_v_u16m1(__VA_ARGS__) |
| #define | vlseg4e16ff_v_u16m1_m(...) __riscv_vlseg4e16ff_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlseg4e16ff_v_u16m2(...) __riscv_vlseg4e16ff_v_u16m2(__VA_ARGS__) |
| #define | vlseg4e16ff_v_u16m2_m(...) __riscv_vlseg4e16ff_v_u16m2_tumu(__VA_ARGS__) |
| #define | vlseg4e16ff_v_u16mf2(...) __riscv_vlseg4e16ff_v_u16mf2(__VA_ARGS__) |
| #define | vlseg4e16ff_v_u16mf2_m(...) __riscv_vlseg4e16ff_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlseg4e16ff_v_u16mf4(...) __riscv_vlseg4e16ff_v_u16mf4(__VA_ARGS__) |
| #define | vlseg4e16ff_v_u16mf4_m(...) __riscv_vlseg4e16ff_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlseg4e32_v_f32m1(...) __riscv_vlseg4e32_v_f32m1(__VA_ARGS__) |
| #define | vlseg4e32_v_f32m1_m(...) __riscv_vlseg4e32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlseg4e32_v_f32m2(...) __riscv_vlseg4e32_v_f32m2(__VA_ARGS__) |
| #define | vlseg4e32_v_f32m2_m(...) __riscv_vlseg4e32_v_f32m2_tumu(__VA_ARGS__) |
| #define | vlseg4e32_v_f32mf2(...) __riscv_vlseg4e32_v_f32mf2(__VA_ARGS__) |
| #define | vlseg4e32_v_f32mf2_m(...) __riscv_vlseg4e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlseg4e32_v_i32m1(...) __riscv_vlseg4e32_v_i32m1(__VA_ARGS__) |
| #define | vlseg4e32_v_i32m1_m(...) __riscv_vlseg4e32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlseg4e32_v_i32m2(...) __riscv_vlseg4e32_v_i32m2(__VA_ARGS__) |
| #define | vlseg4e32_v_i32m2_m(...) __riscv_vlseg4e32_v_i32m2_tumu(__VA_ARGS__) |
| #define | vlseg4e32_v_i32mf2(...) __riscv_vlseg4e32_v_i32mf2(__VA_ARGS__) |
| #define | vlseg4e32_v_i32mf2_m(...) __riscv_vlseg4e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlseg4e32_v_u32m1(...) __riscv_vlseg4e32_v_u32m1(__VA_ARGS__) |
| #define | vlseg4e32_v_u32m1_m(...) __riscv_vlseg4e32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlseg4e32_v_u32m2(...) __riscv_vlseg4e32_v_u32m2(__VA_ARGS__) |
| #define | vlseg4e32_v_u32m2_m(...) __riscv_vlseg4e32_v_u32m2_tumu(__VA_ARGS__) |
| #define | vlseg4e32_v_u32mf2(...) __riscv_vlseg4e32_v_u32mf2(__VA_ARGS__) |
| #define | vlseg4e32_v_u32mf2_m(...) __riscv_vlseg4e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlseg4e32ff_v_f32m1(...) __riscv_vlseg4e32ff_v_f32m1(__VA_ARGS__) |
| #define | vlseg4e32ff_v_f32m1_m(...) __riscv_vlseg4e32ff_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlseg4e32ff_v_f32m2(...) __riscv_vlseg4e32ff_v_f32m2(__VA_ARGS__) |
| #define | vlseg4e32ff_v_f32m2_m(...) __riscv_vlseg4e32ff_v_f32m2_tumu(__VA_ARGS__) |
| #define | vlseg4e32ff_v_f32mf2(...) __riscv_vlseg4e32ff_v_f32mf2(__VA_ARGS__) |
| #define | vlseg4e32ff_v_f32mf2_m(...) __riscv_vlseg4e32ff_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlseg4e32ff_v_i32m1(...) __riscv_vlseg4e32ff_v_i32m1(__VA_ARGS__) |
| #define | vlseg4e32ff_v_i32m1_m(...) __riscv_vlseg4e32ff_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlseg4e32ff_v_i32m2(...) __riscv_vlseg4e32ff_v_i32m2(__VA_ARGS__) |
| #define | vlseg4e32ff_v_i32m2_m(...) __riscv_vlseg4e32ff_v_i32m2_tumu(__VA_ARGS__) |
| #define | vlseg4e32ff_v_i32mf2(...) __riscv_vlseg4e32ff_v_i32mf2(__VA_ARGS__) |
| #define | vlseg4e32ff_v_i32mf2_m(...) __riscv_vlseg4e32ff_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlseg4e32ff_v_u32m1(...) __riscv_vlseg4e32ff_v_u32m1(__VA_ARGS__) |
| #define | vlseg4e32ff_v_u32m1_m(...) __riscv_vlseg4e32ff_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlseg4e32ff_v_u32m2(...) __riscv_vlseg4e32ff_v_u32m2(__VA_ARGS__) |
| #define | vlseg4e32ff_v_u32m2_m(...) __riscv_vlseg4e32ff_v_u32m2_tumu(__VA_ARGS__) |
| #define | vlseg4e32ff_v_u32mf2(...) __riscv_vlseg4e32ff_v_u32mf2(__VA_ARGS__) |
| #define | vlseg4e32ff_v_u32mf2_m(...) __riscv_vlseg4e32ff_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlseg4e64_v_f64m1(...) __riscv_vlseg4e64_v_f64m1(__VA_ARGS__) |
| #define | vlseg4e64_v_f64m1_m(...) __riscv_vlseg4e64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlseg4e64_v_f64m2(...) __riscv_vlseg4e64_v_f64m2(__VA_ARGS__) |
| #define | vlseg4e64_v_f64m2_m(...) __riscv_vlseg4e64_v_f64m2_tumu(__VA_ARGS__) |
| #define | vlseg4e64_v_i64m1(...) __riscv_vlseg4e64_v_i64m1(__VA_ARGS__) |
| #define | vlseg4e64_v_i64m1_m(...) __riscv_vlseg4e64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlseg4e64_v_i64m2(...) __riscv_vlseg4e64_v_i64m2(__VA_ARGS__) |
| #define | vlseg4e64_v_i64m2_m(...) __riscv_vlseg4e64_v_i64m2_tumu(__VA_ARGS__) |
| #define | vlseg4e64_v_u64m1(...) __riscv_vlseg4e64_v_u64m1(__VA_ARGS__) |
| #define | vlseg4e64_v_u64m1_m(...) __riscv_vlseg4e64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlseg4e64_v_u64m2(...) __riscv_vlseg4e64_v_u64m2(__VA_ARGS__) |
| #define | vlseg4e64_v_u64m2_m(...) __riscv_vlseg4e64_v_u64m2_tumu(__VA_ARGS__) |
| #define | vlseg4e64ff_v_f64m1(...) __riscv_vlseg4e64ff_v_f64m1(__VA_ARGS__) |
| #define | vlseg4e64ff_v_f64m1_m(...) __riscv_vlseg4e64ff_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlseg4e64ff_v_f64m2(...) __riscv_vlseg4e64ff_v_f64m2(__VA_ARGS__) |
| #define | vlseg4e64ff_v_f64m2_m(...) __riscv_vlseg4e64ff_v_f64m2_tumu(__VA_ARGS__) |
| #define | vlseg4e64ff_v_i64m1(...) __riscv_vlseg4e64ff_v_i64m1(__VA_ARGS__) |
| #define | vlseg4e64ff_v_i64m1_m(...) __riscv_vlseg4e64ff_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlseg4e64ff_v_i64m2(...) __riscv_vlseg4e64ff_v_i64m2(__VA_ARGS__) |
| #define | vlseg4e64ff_v_i64m2_m(...) __riscv_vlseg4e64ff_v_i64m2_tumu(__VA_ARGS__) |
| #define | vlseg4e64ff_v_u64m1(...) __riscv_vlseg4e64ff_v_u64m1(__VA_ARGS__) |
| #define | vlseg4e64ff_v_u64m1_m(...) __riscv_vlseg4e64ff_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlseg4e64ff_v_u64m2(...) __riscv_vlseg4e64ff_v_u64m2(__VA_ARGS__) |
| #define | vlseg4e64ff_v_u64m2_m(...) __riscv_vlseg4e64ff_v_u64m2_tumu(__VA_ARGS__) |
| #define | vlseg4e8_v_i8m1(...) __riscv_vlseg4e8_v_i8m1(__VA_ARGS__) |
| #define | vlseg4e8_v_i8m1_m(...) __riscv_vlseg4e8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlseg4e8_v_i8m2(...) __riscv_vlseg4e8_v_i8m2(__VA_ARGS__) |
| #define | vlseg4e8_v_i8m2_m(...) __riscv_vlseg4e8_v_i8m2_tumu(__VA_ARGS__) |
| #define | vlseg4e8_v_i8mf2(...) __riscv_vlseg4e8_v_i8mf2(__VA_ARGS__) |
| #define | vlseg4e8_v_i8mf2_m(...) __riscv_vlseg4e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlseg4e8_v_i8mf4(...) __riscv_vlseg4e8_v_i8mf4(__VA_ARGS__) |
| #define | vlseg4e8_v_i8mf4_m(...) __riscv_vlseg4e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlseg4e8_v_i8mf8(...) __riscv_vlseg4e8_v_i8mf8(__VA_ARGS__) |
| #define | vlseg4e8_v_i8mf8_m(...) __riscv_vlseg4e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlseg4e8_v_u8m1(...) __riscv_vlseg4e8_v_u8m1(__VA_ARGS__) |
| #define | vlseg4e8_v_u8m1_m(...) __riscv_vlseg4e8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlseg4e8_v_u8m2(...) __riscv_vlseg4e8_v_u8m2(__VA_ARGS__) |
| #define | vlseg4e8_v_u8m2_m(...) __riscv_vlseg4e8_v_u8m2_tumu(__VA_ARGS__) |
| #define | vlseg4e8_v_u8mf2(...) __riscv_vlseg4e8_v_u8mf2(__VA_ARGS__) |
| #define | vlseg4e8_v_u8mf2_m(...) __riscv_vlseg4e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlseg4e8_v_u8mf4(...) __riscv_vlseg4e8_v_u8mf4(__VA_ARGS__) |
| #define | vlseg4e8_v_u8mf4_m(...) __riscv_vlseg4e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlseg4e8_v_u8mf8(...) __riscv_vlseg4e8_v_u8mf8(__VA_ARGS__) |
| #define | vlseg4e8_v_u8mf8_m(...) __riscv_vlseg4e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlseg4e8ff_v_i8m1(...) __riscv_vlseg4e8ff_v_i8m1(__VA_ARGS__) |
| #define | vlseg4e8ff_v_i8m1_m(...) __riscv_vlseg4e8ff_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlseg4e8ff_v_i8m2(...) __riscv_vlseg4e8ff_v_i8m2(__VA_ARGS__) |
| #define | vlseg4e8ff_v_i8m2_m(...) __riscv_vlseg4e8ff_v_i8m2_tumu(__VA_ARGS__) |
| #define | vlseg4e8ff_v_i8mf2(...) __riscv_vlseg4e8ff_v_i8mf2(__VA_ARGS__) |
| #define | vlseg4e8ff_v_i8mf2_m(...) __riscv_vlseg4e8ff_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlseg4e8ff_v_i8mf4(...) __riscv_vlseg4e8ff_v_i8mf4(__VA_ARGS__) |
| #define | vlseg4e8ff_v_i8mf4_m(...) __riscv_vlseg4e8ff_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlseg4e8ff_v_i8mf8(...) __riscv_vlseg4e8ff_v_i8mf8(__VA_ARGS__) |
| #define | vlseg4e8ff_v_i8mf8_m(...) __riscv_vlseg4e8ff_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlseg4e8ff_v_u8m1(...) __riscv_vlseg4e8ff_v_u8m1(__VA_ARGS__) |
| #define | vlseg4e8ff_v_u8m1_m(...) __riscv_vlseg4e8ff_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlseg4e8ff_v_u8m2(...) __riscv_vlseg4e8ff_v_u8m2(__VA_ARGS__) |
| #define | vlseg4e8ff_v_u8m2_m(...) __riscv_vlseg4e8ff_v_u8m2_tumu(__VA_ARGS__) |
| #define | vlseg4e8ff_v_u8mf2(...) __riscv_vlseg4e8ff_v_u8mf2(__VA_ARGS__) |
| #define | vlseg4e8ff_v_u8mf2_m(...) __riscv_vlseg4e8ff_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlseg4e8ff_v_u8mf4(...) __riscv_vlseg4e8ff_v_u8mf4(__VA_ARGS__) |
| #define | vlseg4e8ff_v_u8mf4_m(...) __riscv_vlseg4e8ff_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlseg4e8ff_v_u8mf8(...) __riscv_vlseg4e8ff_v_u8mf8(__VA_ARGS__) |
| #define | vlseg4e8ff_v_u8mf8_m(...) __riscv_vlseg4e8ff_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlseg5e16_v_f16m1(...) __riscv_vlseg5e16_v_f16m1(__VA_ARGS__) |
| #define | vlseg5e16_v_f16m1_m(...) __riscv_vlseg5e16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlseg5e16_v_f16mf2(...) __riscv_vlseg5e16_v_f16mf2(__VA_ARGS__) |
| #define | vlseg5e16_v_f16mf2_m(...) __riscv_vlseg5e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlseg5e16_v_f16mf4(...) __riscv_vlseg5e16_v_f16mf4(__VA_ARGS__) |
| #define | vlseg5e16_v_f16mf4_m(...) __riscv_vlseg5e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlseg5e16_v_i16m1(...) __riscv_vlseg5e16_v_i16m1(__VA_ARGS__) |
| #define | vlseg5e16_v_i16m1_m(...) __riscv_vlseg5e16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlseg5e16_v_i16mf2(...) __riscv_vlseg5e16_v_i16mf2(__VA_ARGS__) |
| #define | vlseg5e16_v_i16mf2_m(...) __riscv_vlseg5e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlseg5e16_v_i16mf4(...) __riscv_vlseg5e16_v_i16mf4(__VA_ARGS__) |
| #define | vlseg5e16_v_i16mf4_m(...) __riscv_vlseg5e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlseg5e16_v_u16m1(...) __riscv_vlseg5e16_v_u16m1(__VA_ARGS__) |
| #define | vlseg5e16_v_u16m1_m(...) __riscv_vlseg5e16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlseg5e16_v_u16mf2(...) __riscv_vlseg5e16_v_u16mf2(__VA_ARGS__) |
| #define | vlseg5e16_v_u16mf2_m(...) __riscv_vlseg5e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlseg5e16_v_u16mf4(...) __riscv_vlseg5e16_v_u16mf4(__VA_ARGS__) |
| #define | vlseg5e16_v_u16mf4_m(...) __riscv_vlseg5e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlseg5e16ff_v_f16m1(...) __riscv_vlseg5e16ff_v_f16m1(__VA_ARGS__) |
| #define | vlseg5e16ff_v_f16m1_m(...) __riscv_vlseg5e16ff_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlseg5e16ff_v_f16mf2(...) __riscv_vlseg5e16ff_v_f16mf2(__VA_ARGS__) |
| #define | vlseg5e16ff_v_f16mf2_m(...) __riscv_vlseg5e16ff_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlseg5e16ff_v_f16mf4(...) __riscv_vlseg5e16ff_v_f16mf4(__VA_ARGS__) |
| #define | vlseg5e16ff_v_f16mf4_m(...) __riscv_vlseg5e16ff_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlseg5e16ff_v_i16m1(...) __riscv_vlseg5e16ff_v_i16m1(__VA_ARGS__) |
| #define | vlseg5e16ff_v_i16m1_m(...) __riscv_vlseg5e16ff_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlseg5e16ff_v_i16mf2(...) __riscv_vlseg5e16ff_v_i16mf2(__VA_ARGS__) |
| #define | vlseg5e16ff_v_i16mf2_m(...) __riscv_vlseg5e16ff_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlseg5e16ff_v_i16mf4(...) __riscv_vlseg5e16ff_v_i16mf4(__VA_ARGS__) |
| #define | vlseg5e16ff_v_i16mf4_m(...) __riscv_vlseg5e16ff_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlseg5e16ff_v_u16m1(...) __riscv_vlseg5e16ff_v_u16m1(__VA_ARGS__) |
| #define | vlseg5e16ff_v_u16m1_m(...) __riscv_vlseg5e16ff_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlseg5e16ff_v_u16mf2(...) __riscv_vlseg5e16ff_v_u16mf2(__VA_ARGS__) |
| #define | vlseg5e16ff_v_u16mf2_m(...) __riscv_vlseg5e16ff_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlseg5e16ff_v_u16mf4(...) __riscv_vlseg5e16ff_v_u16mf4(__VA_ARGS__) |
| #define | vlseg5e16ff_v_u16mf4_m(...) __riscv_vlseg5e16ff_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlseg5e32_v_f32m1(...) __riscv_vlseg5e32_v_f32m1(__VA_ARGS__) |
| #define | vlseg5e32_v_f32m1_m(...) __riscv_vlseg5e32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlseg5e32_v_f32mf2(...) __riscv_vlseg5e32_v_f32mf2(__VA_ARGS__) |
| #define | vlseg5e32_v_f32mf2_m(...) __riscv_vlseg5e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlseg5e32_v_i32m1(...) __riscv_vlseg5e32_v_i32m1(__VA_ARGS__) |
| #define | vlseg5e32_v_i32m1_m(...) __riscv_vlseg5e32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlseg5e32_v_i32mf2(...) __riscv_vlseg5e32_v_i32mf2(__VA_ARGS__) |
| #define | vlseg5e32_v_i32mf2_m(...) __riscv_vlseg5e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlseg5e32_v_u32m1(...) __riscv_vlseg5e32_v_u32m1(__VA_ARGS__) |
| #define | vlseg5e32_v_u32m1_m(...) __riscv_vlseg5e32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlseg5e32_v_u32mf2(...) __riscv_vlseg5e32_v_u32mf2(__VA_ARGS__) |
| #define | vlseg5e32_v_u32mf2_m(...) __riscv_vlseg5e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlseg5e32ff_v_f32m1(...) __riscv_vlseg5e32ff_v_f32m1(__VA_ARGS__) |
| #define | vlseg5e32ff_v_f32m1_m(...) __riscv_vlseg5e32ff_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlseg5e32ff_v_f32mf2(...) __riscv_vlseg5e32ff_v_f32mf2(__VA_ARGS__) |
| #define | vlseg5e32ff_v_f32mf2_m(...) __riscv_vlseg5e32ff_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlseg5e32ff_v_i32m1(...) __riscv_vlseg5e32ff_v_i32m1(__VA_ARGS__) |
| #define | vlseg5e32ff_v_i32m1_m(...) __riscv_vlseg5e32ff_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlseg5e32ff_v_i32mf2(...) __riscv_vlseg5e32ff_v_i32mf2(__VA_ARGS__) |
| #define | vlseg5e32ff_v_i32mf2_m(...) __riscv_vlseg5e32ff_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlseg5e32ff_v_u32m1(...) __riscv_vlseg5e32ff_v_u32m1(__VA_ARGS__) |
| #define | vlseg5e32ff_v_u32m1_m(...) __riscv_vlseg5e32ff_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlseg5e32ff_v_u32mf2(...) __riscv_vlseg5e32ff_v_u32mf2(__VA_ARGS__) |
| #define | vlseg5e32ff_v_u32mf2_m(...) __riscv_vlseg5e32ff_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlseg5e64_v_f64m1(...) __riscv_vlseg5e64_v_f64m1(__VA_ARGS__) |
| #define | vlseg5e64_v_f64m1_m(...) __riscv_vlseg5e64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlseg5e64_v_i64m1(...) __riscv_vlseg5e64_v_i64m1(__VA_ARGS__) |
| #define | vlseg5e64_v_i64m1_m(...) __riscv_vlseg5e64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlseg5e64_v_u64m1(...) __riscv_vlseg5e64_v_u64m1(__VA_ARGS__) |
| #define | vlseg5e64_v_u64m1_m(...) __riscv_vlseg5e64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlseg5e64ff_v_f64m1(...) __riscv_vlseg5e64ff_v_f64m1(__VA_ARGS__) |
| #define | vlseg5e64ff_v_f64m1_m(...) __riscv_vlseg5e64ff_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlseg5e64ff_v_i64m1(...) __riscv_vlseg5e64ff_v_i64m1(__VA_ARGS__) |
| #define | vlseg5e64ff_v_i64m1_m(...) __riscv_vlseg5e64ff_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlseg5e64ff_v_u64m1(...) __riscv_vlseg5e64ff_v_u64m1(__VA_ARGS__) |
| #define | vlseg5e64ff_v_u64m1_m(...) __riscv_vlseg5e64ff_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlseg5e8_v_i8m1(...) __riscv_vlseg5e8_v_i8m1(__VA_ARGS__) |
| #define | vlseg5e8_v_i8m1_m(...) __riscv_vlseg5e8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlseg5e8_v_i8mf2(...) __riscv_vlseg5e8_v_i8mf2(__VA_ARGS__) |
| #define | vlseg5e8_v_i8mf2_m(...) __riscv_vlseg5e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlseg5e8_v_i8mf4(...) __riscv_vlseg5e8_v_i8mf4(__VA_ARGS__) |
| #define | vlseg5e8_v_i8mf4_m(...) __riscv_vlseg5e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlseg5e8_v_i8mf8(...) __riscv_vlseg5e8_v_i8mf8(__VA_ARGS__) |
| #define | vlseg5e8_v_i8mf8_m(...) __riscv_vlseg5e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlseg5e8_v_u8m1(...) __riscv_vlseg5e8_v_u8m1(__VA_ARGS__) |
| #define | vlseg5e8_v_u8m1_m(...) __riscv_vlseg5e8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlseg5e8_v_u8mf2(...) __riscv_vlseg5e8_v_u8mf2(__VA_ARGS__) |
| #define | vlseg5e8_v_u8mf2_m(...) __riscv_vlseg5e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlseg5e8_v_u8mf4(...) __riscv_vlseg5e8_v_u8mf4(__VA_ARGS__) |
| #define | vlseg5e8_v_u8mf4_m(...) __riscv_vlseg5e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlseg5e8_v_u8mf8(...) __riscv_vlseg5e8_v_u8mf8(__VA_ARGS__) |
| #define | vlseg5e8_v_u8mf8_m(...) __riscv_vlseg5e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlseg5e8ff_v_i8m1(...) __riscv_vlseg5e8ff_v_i8m1(__VA_ARGS__) |
| #define | vlseg5e8ff_v_i8m1_m(...) __riscv_vlseg5e8ff_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlseg5e8ff_v_i8mf2(...) __riscv_vlseg5e8ff_v_i8mf2(__VA_ARGS__) |
| #define | vlseg5e8ff_v_i8mf2_m(...) __riscv_vlseg5e8ff_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlseg5e8ff_v_i8mf4(...) __riscv_vlseg5e8ff_v_i8mf4(__VA_ARGS__) |
| #define | vlseg5e8ff_v_i8mf4_m(...) __riscv_vlseg5e8ff_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlseg5e8ff_v_i8mf8(...) __riscv_vlseg5e8ff_v_i8mf8(__VA_ARGS__) |
| #define | vlseg5e8ff_v_i8mf8_m(...) __riscv_vlseg5e8ff_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlseg5e8ff_v_u8m1(...) __riscv_vlseg5e8ff_v_u8m1(__VA_ARGS__) |
| #define | vlseg5e8ff_v_u8m1_m(...) __riscv_vlseg5e8ff_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlseg5e8ff_v_u8mf2(...) __riscv_vlseg5e8ff_v_u8mf2(__VA_ARGS__) |
| #define | vlseg5e8ff_v_u8mf2_m(...) __riscv_vlseg5e8ff_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlseg5e8ff_v_u8mf4(...) __riscv_vlseg5e8ff_v_u8mf4(__VA_ARGS__) |
| #define | vlseg5e8ff_v_u8mf4_m(...) __riscv_vlseg5e8ff_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlseg5e8ff_v_u8mf8(...) __riscv_vlseg5e8ff_v_u8mf8(__VA_ARGS__) |
| #define | vlseg5e8ff_v_u8mf8_m(...) __riscv_vlseg5e8ff_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlseg6e16_v_f16m1(...) __riscv_vlseg6e16_v_f16m1(__VA_ARGS__) |
| #define | vlseg6e16_v_f16m1_m(...) __riscv_vlseg6e16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlseg6e16_v_f16mf2(...) __riscv_vlseg6e16_v_f16mf2(__VA_ARGS__) |
| #define | vlseg6e16_v_f16mf2_m(...) __riscv_vlseg6e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlseg6e16_v_f16mf4(...) __riscv_vlseg6e16_v_f16mf4(__VA_ARGS__) |
| #define | vlseg6e16_v_f16mf4_m(...) __riscv_vlseg6e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlseg6e16_v_i16m1(...) __riscv_vlseg6e16_v_i16m1(__VA_ARGS__) |
| #define | vlseg6e16_v_i16m1_m(...) __riscv_vlseg6e16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlseg6e16_v_i16mf2(...) __riscv_vlseg6e16_v_i16mf2(__VA_ARGS__) |
| #define | vlseg6e16_v_i16mf2_m(...) __riscv_vlseg6e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlseg6e16_v_i16mf4(...) __riscv_vlseg6e16_v_i16mf4(__VA_ARGS__) |
| #define | vlseg6e16_v_i16mf4_m(...) __riscv_vlseg6e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlseg6e16_v_u16m1(...) __riscv_vlseg6e16_v_u16m1(__VA_ARGS__) |
| #define | vlseg6e16_v_u16m1_m(...) __riscv_vlseg6e16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlseg6e16_v_u16mf2(...) __riscv_vlseg6e16_v_u16mf2(__VA_ARGS__) |
| #define | vlseg6e16_v_u16mf2_m(...) __riscv_vlseg6e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlseg6e16_v_u16mf4(...) __riscv_vlseg6e16_v_u16mf4(__VA_ARGS__) |
| #define | vlseg6e16_v_u16mf4_m(...) __riscv_vlseg6e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlseg6e16ff_v_f16m1(...) __riscv_vlseg6e16ff_v_f16m1(__VA_ARGS__) |
| #define | vlseg6e16ff_v_f16m1_m(...) __riscv_vlseg6e16ff_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlseg6e16ff_v_f16mf2(...) __riscv_vlseg6e16ff_v_f16mf2(__VA_ARGS__) |
| #define | vlseg6e16ff_v_f16mf2_m(...) __riscv_vlseg6e16ff_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlseg6e16ff_v_f16mf4(...) __riscv_vlseg6e16ff_v_f16mf4(__VA_ARGS__) |
| #define | vlseg6e16ff_v_f16mf4_m(...) __riscv_vlseg6e16ff_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlseg6e16ff_v_i16m1(...) __riscv_vlseg6e16ff_v_i16m1(__VA_ARGS__) |
| #define | vlseg6e16ff_v_i16m1_m(...) __riscv_vlseg6e16ff_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlseg6e16ff_v_i16mf2(...) __riscv_vlseg6e16ff_v_i16mf2(__VA_ARGS__) |
| #define | vlseg6e16ff_v_i16mf2_m(...) __riscv_vlseg6e16ff_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlseg6e16ff_v_i16mf4(...) __riscv_vlseg6e16ff_v_i16mf4(__VA_ARGS__) |
| #define | vlseg6e16ff_v_i16mf4_m(...) __riscv_vlseg6e16ff_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlseg6e16ff_v_u16m1(...) __riscv_vlseg6e16ff_v_u16m1(__VA_ARGS__) |
| #define | vlseg6e16ff_v_u16m1_m(...) __riscv_vlseg6e16ff_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlseg6e16ff_v_u16mf2(...) __riscv_vlseg6e16ff_v_u16mf2(__VA_ARGS__) |
| #define | vlseg6e16ff_v_u16mf2_m(...) __riscv_vlseg6e16ff_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlseg6e16ff_v_u16mf4(...) __riscv_vlseg6e16ff_v_u16mf4(__VA_ARGS__) |
| #define | vlseg6e16ff_v_u16mf4_m(...) __riscv_vlseg6e16ff_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlseg6e32_v_f32m1(...) __riscv_vlseg6e32_v_f32m1(__VA_ARGS__) |
| #define | vlseg6e32_v_f32m1_m(...) __riscv_vlseg6e32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlseg6e32_v_f32mf2(...) __riscv_vlseg6e32_v_f32mf2(__VA_ARGS__) |
| #define | vlseg6e32_v_f32mf2_m(...) __riscv_vlseg6e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlseg6e32_v_i32m1(...) __riscv_vlseg6e32_v_i32m1(__VA_ARGS__) |
| #define | vlseg6e32_v_i32m1_m(...) __riscv_vlseg6e32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlseg6e32_v_i32mf2(...) __riscv_vlseg6e32_v_i32mf2(__VA_ARGS__) |
| #define | vlseg6e32_v_i32mf2_m(...) __riscv_vlseg6e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlseg6e32_v_u32m1(...) __riscv_vlseg6e32_v_u32m1(__VA_ARGS__) |
| #define | vlseg6e32_v_u32m1_m(...) __riscv_vlseg6e32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlseg6e32_v_u32mf2(...) __riscv_vlseg6e32_v_u32mf2(__VA_ARGS__) |
| #define | vlseg6e32_v_u32mf2_m(...) __riscv_vlseg6e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlseg6e32ff_v_f32m1(...) __riscv_vlseg6e32ff_v_f32m1(__VA_ARGS__) |
| #define | vlseg6e32ff_v_f32m1_m(...) __riscv_vlseg6e32ff_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlseg6e32ff_v_f32mf2(...) __riscv_vlseg6e32ff_v_f32mf2(__VA_ARGS__) |
| #define | vlseg6e32ff_v_f32mf2_m(...) __riscv_vlseg6e32ff_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlseg6e32ff_v_i32m1(...) __riscv_vlseg6e32ff_v_i32m1(__VA_ARGS__) |
| #define | vlseg6e32ff_v_i32m1_m(...) __riscv_vlseg6e32ff_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlseg6e32ff_v_i32mf2(...) __riscv_vlseg6e32ff_v_i32mf2(__VA_ARGS__) |
| #define | vlseg6e32ff_v_i32mf2_m(...) __riscv_vlseg6e32ff_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlseg6e32ff_v_u32m1(...) __riscv_vlseg6e32ff_v_u32m1(__VA_ARGS__) |
| #define | vlseg6e32ff_v_u32m1_m(...) __riscv_vlseg6e32ff_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlseg6e32ff_v_u32mf2(...) __riscv_vlseg6e32ff_v_u32mf2(__VA_ARGS__) |
| #define | vlseg6e32ff_v_u32mf2_m(...) __riscv_vlseg6e32ff_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlseg6e64_v_f64m1(...) __riscv_vlseg6e64_v_f64m1(__VA_ARGS__) |
| #define | vlseg6e64_v_f64m1_m(...) __riscv_vlseg6e64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlseg6e64_v_i64m1(...) __riscv_vlseg6e64_v_i64m1(__VA_ARGS__) |
| #define | vlseg6e64_v_i64m1_m(...) __riscv_vlseg6e64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlseg6e64_v_u64m1(...) __riscv_vlseg6e64_v_u64m1(__VA_ARGS__) |
| #define | vlseg6e64_v_u64m1_m(...) __riscv_vlseg6e64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlseg6e64ff_v_f64m1(...) __riscv_vlseg6e64ff_v_f64m1(__VA_ARGS__) |
| #define | vlseg6e64ff_v_f64m1_m(...) __riscv_vlseg6e64ff_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlseg6e64ff_v_i64m1(...) __riscv_vlseg6e64ff_v_i64m1(__VA_ARGS__) |
| #define | vlseg6e64ff_v_i64m1_m(...) __riscv_vlseg6e64ff_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlseg6e64ff_v_u64m1(...) __riscv_vlseg6e64ff_v_u64m1(__VA_ARGS__) |
| #define | vlseg6e64ff_v_u64m1_m(...) __riscv_vlseg6e64ff_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlseg6e8_v_i8m1(...) __riscv_vlseg6e8_v_i8m1(__VA_ARGS__) |
| #define | vlseg6e8_v_i8m1_m(...) __riscv_vlseg6e8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlseg6e8_v_i8mf2(...) __riscv_vlseg6e8_v_i8mf2(__VA_ARGS__) |
| #define | vlseg6e8_v_i8mf2_m(...) __riscv_vlseg6e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlseg6e8_v_i8mf4(...) __riscv_vlseg6e8_v_i8mf4(__VA_ARGS__) |
| #define | vlseg6e8_v_i8mf4_m(...) __riscv_vlseg6e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlseg6e8_v_i8mf8(...) __riscv_vlseg6e8_v_i8mf8(__VA_ARGS__) |
| #define | vlseg6e8_v_i8mf8_m(...) __riscv_vlseg6e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlseg6e8_v_u8m1(...) __riscv_vlseg6e8_v_u8m1(__VA_ARGS__) |
| #define | vlseg6e8_v_u8m1_m(...) __riscv_vlseg6e8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlseg6e8_v_u8mf2(...) __riscv_vlseg6e8_v_u8mf2(__VA_ARGS__) |
| #define | vlseg6e8_v_u8mf2_m(...) __riscv_vlseg6e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlseg6e8_v_u8mf4(...) __riscv_vlseg6e8_v_u8mf4(__VA_ARGS__) |
| #define | vlseg6e8_v_u8mf4_m(...) __riscv_vlseg6e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlseg6e8_v_u8mf8(...) __riscv_vlseg6e8_v_u8mf8(__VA_ARGS__) |
| #define | vlseg6e8_v_u8mf8_m(...) __riscv_vlseg6e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlseg6e8ff_v_i8m1(...) __riscv_vlseg6e8ff_v_i8m1(__VA_ARGS__) |
| #define | vlseg6e8ff_v_i8m1_m(...) __riscv_vlseg6e8ff_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlseg6e8ff_v_i8mf2(...) __riscv_vlseg6e8ff_v_i8mf2(__VA_ARGS__) |
| #define | vlseg6e8ff_v_i8mf2_m(...) __riscv_vlseg6e8ff_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlseg6e8ff_v_i8mf4(...) __riscv_vlseg6e8ff_v_i8mf4(__VA_ARGS__) |
| #define | vlseg6e8ff_v_i8mf4_m(...) __riscv_vlseg6e8ff_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlseg6e8ff_v_i8mf8(...) __riscv_vlseg6e8ff_v_i8mf8(__VA_ARGS__) |
| #define | vlseg6e8ff_v_i8mf8_m(...) __riscv_vlseg6e8ff_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlseg6e8ff_v_u8m1(...) __riscv_vlseg6e8ff_v_u8m1(__VA_ARGS__) |
| #define | vlseg6e8ff_v_u8m1_m(...) __riscv_vlseg6e8ff_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlseg6e8ff_v_u8mf2(...) __riscv_vlseg6e8ff_v_u8mf2(__VA_ARGS__) |
| #define | vlseg6e8ff_v_u8mf2_m(...) __riscv_vlseg6e8ff_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlseg6e8ff_v_u8mf4(...) __riscv_vlseg6e8ff_v_u8mf4(__VA_ARGS__) |
| #define | vlseg6e8ff_v_u8mf4_m(...) __riscv_vlseg6e8ff_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlseg6e8ff_v_u8mf8(...) __riscv_vlseg6e8ff_v_u8mf8(__VA_ARGS__) |
| #define | vlseg6e8ff_v_u8mf8_m(...) __riscv_vlseg6e8ff_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlseg7e16_v_f16m1(...) __riscv_vlseg7e16_v_f16m1(__VA_ARGS__) |
| #define | vlseg7e16_v_f16m1_m(...) __riscv_vlseg7e16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlseg7e16_v_f16mf2(...) __riscv_vlseg7e16_v_f16mf2(__VA_ARGS__) |
| #define | vlseg7e16_v_f16mf2_m(...) __riscv_vlseg7e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlseg7e16_v_f16mf4(...) __riscv_vlseg7e16_v_f16mf4(__VA_ARGS__) |
| #define | vlseg7e16_v_f16mf4_m(...) __riscv_vlseg7e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlseg7e16_v_i16m1(...) __riscv_vlseg7e16_v_i16m1(__VA_ARGS__) |
| #define | vlseg7e16_v_i16m1_m(...) __riscv_vlseg7e16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlseg7e16_v_i16mf2(...) __riscv_vlseg7e16_v_i16mf2(__VA_ARGS__) |
| #define | vlseg7e16_v_i16mf2_m(...) __riscv_vlseg7e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlseg7e16_v_i16mf4(...) __riscv_vlseg7e16_v_i16mf4(__VA_ARGS__) |
| #define | vlseg7e16_v_i16mf4_m(...) __riscv_vlseg7e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlseg7e16_v_u16m1(...) __riscv_vlseg7e16_v_u16m1(__VA_ARGS__) |
| #define | vlseg7e16_v_u16m1_m(...) __riscv_vlseg7e16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlseg7e16_v_u16mf2(...) __riscv_vlseg7e16_v_u16mf2(__VA_ARGS__) |
| #define | vlseg7e16_v_u16mf2_m(...) __riscv_vlseg7e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlseg7e16_v_u16mf4(...) __riscv_vlseg7e16_v_u16mf4(__VA_ARGS__) |
| #define | vlseg7e16_v_u16mf4_m(...) __riscv_vlseg7e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlseg7e16ff_v_f16m1(...) __riscv_vlseg7e16ff_v_f16m1(__VA_ARGS__) |
| #define | vlseg7e16ff_v_f16m1_m(...) __riscv_vlseg7e16ff_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlseg7e16ff_v_f16mf2(...) __riscv_vlseg7e16ff_v_f16mf2(__VA_ARGS__) |
| #define | vlseg7e16ff_v_f16mf2_m(...) __riscv_vlseg7e16ff_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlseg7e16ff_v_f16mf4(...) __riscv_vlseg7e16ff_v_f16mf4(__VA_ARGS__) |
| #define | vlseg7e16ff_v_f16mf4_m(...) __riscv_vlseg7e16ff_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlseg7e16ff_v_i16m1(...) __riscv_vlseg7e16ff_v_i16m1(__VA_ARGS__) |
| #define | vlseg7e16ff_v_i16m1_m(...) __riscv_vlseg7e16ff_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlseg7e16ff_v_i16mf2(...) __riscv_vlseg7e16ff_v_i16mf2(__VA_ARGS__) |
| #define | vlseg7e16ff_v_i16mf2_m(...) __riscv_vlseg7e16ff_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlseg7e16ff_v_i16mf4(...) __riscv_vlseg7e16ff_v_i16mf4(__VA_ARGS__) |
| #define | vlseg7e16ff_v_i16mf4_m(...) __riscv_vlseg7e16ff_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlseg7e16ff_v_u16m1(...) __riscv_vlseg7e16ff_v_u16m1(__VA_ARGS__) |
| #define | vlseg7e16ff_v_u16m1_m(...) __riscv_vlseg7e16ff_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlseg7e16ff_v_u16mf2(...) __riscv_vlseg7e16ff_v_u16mf2(__VA_ARGS__) |
| #define | vlseg7e16ff_v_u16mf2_m(...) __riscv_vlseg7e16ff_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlseg7e16ff_v_u16mf4(...) __riscv_vlseg7e16ff_v_u16mf4(__VA_ARGS__) |
| #define | vlseg7e16ff_v_u16mf4_m(...) __riscv_vlseg7e16ff_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlseg7e32_v_f32m1(...) __riscv_vlseg7e32_v_f32m1(__VA_ARGS__) |
| #define | vlseg7e32_v_f32m1_m(...) __riscv_vlseg7e32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlseg7e32_v_f32mf2(...) __riscv_vlseg7e32_v_f32mf2(__VA_ARGS__) |
| #define | vlseg7e32_v_f32mf2_m(...) __riscv_vlseg7e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlseg7e32_v_i32m1(...) __riscv_vlseg7e32_v_i32m1(__VA_ARGS__) |
| #define | vlseg7e32_v_i32m1_m(...) __riscv_vlseg7e32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlseg7e32_v_i32mf2(...) __riscv_vlseg7e32_v_i32mf2(__VA_ARGS__) |
| #define | vlseg7e32_v_i32mf2_m(...) __riscv_vlseg7e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlseg7e32_v_u32m1(...) __riscv_vlseg7e32_v_u32m1(__VA_ARGS__) |
| #define | vlseg7e32_v_u32m1_m(...) __riscv_vlseg7e32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlseg7e32_v_u32mf2(...) __riscv_vlseg7e32_v_u32mf2(__VA_ARGS__) |
| #define | vlseg7e32_v_u32mf2_m(...) __riscv_vlseg7e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlseg7e32ff_v_f32m1(...) __riscv_vlseg7e32ff_v_f32m1(__VA_ARGS__) |
| #define | vlseg7e32ff_v_f32m1_m(...) __riscv_vlseg7e32ff_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlseg7e32ff_v_f32mf2(...) __riscv_vlseg7e32ff_v_f32mf2(__VA_ARGS__) |
| #define | vlseg7e32ff_v_f32mf2_m(...) __riscv_vlseg7e32ff_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlseg7e32ff_v_i32m1(...) __riscv_vlseg7e32ff_v_i32m1(__VA_ARGS__) |
| #define | vlseg7e32ff_v_i32m1_m(...) __riscv_vlseg7e32ff_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlseg7e32ff_v_i32mf2(...) __riscv_vlseg7e32ff_v_i32mf2(__VA_ARGS__) |
| #define | vlseg7e32ff_v_i32mf2_m(...) __riscv_vlseg7e32ff_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlseg7e32ff_v_u32m1(...) __riscv_vlseg7e32ff_v_u32m1(__VA_ARGS__) |
| #define | vlseg7e32ff_v_u32m1_m(...) __riscv_vlseg7e32ff_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlseg7e32ff_v_u32mf2(...) __riscv_vlseg7e32ff_v_u32mf2(__VA_ARGS__) |
| #define | vlseg7e32ff_v_u32mf2_m(...) __riscv_vlseg7e32ff_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlseg7e64_v_f64m1(...) __riscv_vlseg7e64_v_f64m1(__VA_ARGS__) |
| #define | vlseg7e64_v_f64m1_m(...) __riscv_vlseg7e64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlseg7e64_v_i64m1(...) __riscv_vlseg7e64_v_i64m1(__VA_ARGS__) |
| #define | vlseg7e64_v_i64m1_m(...) __riscv_vlseg7e64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlseg7e64_v_u64m1(...) __riscv_vlseg7e64_v_u64m1(__VA_ARGS__) |
| #define | vlseg7e64_v_u64m1_m(...) __riscv_vlseg7e64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlseg7e64ff_v_f64m1(...) __riscv_vlseg7e64ff_v_f64m1(__VA_ARGS__) |
| #define | vlseg7e64ff_v_f64m1_m(...) __riscv_vlseg7e64ff_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlseg7e64ff_v_i64m1(...) __riscv_vlseg7e64ff_v_i64m1(__VA_ARGS__) |
| #define | vlseg7e64ff_v_i64m1_m(...) __riscv_vlseg7e64ff_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlseg7e64ff_v_u64m1(...) __riscv_vlseg7e64ff_v_u64m1(__VA_ARGS__) |
| #define | vlseg7e64ff_v_u64m1_m(...) __riscv_vlseg7e64ff_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlseg7e8_v_i8m1(...) __riscv_vlseg7e8_v_i8m1(__VA_ARGS__) |
| #define | vlseg7e8_v_i8m1_m(...) __riscv_vlseg7e8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlseg7e8_v_i8mf2(...) __riscv_vlseg7e8_v_i8mf2(__VA_ARGS__) |
| #define | vlseg7e8_v_i8mf2_m(...) __riscv_vlseg7e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlseg7e8_v_i8mf4(...) __riscv_vlseg7e8_v_i8mf4(__VA_ARGS__) |
| #define | vlseg7e8_v_i8mf4_m(...) __riscv_vlseg7e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlseg7e8_v_i8mf8(...) __riscv_vlseg7e8_v_i8mf8(__VA_ARGS__) |
| #define | vlseg7e8_v_i8mf8_m(...) __riscv_vlseg7e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlseg7e8_v_u8m1(...) __riscv_vlseg7e8_v_u8m1(__VA_ARGS__) |
| #define | vlseg7e8_v_u8m1_m(...) __riscv_vlseg7e8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlseg7e8_v_u8mf2(...) __riscv_vlseg7e8_v_u8mf2(__VA_ARGS__) |
| #define | vlseg7e8_v_u8mf2_m(...) __riscv_vlseg7e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlseg7e8_v_u8mf4(...) __riscv_vlseg7e8_v_u8mf4(__VA_ARGS__) |
| #define | vlseg7e8_v_u8mf4_m(...) __riscv_vlseg7e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlseg7e8_v_u8mf8(...) __riscv_vlseg7e8_v_u8mf8(__VA_ARGS__) |
| #define | vlseg7e8_v_u8mf8_m(...) __riscv_vlseg7e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlseg7e8ff_v_i8m1(...) __riscv_vlseg7e8ff_v_i8m1(__VA_ARGS__) |
| #define | vlseg7e8ff_v_i8m1_m(...) __riscv_vlseg7e8ff_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlseg7e8ff_v_i8mf2(...) __riscv_vlseg7e8ff_v_i8mf2(__VA_ARGS__) |
| #define | vlseg7e8ff_v_i8mf2_m(...) __riscv_vlseg7e8ff_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlseg7e8ff_v_i8mf4(...) __riscv_vlseg7e8ff_v_i8mf4(__VA_ARGS__) |
| #define | vlseg7e8ff_v_i8mf4_m(...) __riscv_vlseg7e8ff_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlseg7e8ff_v_i8mf8(...) __riscv_vlseg7e8ff_v_i8mf8(__VA_ARGS__) |
| #define | vlseg7e8ff_v_i8mf8_m(...) __riscv_vlseg7e8ff_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlseg7e8ff_v_u8m1(...) __riscv_vlseg7e8ff_v_u8m1(__VA_ARGS__) |
| #define | vlseg7e8ff_v_u8m1_m(...) __riscv_vlseg7e8ff_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlseg7e8ff_v_u8mf2(...) __riscv_vlseg7e8ff_v_u8mf2(__VA_ARGS__) |
| #define | vlseg7e8ff_v_u8mf2_m(...) __riscv_vlseg7e8ff_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlseg7e8ff_v_u8mf4(...) __riscv_vlseg7e8ff_v_u8mf4(__VA_ARGS__) |
| #define | vlseg7e8ff_v_u8mf4_m(...) __riscv_vlseg7e8ff_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlseg7e8ff_v_u8mf8(...) __riscv_vlseg7e8ff_v_u8mf8(__VA_ARGS__) |
| #define | vlseg7e8ff_v_u8mf8_m(...) __riscv_vlseg7e8ff_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlseg8e16_v_f16m1(...) __riscv_vlseg8e16_v_f16m1(__VA_ARGS__) |
| #define | vlseg8e16_v_f16m1_m(...) __riscv_vlseg8e16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlseg8e16_v_f16mf2(...) __riscv_vlseg8e16_v_f16mf2(__VA_ARGS__) |
| #define | vlseg8e16_v_f16mf2_m(...) __riscv_vlseg8e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlseg8e16_v_f16mf4(...) __riscv_vlseg8e16_v_f16mf4(__VA_ARGS__) |
| #define | vlseg8e16_v_f16mf4_m(...) __riscv_vlseg8e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlseg8e16_v_i16m1(...) __riscv_vlseg8e16_v_i16m1(__VA_ARGS__) |
| #define | vlseg8e16_v_i16m1_m(...) __riscv_vlseg8e16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlseg8e16_v_i16mf2(...) __riscv_vlseg8e16_v_i16mf2(__VA_ARGS__) |
| #define | vlseg8e16_v_i16mf2_m(...) __riscv_vlseg8e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlseg8e16_v_i16mf4(...) __riscv_vlseg8e16_v_i16mf4(__VA_ARGS__) |
| #define | vlseg8e16_v_i16mf4_m(...) __riscv_vlseg8e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlseg8e16_v_u16m1(...) __riscv_vlseg8e16_v_u16m1(__VA_ARGS__) |
| #define | vlseg8e16_v_u16m1_m(...) __riscv_vlseg8e16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlseg8e16_v_u16mf2(...) __riscv_vlseg8e16_v_u16mf2(__VA_ARGS__) |
| #define | vlseg8e16_v_u16mf2_m(...) __riscv_vlseg8e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlseg8e16_v_u16mf4(...) __riscv_vlseg8e16_v_u16mf4(__VA_ARGS__) |
| #define | vlseg8e16_v_u16mf4_m(...) __riscv_vlseg8e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlseg8e16ff_v_f16m1(...) __riscv_vlseg8e16ff_v_f16m1(__VA_ARGS__) |
| #define | vlseg8e16ff_v_f16m1_m(...) __riscv_vlseg8e16ff_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlseg8e16ff_v_f16mf2(...) __riscv_vlseg8e16ff_v_f16mf2(__VA_ARGS__) |
| #define | vlseg8e16ff_v_f16mf2_m(...) __riscv_vlseg8e16ff_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlseg8e16ff_v_f16mf4(...) __riscv_vlseg8e16ff_v_f16mf4(__VA_ARGS__) |
| #define | vlseg8e16ff_v_f16mf4_m(...) __riscv_vlseg8e16ff_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlseg8e16ff_v_i16m1(...) __riscv_vlseg8e16ff_v_i16m1(__VA_ARGS__) |
| #define | vlseg8e16ff_v_i16m1_m(...) __riscv_vlseg8e16ff_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlseg8e16ff_v_i16mf2(...) __riscv_vlseg8e16ff_v_i16mf2(__VA_ARGS__) |
| #define | vlseg8e16ff_v_i16mf2_m(...) __riscv_vlseg8e16ff_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlseg8e16ff_v_i16mf4(...) __riscv_vlseg8e16ff_v_i16mf4(__VA_ARGS__) |
| #define | vlseg8e16ff_v_i16mf4_m(...) __riscv_vlseg8e16ff_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlseg8e16ff_v_u16m1(...) __riscv_vlseg8e16ff_v_u16m1(__VA_ARGS__) |
| #define | vlseg8e16ff_v_u16m1_m(...) __riscv_vlseg8e16ff_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlseg8e16ff_v_u16mf2(...) __riscv_vlseg8e16ff_v_u16mf2(__VA_ARGS__) |
| #define | vlseg8e16ff_v_u16mf2_m(...) __riscv_vlseg8e16ff_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlseg8e16ff_v_u16mf4(...) __riscv_vlseg8e16ff_v_u16mf4(__VA_ARGS__) |
| #define | vlseg8e16ff_v_u16mf4_m(...) __riscv_vlseg8e16ff_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlseg8e32_v_f32m1(...) __riscv_vlseg8e32_v_f32m1(__VA_ARGS__) |
| #define | vlseg8e32_v_f32m1_m(...) __riscv_vlseg8e32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlseg8e32_v_f32mf2(...) __riscv_vlseg8e32_v_f32mf2(__VA_ARGS__) |
| #define | vlseg8e32_v_f32mf2_m(...) __riscv_vlseg8e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlseg8e32_v_i32m1(...) __riscv_vlseg8e32_v_i32m1(__VA_ARGS__) |
| #define | vlseg8e32_v_i32m1_m(...) __riscv_vlseg8e32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlseg8e32_v_i32mf2(...) __riscv_vlseg8e32_v_i32mf2(__VA_ARGS__) |
| #define | vlseg8e32_v_i32mf2_m(...) __riscv_vlseg8e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlseg8e32_v_u32m1(...) __riscv_vlseg8e32_v_u32m1(__VA_ARGS__) |
| #define | vlseg8e32_v_u32m1_m(...) __riscv_vlseg8e32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlseg8e32_v_u32mf2(...) __riscv_vlseg8e32_v_u32mf2(__VA_ARGS__) |
| #define | vlseg8e32_v_u32mf2_m(...) __riscv_vlseg8e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlseg8e32ff_v_f32m1(...) __riscv_vlseg8e32ff_v_f32m1(__VA_ARGS__) |
| #define | vlseg8e32ff_v_f32m1_m(...) __riscv_vlseg8e32ff_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlseg8e32ff_v_f32mf2(...) __riscv_vlseg8e32ff_v_f32mf2(__VA_ARGS__) |
| #define | vlseg8e32ff_v_f32mf2_m(...) __riscv_vlseg8e32ff_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlseg8e32ff_v_i32m1(...) __riscv_vlseg8e32ff_v_i32m1(__VA_ARGS__) |
| #define | vlseg8e32ff_v_i32m1_m(...) __riscv_vlseg8e32ff_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlseg8e32ff_v_i32mf2(...) __riscv_vlseg8e32ff_v_i32mf2(__VA_ARGS__) |
| #define | vlseg8e32ff_v_i32mf2_m(...) __riscv_vlseg8e32ff_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlseg8e32ff_v_u32m1(...) __riscv_vlseg8e32ff_v_u32m1(__VA_ARGS__) |
| #define | vlseg8e32ff_v_u32m1_m(...) __riscv_vlseg8e32ff_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlseg8e32ff_v_u32mf2(...) __riscv_vlseg8e32ff_v_u32mf2(__VA_ARGS__) |
| #define | vlseg8e32ff_v_u32mf2_m(...) __riscv_vlseg8e32ff_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlseg8e64_v_f64m1(...) __riscv_vlseg8e64_v_f64m1(__VA_ARGS__) |
| #define | vlseg8e64_v_f64m1_m(...) __riscv_vlseg8e64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlseg8e64_v_i64m1(...) __riscv_vlseg8e64_v_i64m1(__VA_ARGS__) |
| #define | vlseg8e64_v_i64m1_m(...) __riscv_vlseg8e64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlseg8e64_v_u64m1(...) __riscv_vlseg8e64_v_u64m1(__VA_ARGS__) |
| #define | vlseg8e64_v_u64m1_m(...) __riscv_vlseg8e64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlseg8e64ff_v_f64m1(...) __riscv_vlseg8e64ff_v_f64m1(__VA_ARGS__) |
| #define | vlseg8e64ff_v_f64m1_m(...) __riscv_vlseg8e64ff_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlseg8e64ff_v_i64m1(...) __riscv_vlseg8e64ff_v_i64m1(__VA_ARGS__) |
| #define | vlseg8e64ff_v_i64m1_m(...) __riscv_vlseg8e64ff_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlseg8e64ff_v_u64m1(...) __riscv_vlseg8e64ff_v_u64m1(__VA_ARGS__) |
| #define | vlseg8e64ff_v_u64m1_m(...) __riscv_vlseg8e64ff_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlseg8e8_v_i8m1(...) __riscv_vlseg8e8_v_i8m1(__VA_ARGS__) |
| #define | vlseg8e8_v_i8m1_m(...) __riscv_vlseg8e8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlseg8e8_v_i8mf2(...) __riscv_vlseg8e8_v_i8mf2(__VA_ARGS__) |
| #define | vlseg8e8_v_i8mf2_m(...) __riscv_vlseg8e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlseg8e8_v_i8mf4(...) __riscv_vlseg8e8_v_i8mf4(__VA_ARGS__) |
| #define | vlseg8e8_v_i8mf4_m(...) __riscv_vlseg8e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlseg8e8_v_i8mf8(...) __riscv_vlseg8e8_v_i8mf8(__VA_ARGS__) |
| #define | vlseg8e8_v_i8mf8_m(...) __riscv_vlseg8e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlseg8e8_v_u8m1(...) __riscv_vlseg8e8_v_u8m1(__VA_ARGS__) |
| #define | vlseg8e8_v_u8m1_m(...) __riscv_vlseg8e8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlseg8e8_v_u8mf2(...) __riscv_vlseg8e8_v_u8mf2(__VA_ARGS__) |
| #define | vlseg8e8_v_u8mf2_m(...) __riscv_vlseg8e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlseg8e8_v_u8mf4(...) __riscv_vlseg8e8_v_u8mf4(__VA_ARGS__) |
| #define | vlseg8e8_v_u8mf4_m(...) __riscv_vlseg8e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlseg8e8_v_u8mf8(...) __riscv_vlseg8e8_v_u8mf8(__VA_ARGS__) |
| #define | vlseg8e8_v_u8mf8_m(...) __riscv_vlseg8e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlseg8e8ff_v_i8m1(...) __riscv_vlseg8e8ff_v_i8m1(__VA_ARGS__) |
| #define | vlseg8e8ff_v_i8m1_m(...) __riscv_vlseg8e8ff_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlseg8e8ff_v_i8mf2(...) __riscv_vlseg8e8ff_v_i8mf2(__VA_ARGS__) |
| #define | vlseg8e8ff_v_i8mf2_m(...) __riscv_vlseg8e8ff_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlseg8e8ff_v_i8mf4(...) __riscv_vlseg8e8ff_v_i8mf4(__VA_ARGS__) |
| #define | vlseg8e8ff_v_i8mf4_m(...) __riscv_vlseg8e8ff_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlseg8e8ff_v_i8mf8(...) __riscv_vlseg8e8ff_v_i8mf8(__VA_ARGS__) |
| #define | vlseg8e8ff_v_i8mf8_m(...) __riscv_vlseg8e8ff_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlseg8e8ff_v_u8m1(...) __riscv_vlseg8e8ff_v_u8m1(__VA_ARGS__) |
| #define | vlseg8e8ff_v_u8m1_m(...) __riscv_vlseg8e8ff_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlseg8e8ff_v_u8mf2(...) __riscv_vlseg8e8ff_v_u8mf2(__VA_ARGS__) |
| #define | vlseg8e8ff_v_u8mf2_m(...) __riscv_vlseg8e8ff_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlseg8e8ff_v_u8mf4(...) __riscv_vlseg8e8ff_v_u8mf4(__VA_ARGS__) |
| #define | vlseg8e8ff_v_u8mf4_m(...) __riscv_vlseg8e8ff_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlseg8e8ff_v_u8mf8(...) __riscv_vlseg8e8ff_v_u8mf8(__VA_ARGS__) |
| #define | vlseg8e8ff_v_u8mf8_m(...) __riscv_vlseg8e8ff_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlsseg2e16_v_f16m1(...) __riscv_vlsseg2e16_v_f16m1(__VA_ARGS__) |
| #define | vlsseg2e16_v_f16m1_m(...) __riscv_vlsseg2e16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlsseg2e16_v_f16m2(...) __riscv_vlsseg2e16_v_f16m2(__VA_ARGS__) |
| #define | vlsseg2e16_v_f16m2_m(...) __riscv_vlsseg2e16_v_f16m2_tumu(__VA_ARGS__) |
| #define | vlsseg2e16_v_f16m4(...) __riscv_vlsseg2e16_v_f16m4(__VA_ARGS__) |
| #define | vlsseg2e16_v_f16m4_m(...) __riscv_vlsseg2e16_v_f16m4_tumu(__VA_ARGS__) |
| #define | vlsseg2e16_v_f16mf2(...) __riscv_vlsseg2e16_v_f16mf2(__VA_ARGS__) |
| #define | vlsseg2e16_v_f16mf2_m(...) __riscv_vlsseg2e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlsseg2e16_v_f16mf4(...) __riscv_vlsseg2e16_v_f16mf4(__VA_ARGS__) |
| #define | vlsseg2e16_v_f16mf4_m(...) __riscv_vlsseg2e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlsseg2e16_v_i16m1(...) __riscv_vlsseg2e16_v_i16m1(__VA_ARGS__) |
| #define | vlsseg2e16_v_i16m1_m(...) __riscv_vlsseg2e16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlsseg2e16_v_i16m2(...) __riscv_vlsseg2e16_v_i16m2(__VA_ARGS__) |
| #define | vlsseg2e16_v_i16m2_m(...) __riscv_vlsseg2e16_v_i16m2_tumu(__VA_ARGS__) |
| #define | vlsseg2e16_v_i16m4(...) __riscv_vlsseg2e16_v_i16m4(__VA_ARGS__) |
| #define | vlsseg2e16_v_i16m4_m(...) __riscv_vlsseg2e16_v_i16m4_tumu(__VA_ARGS__) |
| #define | vlsseg2e16_v_i16mf2(...) __riscv_vlsseg2e16_v_i16mf2(__VA_ARGS__) |
| #define | vlsseg2e16_v_i16mf2_m(...) __riscv_vlsseg2e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlsseg2e16_v_i16mf4(...) __riscv_vlsseg2e16_v_i16mf4(__VA_ARGS__) |
| #define | vlsseg2e16_v_i16mf4_m(...) __riscv_vlsseg2e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlsseg2e16_v_u16m1(...) __riscv_vlsseg2e16_v_u16m1(__VA_ARGS__) |
| #define | vlsseg2e16_v_u16m1_m(...) __riscv_vlsseg2e16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlsseg2e16_v_u16m2(...) __riscv_vlsseg2e16_v_u16m2(__VA_ARGS__) |
| #define | vlsseg2e16_v_u16m2_m(...) __riscv_vlsseg2e16_v_u16m2_tumu(__VA_ARGS__) |
| #define | vlsseg2e16_v_u16m4(...) __riscv_vlsseg2e16_v_u16m4(__VA_ARGS__) |
| #define | vlsseg2e16_v_u16m4_m(...) __riscv_vlsseg2e16_v_u16m4_tumu(__VA_ARGS__) |
| #define | vlsseg2e16_v_u16mf2(...) __riscv_vlsseg2e16_v_u16mf2(__VA_ARGS__) |
| #define | vlsseg2e16_v_u16mf2_m(...) __riscv_vlsseg2e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlsseg2e16_v_u16mf4(...) __riscv_vlsseg2e16_v_u16mf4(__VA_ARGS__) |
| #define | vlsseg2e16_v_u16mf4_m(...) __riscv_vlsseg2e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlsseg2e32_v_f32m1(...) __riscv_vlsseg2e32_v_f32m1(__VA_ARGS__) |
| #define | vlsseg2e32_v_f32m1_m(...) __riscv_vlsseg2e32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlsseg2e32_v_f32m2(...) __riscv_vlsseg2e32_v_f32m2(__VA_ARGS__) |
| #define | vlsseg2e32_v_f32m2_m(...) __riscv_vlsseg2e32_v_f32m2_tumu(__VA_ARGS__) |
| #define | vlsseg2e32_v_f32m4(...) __riscv_vlsseg2e32_v_f32m4(__VA_ARGS__) |
| #define | vlsseg2e32_v_f32m4_m(...) __riscv_vlsseg2e32_v_f32m4_tumu(__VA_ARGS__) |
| #define | vlsseg2e32_v_f32mf2(...) __riscv_vlsseg2e32_v_f32mf2(__VA_ARGS__) |
| #define | vlsseg2e32_v_f32mf2_m(...) __riscv_vlsseg2e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlsseg2e32_v_i32m1(...) __riscv_vlsseg2e32_v_i32m1(__VA_ARGS__) |
| #define | vlsseg2e32_v_i32m1_m(...) __riscv_vlsseg2e32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlsseg2e32_v_i32m2(...) __riscv_vlsseg2e32_v_i32m2(__VA_ARGS__) |
| #define | vlsseg2e32_v_i32m2_m(...) __riscv_vlsseg2e32_v_i32m2_tumu(__VA_ARGS__) |
| #define | vlsseg2e32_v_i32m4(...) __riscv_vlsseg2e32_v_i32m4(__VA_ARGS__) |
| #define | vlsseg2e32_v_i32m4_m(...) __riscv_vlsseg2e32_v_i32m4_tumu(__VA_ARGS__) |
| #define | vlsseg2e32_v_i32mf2(...) __riscv_vlsseg2e32_v_i32mf2(__VA_ARGS__) |
| #define | vlsseg2e32_v_i32mf2_m(...) __riscv_vlsseg2e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlsseg2e32_v_u32m1(...) __riscv_vlsseg2e32_v_u32m1(__VA_ARGS__) |
| #define | vlsseg2e32_v_u32m1_m(...) __riscv_vlsseg2e32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlsseg2e32_v_u32m2(...) __riscv_vlsseg2e32_v_u32m2(__VA_ARGS__) |
| #define | vlsseg2e32_v_u32m2_m(...) __riscv_vlsseg2e32_v_u32m2_tumu(__VA_ARGS__) |
| #define | vlsseg2e32_v_u32m4(...) __riscv_vlsseg2e32_v_u32m4(__VA_ARGS__) |
| #define | vlsseg2e32_v_u32m4_m(...) __riscv_vlsseg2e32_v_u32m4_tumu(__VA_ARGS__) |
| #define | vlsseg2e32_v_u32mf2(...) __riscv_vlsseg2e32_v_u32mf2(__VA_ARGS__) |
| #define | vlsseg2e32_v_u32mf2_m(...) __riscv_vlsseg2e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlsseg2e64_v_f64m1(...) __riscv_vlsseg2e64_v_f64m1(__VA_ARGS__) |
| #define | vlsseg2e64_v_f64m1_m(...) __riscv_vlsseg2e64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlsseg2e64_v_f64m2(...) __riscv_vlsseg2e64_v_f64m2(__VA_ARGS__) |
| #define | vlsseg2e64_v_f64m2_m(...) __riscv_vlsseg2e64_v_f64m2_tumu(__VA_ARGS__) |
| #define | vlsseg2e64_v_f64m4(...) __riscv_vlsseg2e64_v_f64m4(__VA_ARGS__) |
| #define | vlsseg2e64_v_f64m4_m(...) __riscv_vlsseg2e64_v_f64m4_tumu(__VA_ARGS__) |
| #define | vlsseg2e64_v_i64m1(...) __riscv_vlsseg2e64_v_i64m1(__VA_ARGS__) |
| #define | vlsseg2e64_v_i64m1_m(...) __riscv_vlsseg2e64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlsseg2e64_v_i64m2(...) __riscv_vlsseg2e64_v_i64m2(__VA_ARGS__) |
| #define | vlsseg2e64_v_i64m2_m(...) __riscv_vlsseg2e64_v_i64m2_tumu(__VA_ARGS__) |
| #define | vlsseg2e64_v_i64m4(...) __riscv_vlsseg2e64_v_i64m4(__VA_ARGS__) |
| #define | vlsseg2e64_v_i64m4_m(...) __riscv_vlsseg2e64_v_i64m4_tumu(__VA_ARGS__) |
| #define | vlsseg2e64_v_u64m1(...) __riscv_vlsseg2e64_v_u64m1(__VA_ARGS__) |
| #define | vlsseg2e64_v_u64m1_m(...) __riscv_vlsseg2e64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlsseg2e64_v_u64m2(...) __riscv_vlsseg2e64_v_u64m2(__VA_ARGS__) |
| #define | vlsseg2e64_v_u64m2_m(...) __riscv_vlsseg2e64_v_u64m2_tumu(__VA_ARGS__) |
| #define | vlsseg2e64_v_u64m4(...) __riscv_vlsseg2e64_v_u64m4(__VA_ARGS__) |
| #define | vlsseg2e64_v_u64m4_m(...) __riscv_vlsseg2e64_v_u64m4_tumu(__VA_ARGS__) |
| #define | vlsseg2e8_v_i8m1(...) __riscv_vlsseg2e8_v_i8m1(__VA_ARGS__) |
| #define | vlsseg2e8_v_i8m1_m(...) __riscv_vlsseg2e8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlsseg2e8_v_i8m2(...) __riscv_vlsseg2e8_v_i8m2(__VA_ARGS__) |
| #define | vlsseg2e8_v_i8m2_m(...) __riscv_vlsseg2e8_v_i8m2_tumu(__VA_ARGS__) |
| #define | vlsseg2e8_v_i8m4(...) __riscv_vlsseg2e8_v_i8m4(__VA_ARGS__) |
| #define | vlsseg2e8_v_i8m4_m(...) __riscv_vlsseg2e8_v_i8m4_tumu(__VA_ARGS__) |
| #define | vlsseg2e8_v_i8mf2(...) __riscv_vlsseg2e8_v_i8mf2(__VA_ARGS__) |
| #define | vlsseg2e8_v_i8mf2_m(...) __riscv_vlsseg2e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlsseg2e8_v_i8mf4(...) __riscv_vlsseg2e8_v_i8mf4(__VA_ARGS__) |
| #define | vlsseg2e8_v_i8mf4_m(...) __riscv_vlsseg2e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlsseg2e8_v_i8mf8(...) __riscv_vlsseg2e8_v_i8mf8(__VA_ARGS__) |
| #define | vlsseg2e8_v_i8mf8_m(...) __riscv_vlsseg2e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlsseg2e8_v_u8m1(...) __riscv_vlsseg2e8_v_u8m1(__VA_ARGS__) |
| #define | vlsseg2e8_v_u8m1_m(...) __riscv_vlsseg2e8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlsseg2e8_v_u8m2(...) __riscv_vlsseg2e8_v_u8m2(__VA_ARGS__) |
| #define | vlsseg2e8_v_u8m2_m(...) __riscv_vlsseg2e8_v_u8m2_tumu(__VA_ARGS__) |
| #define | vlsseg2e8_v_u8m4(...) __riscv_vlsseg2e8_v_u8m4(__VA_ARGS__) |
| #define | vlsseg2e8_v_u8m4_m(...) __riscv_vlsseg2e8_v_u8m4_tumu(__VA_ARGS__) |
| #define | vlsseg2e8_v_u8mf2(...) __riscv_vlsseg2e8_v_u8mf2(__VA_ARGS__) |
| #define | vlsseg2e8_v_u8mf2_m(...) __riscv_vlsseg2e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlsseg2e8_v_u8mf4(...) __riscv_vlsseg2e8_v_u8mf4(__VA_ARGS__) |
| #define | vlsseg2e8_v_u8mf4_m(...) __riscv_vlsseg2e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlsseg2e8_v_u8mf8(...) __riscv_vlsseg2e8_v_u8mf8(__VA_ARGS__) |
| #define | vlsseg2e8_v_u8mf8_m(...) __riscv_vlsseg2e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlsseg3e16_v_f16m1(...) __riscv_vlsseg3e16_v_f16m1(__VA_ARGS__) |
| #define | vlsseg3e16_v_f16m1_m(...) __riscv_vlsseg3e16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlsseg3e16_v_f16m2(...) __riscv_vlsseg3e16_v_f16m2(__VA_ARGS__) |
| #define | vlsseg3e16_v_f16m2_m(...) __riscv_vlsseg3e16_v_f16m2_tumu(__VA_ARGS__) |
| #define | vlsseg3e16_v_f16mf2(...) __riscv_vlsseg3e16_v_f16mf2(__VA_ARGS__) |
| #define | vlsseg3e16_v_f16mf2_m(...) __riscv_vlsseg3e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlsseg3e16_v_f16mf4(...) __riscv_vlsseg3e16_v_f16mf4(__VA_ARGS__) |
| #define | vlsseg3e16_v_f16mf4_m(...) __riscv_vlsseg3e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlsseg3e16_v_i16m1(...) __riscv_vlsseg3e16_v_i16m1(__VA_ARGS__) |
| #define | vlsseg3e16_v_i16m1_m(...) __riscv_vlsseg3e16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlsseg3e16_v_i16m2(...) __riscv_vlsseg3e16_v_i16m2(__VA_ARGS__) |
| #define | vlsseg3e16_v_i16m2_m(...) __riscv_vlsseg3e16_v_i16m2_tumu(__VA_ARGS__) |
| #define | vlsseg3e16_v_i16mf2(...) __riscv_vlsseg3e16_v_i16mf2(__VA_ARGS__) |
| #define | vlsseg3e16_v_i16mf2_m(...) __riscv_vlsseg3e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlsseg3e16_v_i16mf4(...) __riscv_vlsseg3e16_v_i16mf4(__VA_ARGS__) |
| #define | vlsseg3e16_v_i16mf4_m(...) __riscv_vlsseg3e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlsseg3e16_v_u16m1(...) __riscv_vlsseg3e16_v_u16m1(__VA_ARGS__) |
| #define | vlsseg3e16_v_u16m1_m(...) __riscv_vlsseg3e16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlsseg3e16_v_u16m2(...) __riscv_vlsseg3e16_v_u16m2(__VA_ARGS__) |
| #define | vlsseg3e16_v_u16m2_m(...) __riscv_vlsseg3e16_v_u16m2_tumu(__VA_ARGS__) |
| #define | vlsseg3e16_v_u16mf2(...) __riscv_vlsseg3e16_v_u16mf2(__VA_ARGS__) |
| #define | vlsseg3e16_v_u16mf2_m(...) __riscv_vlsseg3e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlsseg3e16_v_u16mf4(...) __riscv_vlsseg3e16_v_u16mf4(__VA_ARGS__) |
| #define | vlsseg3e16_v_u16mf4_m(...) __riscv_vlsseg3e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlsseg3e32_v_f32m1(...) __riscv_vlsseg3e32_v_f32m1(__VA_ARGS__) |
| #define | vlsseg3e32_v_f32m1_m(...) __riscv_vlsseg3e32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlsseg3e32_v_f32m2(...) __riscv_vlsseg3e32_v_f32m2(__VA_ARGS__) |
| #define | vlsseg3e32_v_f32m2_m(...) __riscv_vlsseg3e32_v_f32m2_tumu(__VA_ARGS__) |
| #define | vlsseg3e32_v_f32mf2(...) __riscv_vlsseg3e32_v_f32mf2(__VA_ARGS__) |
| #define | vlsseg3e32_v_f32mf2_m(...) __riscv_vlsseg3e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlsseg3e32_v_i32m1(...) __riscv_vlsseg3e32_v_i32m1(__VA_ARGS__) |
| #define | vlsseg3e32_v_i32m1_m(...) __riscv_vlsseg3e32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlsseg3e32_v_i32m2(...) __riscv_vlsseg3e32_v_i32m2(__VA_ARGS__) |
| #define | vlsseg3e32_v_i32m2_m(...) __riscv_vlsseg3e32_v_i32m2_tumu(__VA_ARGS__) |
| #define | vlsseg3e32_v_i32mf2(...) __riscv_vlsseg3e32_v_i32mf2(__VA_ARGS__) |
| #define | vlsseg3e32_v_i32mf2_m(...) __riscv_vlsseg3e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlsseg3e32_v_u32m1(...) __riscv_vlsseg3e32_v_u32m1(__VA_ARGS__) |
| #define | vlsseg3e32_v_u32m1_m(...) __riscv_vlsseg3e32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlsseg3e32_v_u32m2(...) __riscv_vlsseg3e32_v_u32m2(__VA_ARGS__) |
| #define | vlsseg3e32_v_u32m2_m(...) __riscv_vlsseg3e32_v_u32m2_tumu(__VA_ARGS__) |
| #define | vlsseg3e32_v_u32mf2(...) __riscv_vlsseg3e32_v_u32mf2(__VA_ARGS__) |
| #define | vlsseg3e32_v_u32mf2_m(...) __riscv_vlsseg3e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlsseg3e64_v_f64m1(...) __riscv_vlsseg3e64_v_f64m1(__VA_ARGS__) |
| #define | vlsseg3e64_v_f64m1_m(...) __riscv_vlsseg3e64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlsseg3e64_v_f64m2(...) __riscv_vlsseg3e64_v_f64m2(__VA_ARGS__) |
| #define | vlsseg3e64_v_f64m2_m(...) __riscv_vlsseg3e64_v_f64m2_tumu(__VA_ARGS__) |
| #define | vlsseg3e64_v_i64m1(...) __riscv_vlsseg3e64_v_i64m1(__VA_ARGS__) |
| #define | vlsseg3e64_v_i64m1_m(...) __riscv_vlsseg3e64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlsseg3e64_v_i64m2(...) __riscv_vlsseg3e64_v_i64m2(__VA_ARGS__) |
| #define | vlsseg3e64_v_i64m2_m(...) __riscv_vlsseg3e64_v_i64m2_tumu(__VA_ARGS__) |
| #define | vlsseg3e64_v_u64m1(...) __riscv_vlsseg3e64_v_u64m1(__VA_ARGS__) |
| #define | vlsseg3e64_v_u64m1_m(...) __riscv_vlsseg3e64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlsseg3e64_v_u64m2(...) __riscv_vlsseg3e64_v_u64m2(__VA_ARGS__) |
| #define | vlsseg3e64_v_u64m2_m(...) __riscv_vlsseg3e64_v_u64m2_tumu(__VA_ARGS__) |
| #define | vlsseg3e8_v_i8m1(...) __riscv_vlsseg3e8_v_i8m1(__VA_ARGS__) |
| #define | vlsseg3e8_v_i8m1_m(...) __riscv_vlsseg3e8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlsseg3e8_v_i8m2(...) __riscv_vlsseg3e8_v_i8m2(__VA_ARGS__) |
| #define | vlsseg3e8_v_i8m2_m(...) __riscv_vlsseg3e8_v_i8m2_tumu(__VA_ARGS__) |
| #define | vlsseg3e8_v_i8mf2(...) __riscv_vlsseg3e8_v_i8mf2(__VA_ARGS__) |
| #define | vlsseg3e8_v_i8mf2_m(...) __riscv_vlsseg3e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlsseg3e8_v_i8mf4(...) __riscv_vlsseg3e8_v_i8mf4(__VA_ARGS__) |
| #define | vlsseg3e8_v_i8mf4_m(...) __riscv_vlsseg3e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlsseg3e8_v_i8mf8(...) __riscv_vlsseg3e8_v_i8mf8(__VA_ARGS__) |
| #define | vlsseg3e8_v_i8mf8_m(...) __riscv_vlsseg3e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlsseg3e8_v_u8m1(...) __riscv_vlsseg3e8_v_u8m1(__VA_ARGS__) |
| #define | vlsseg3e8_v_u8m1_m(...) __riscv_vlsseg3e8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlsseg3e8_v_u8m2(...) __riscv_vlsseg3e8_v_u8m2(__VA_ARGS__) |
| #define | vlsseg3e8_v_u8m2_m(...) __riscv_vlsseg3e8_v_u8m2_tumu(__VA_ARGS__) |
| #define | vlsseg3e8_v_u8mf2(...) __riscv_vlsseg3e8_v_u8mf2(__VA_ARGS__) |
| #define | vlsseg3e8_v_u8mf2_m(...) __riscv_vlsseg3e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlsseg3e8_v_u8mf4(...) __riscv_vlsseg3e8_v_u8mf4(__VA_ARGS__) |
| #define | vlsseg3e8_v_u8mf4_m(...) __riscv_vlsseg3e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlsseg3e8_v_u8mf8(...) __riscv_vlsseg3e8_v_u8mf8(__VA_ARGS__) |
| #define | vlsseg3e8_v_u8mf8_m(...) __riscv_vlsseg3e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlsseg4e16_v_f16m1(...) __riscv_vlsseg4e16_v_f16m1(__VA_ARGS__) |
| #define | vlsseg4e16_v_f16m1_m(...) __riscv_vlsseg4e16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlsseg4e16_v_f16m2(...) __riscv_vlsseg4e16_v_f16m2(__VA_ARGS__) |
| #define | vlsseg4e16_v_f16m2_m(...) __riscv_vlsseg4e16_v_f16m2_tumu(__VA_ARGS__) |
| #define | vlsseg4e16_v_f16mf2(...) __riscv_vlsseg4e16_v_f16mf2(__VA_ARGS__) |
| #define | vlsseg4e16_v_f16mf2_m(...) __riscv_vlsseg4e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlsseg4e16_v_f16mf4(...) __riscv_vlsseg4e16_v_f16mf4(__VA_ARGS__) |
| #define | vlsseg4e16_v_f16mf4_m(...) __riscv_vlsseg4e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlsseg4e16_v_i16m1(...) __riscv_vlsseg4e16_v_i16m1(__VA_ARGS__) |
| #define | vlsseg4e16_v_i16m1_m(...) __riscv_vlsseg4e16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlsseg4e16_v_i16m2(...) __riscv_vlsseg4e16_v_i16m2(__VA_ARGS__) |
| #define | vlsseg4e16_v_i16m2_m(...) __riscv_vlsseg4e16_v_i16m2_tumu(__VA_ARGS__) |
| #define | vlsseg4e16_v_i16mf2(...) __riscv_vlsseg4e16_v_i16mf2(__VA_ARGS__) |
| #define | vlsseg4e16_v_i16mf2_m(...) __riscv_vlsseg4e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlsseg4e16_v_i16mf4(...) __riscv_vlsseg4e16_v_i16mf4(__VA_ARGS__) |
| #define | vlsseg4e16_v_i16mf4_m(...) __riscv_vlsseg4e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlsseg4e16_v_u16m1(...) __riscv_vlsseg4e16_v_u16m1(__VA_ARGS__) |
| #define | vlsseg4e16_v_u16m1_m(...) __riscv_vlsseg4e16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlsseg4e16_v_u16m2(...) __riscv_vlsseg4e16_v_u16m2(__VA_ARGS__) |
| #define | vlsseg4e16_v_u16m2_m(...) __riscv_vlsseg4e16_v_u16m2_tumu(__VA_ARGS__) |
| #define | vlsseg4e16_v_u16mf2(...) __riscv_vlsseg4e16_v_u16mf2(__VA_ARGS__) |
| #define | vlsseg4e16_v_u16mf2_m(...) __riscv_vlsseg4e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlsseg4e16_v_u16mf4(...) __riscv_vlsseg4e16_v_u16mf4(__VA_ARGS__) |
| #define | vlsseg4e16_v_u16mf4_m(...) __riscv_vlsseg4e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlsseg4e32_v_f32m1(...) __riscv_vlsseg4e32_v_f32m1(__VA_ARGS__) |
| #define | vlsseg4e32_v_f32m1_m(...) __riscv_vlsseg4e32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlsseg4e32_v_f32m2(...) __riscv_vlsseg4e32_v_f32m2(__VA_ARGS__) |
| #define | vlsseg4e32_v_f32m2_m(...) __riscv_vlsseg4e32_v_f32m2_tumu(__VA_ARGS__) |
| #define | vlsseg4e32_v_f32mf2(...) __riscv_vlsseg4e32_v_f32mf2(__VA_ARGS__) |
| #define | vlsseg4e32_v_f32mf2_m(...) __riscv_vlsseg4e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlsseg4e32_v_i32m1(...) __riscv_vlsseg4e32_v_i32m1(__VA_ARGS__) |
| #define | vlsseg4e32_v_i32m1_m(...) __riscv_vlsseg4e32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlsseg4e32_v_i32m2(...) __riscv_vlsseg4e32_v_i32m2(__VA_ARGS__) |
| #define | vlsseg4e32_v_i32m2_m(...) __riscv_vlsseg4e32_v_i32m2_tumu(__VA_ARGS__) |
| #define | vlsseg4e32_v_i32mf2(...) __riscv_vlsseg4e32_v_i32mf2(__VA_ARGS__) |
| #define | vlsseg4e32_v_i32mf2_m(...) __riscv_vlsseg4e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlsseg4e32_v_u32m1(...) __riscv_vlsseg4e32_v_u32m1(__VA_ARGS__) |
| #define | vlsseg4e32_v_u32m1_m(...) __riscv_vlsseg4e32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlsseg4e32_v_u32m2(...) __riscv_vlsseg4e32_v_u32m2(__VA_ARGS__) |
| #define | vlsseg4e32_v_u32m2_m(...) __riscv_vlsseg4e32_v_u32m2_tumu(__VA_ARGS__) |
| #define | vlsseg4e32_v_u32mf2(...) __riscv_vlsseg4e32_v_u32mf2(__VA_ARGS__) |
| #define | vlsseg4e32_v_u32mf2_m(...) __riscv_vlsseg4e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlsseg4e64_v_f64m1(...) __riscv_vlsseg4e64_v_f64m1(__VA_ARGS__) |
| #define | vlsseg4e64_v_f64m1_m(...) __riscv_vlsseg4e64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlsseg4e64_v_f64m2(...) __riscv_vlsseg4e64_v_f64m2(__VA_ARGS__) |
| #define | vlsseg4e64_v_f64m2_m(...) __riscv_vlsseg4e64_v_f64m2_tumu(__VA_ARGS__) |
| #define | vlsseg4e64_v_i64m1(...) __riscv_vlsseg4e64_v_i64m1(__VA_ARGS__) |
| #define | vlsseg4e64_v_i64m1_m(...) __riscv_vlsseg4e64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlsseg4e64_v_i64m2(...) __riscv_vlsseg4e64_v_i64m2(__VA_ARGS__) |
| #define | vlsseg4e64_v_i64m2_m(...) __riscv_vlsseg4e64_v_i64m2_tumu(__VA_ARGS__) |
| #define | vlsseg4e64_v_u64m1(...) __riscv_vlsseg4e64_v_u64m1(__VA_ARGS__) |
| #define | vlsseg4e64_v_u64m1_m(...) __riscv_vlsseg4e64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlsseg4e64_v_u64m2(...) __riscv_vlsseg4e64_v_u64m2(__VA_ARGS__) |
| #define | vlsseg4e64_v_u64m2_m(...) __riscv_vlsseg4e64_v_u64m2_tumu(__VA_ARGS__) |
| #define | vlsseg4e8_v_i8m1(...) __riscv_vlsseg4e8_v_i8m1(__VA_ARGS__) |
| #define | vlsseg4e8_v_i8m1_m(...) __riscv_vlsseg4e8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlsseg4e8_v_i8m2(...) __riscv_vlsseg4e8_v_i8m2(__VA_ARGS__) |
| #define | vlsseg4e8_v_i8m2_m(...) __riscv_vlsseg4e8_v_i8m2_tumu(__VA_ARGS__) |
| #define | vlsseg4e8_v_i8mf2(...) __riscv_vlsseg4e8_v_i8mf2(__VA_ARGS__) |
| #define | vlsseg4e8_v_i8mf2_m(...) __riscv_vlsseg4e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlsseg4e8_v_i8mf4(...) __riscv_vlsseg4e8_v_i8mf4(__VA_ARGS__) |
| #define | vlsseg4e8_v_i8mf4_m(...) __riscv_vlsseg4e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlsseg4e8_v_i8mf8(...) __riscv_vlsseg4e8_v_i8mf8(__VA_ARGS__) |
| #define | vlsseg4e8_v_i8mf8_m(...) __riscv_vlsseg4e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlsseg4e8_v_u8m1(...) __riscv_vlsseg4e8_v_u8m1(__VA_ARGS__) |
| #define | vlsseg4e8_v_u8m1_m(...) __riscv_vlsseg4e8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlsseg4e8_v_u8m2(...) __riscv_vlsseg4e8_v_u8m2(__VA_ARGS__) |
| #define | vlsseg4e8_v_u8m2_m(...) __riscv_vlsseg4e8_v_u8m2_tumu(__VA_ARGS__) |
| #define | vlsseg4e8_v_u8mf2(...) __riscv_vlsseg4e8_v_u8mf2(__VA_ARGS__) |
| #define | vlsseg4e8_v_u8mf2_m(...) __riscv_vlsseg4e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlsseg4e8_v_u8mf4(...) __riscv_vlsseg4e8_v_u8mf4(__VA_ARGS__) |
| #define | vlsseg4e8_v_u8mf4_m(...) __riscv_vlsseg4e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlsseg4e8_v_u8mf8(...) __riscv_vlsseg4e8_v_u8mf8(__VA_ARGS__) |
| #define | vlsseg4e8_v_u8mf8_m(...) __riscv_vlsseg4e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlsseg5e16_v_f16m1(...) __riscv_vlsseg5e16_v_f16m1(__VA_ARGS__) |
| #define | vlsseg5e16_v_f16m1_m(...) __riscv_vlsseg5e16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlsseg5e16_v_f16mf2(...) __riscv_vlsseg5e16_v_f16mf2(__VA_ARGS__) |
| #define | vlsseg5e16_v_f16mf2_m(...) __riscv_vlsseg5e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlsseg5e16_v_f16mf4(...) __riscv_vlsseg5e16_v_f16mf4(__VA_ARGS__) |
| #define | vlsseg5e16_v_f16mf4_m(...) __riscv_vlsseg5e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlsseg5e16_v_i16m1(...) __riscv_vlsseg5e16_v_i16m1(__VA_ARGS__) |
| #define | vlsseg5e16_v_i16m1_m(...) __riscv_vlsseg5e16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlsseg5e16_v_i16mf2(...) __riscv_vlsseg5e16_v_i16mf2(__VA_ARGS__) |
| #define | vlsseg5e16_v_i16mf2_m(...) __riscv_vlsseg5e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlsseg5e16_v_i16mf4(...) __riscv_vlsseg5e16_v_i16mf4(__VA_ARGS__) |
| #define | vlsseg5e16_v_i16mf4_m(...) __riscv_vlsseg5e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlsseg5e16_v_u16m1(...) __riscv_vlsseg5e16_v_u16m1(__VA_ARGS__) |
| #define | vlsseg5e16_v_u16m1_m(...) __riscv_vlsseg5e16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlsseg5e16_v_u16mf2(...) __riscv_vlsseg5e16_v_u16mf2(__VA_ARGS__) |
| #define | vlsseg5e16_v_u16mf2_m(...) __riscv_vlsseg5e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlsseg5e16_v_u16mf4(...) __riscv_vlsseg5e16_v_u16mf4(__VA_ARGS__) |
| #define | vlsseg5e16_v_u16mf4_m(...) __riscv_vlsseg5e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlsseg5e32_v_f32m1(...) __riscv_vlsseg5e32_v_f32m1(__VA_ARGS__) |
| #define | vlsseg5e32_v_f32m1_m(...) __riscv_vlsseg5e32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlsseg5e32_v_f32mf2(...) __riscv_vlsseg5e32_v_f32mf2(__VA_ARGS__) |
| #define | vlsseg5e32_v_f32mf2_m(...) __riscv_vlsseg5e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlsseg5e32_v_i32m1(...) __riscv_vlsseg5e32_v_i32m1(__VA_ARGS__) |
| #define | vlsseg5e32_v_i32m1_m(...) __riscv_vlsseg5e32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlsseg5e32_v_i32mf2(...) __riscv_vlsseg5e32_v_i32mf2(__VA_ARGS__) |
| #define | vlsseg5e32_v_i32mf2_m(...) __riscv_vlsseg5e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlsseg5e32_v_u32m1(...) __riscv_vlsseg5e32_v_u32m1(__VA_ARGS__) |
| #define | vlsseg5e32_v_u32m1_m(...) __riscv_vlsseg5e32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlsseg5e32_v_u32mf2(...) __riscv_vlsseg5e32_v_u32mf2(__VA_ARGS__) |
| #define | vlsseg5e32_v_u32mf2_m(...) __riscv_vlsseg5e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlsseg5e64_v_f64m1(...) __riscv_vlsseg5e64_v_f64m1(__VA_ARGS__) |
| #define | vlsseg5e64_v_f64m1_m(...) __riscv_vlsseg5e64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlsseg5e64_v_i64m1(...) __riscv_vlsseg5e64_v_i64m1(__VA_ARGS__) |
| #define | vlsseg5e64_v_i64m1_m(...) __riscv_vlsseg5e64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlsseg5e64_v_u64m1(...) __riscv_vlsseg5e64_v_u64m1(__VA_ARGS__) |
| #define | vlsseg5e64_v_u64m1_m(...) __riscv_vlsseg5e64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlsseg5e8_v_i8m1(...) __riscv_vlsseg5e8_v_i8m1(__VA_ARGS__) |
| #define | vlsseg5e8_v_i8m1_m(...) __riscv_vlsseg5e8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlsseg5e8_v_i8mf2(...) __riscv_vlsseg5e8_v_i8mf2(__VA_ARGS__) |
| #define | vlsseg5e8_v_i8mf2_m(...) __riscv_vlsseg5e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlsseg5e8_v_i8mf4(...) __riscv_vlsseg5e8_v_i8mf4(__VA_ARGS__) |
| #define | vlsseg5e8_v_i8mf4_m(...) __riscv_vlsseg5e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlsseg5e8_v_i8mf8(...) __riscv_vlsseg5e8_v_i8mf8(__VA_ARGS__) |
| #define | vlsseg5e8_v_i8mf8_m(...) __riscv_vlsseg5e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlsseg5e8_v_u8m1(...) __riscv_vlsseg5e8_v_u8m1(__VA_ARGS__) |
| #define | vlsseg5e8_v_u8m1_m(...) __riscv_vlsseg5e8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlsseg5e8_v_u8mf2(...) __riscv_vlsseg5e8_v_u8mf2(__VA_ARGS__) |
| #define | vlsseg5e8_v_u8mf2_m(...) __riscv_vlsseg5e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlsseg5e8_v_u8mf4(...) __riscv_vlsseg5e8_v_u8mf4(__VA_ARGS__) |
| #define | vlsseg5e8_v_u8mf4_m(...) __riscv_vlsseg5e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlsseg5e8_v_u8mf8(...) __riscv_vlsseg5e8_v_u8mf8(__VA_ARGS__) |
| #define | vlsseg5e8_v_u8mf8_m(...) __riscv_vlsseg5e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlsseg6e16_v_f16m1(...) __riscv_vlsseg6e16_v_f16m1(__VA_ARGS__) |
| #define | vlsseg6e16_v_f16m1_m(...) __riscv_vlsseg6e16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlsseg6e16_v_f16mf2(...) __riscv_vlsseg6e16_v_f16mf2(__VA_ARGS__) |
| #define | vlsseg6e16_v_f16mf2_m(...) __riscv_vlsseg6e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlsseg6e16_v_f16mf4(...) __riscv_vlsseg6e16_v_f16mf4(__VA_ARGS__) |
| #define | vlsseg6e16_v_f16mf4_m(...) __riscv_vlsseg6e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlsseg6e16_v_i16m1(...) __riscv_vlsseg6e16_v_i16m1(__VA_ARGS__) |
| #define | vlsseg6e16_v_i16m1_m(...) __riscv_vlsseg6e16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlsseg6e16_v_i16mf2(...) __riscv_vlsseg6e16_v_i16mf2(__VA_ARGS__) |
| #define | vlsseg6e16_v_i16mf2_m(...) __riscv_vlsseg6e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlsseg6e16_v_i16mf4(...) __riscv_vlsseg6e16_v_i16mf4(__VA_ARGS__) |
| #define | vlsseg6e16_v_i16mf4_m(...) __riscv_vlsseg6e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlsseg6e16_v_u16m1(...) __riscv_vlsseg6e16_v_u16m1(__VA_ARGS__) |
| #define | vlsseg6e16_v_u16m1_m(...) __riscv_vlsseg6e16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlsseg6e16_v_u16mf2(...) __riscv_vlsseg6e16_v_u16mf2(__VA_ARGS__) |
| #define | vlsseg6e16_v_u16mf2_m(...) __riscv_vlsseg6e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlsseg6e16_v_u16mf4(...) __riscv_vlsseg6e16_v_u16mf4(__VA_ARGS__) |
| #define | vlsseg6e16_v_u16mf4_m(...) __riscv_vlsseg6e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlsseg6e32_v_f32m1(...) __riscv_vlsseg6e32_v_f32m1(__VA_ARGS__) |
| #define | vlsseg6e32_v_f32m1_m(...) __riscv_vlsseg6e32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlsseg6e32_v_f32mf2(...) __riscv_vlsseg6e32_v_f32mf2(__VA_ARGS__) |
| #define | vlsseg6e32_v_f32mf2_m(...) __riscv_vlsseg6e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlsseg6e32_v_i32m1(...) __riscv_vlsseg6e32_v_i32m1(__VA_ARGS__) |
| #define | vlsseg6e32_v_i32m1_m(...) __riscv_vlsseg6e32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlsseg6e32_v_i32mf2(...) __riscv_vlsseg6e32_v_i32mf2(__VA_ARGS__) |
| #define | vlsseg6e32_v_i32mf2_m(...) __riscv_vlsseg6e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlsseg6e32_v_u32m1(...) __riscv_vlsseg6e32_v_u32m1(__VA_ARGS__) |
| #define | vlsseg6e32_v_u32m1_m(...) __riscv_vlsseg6e32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlsseg6e32_v_u32mf2(...) __riscv_vlsseg6e32_v_u32mf2(__VA_ARGS__) |
| #define | vlsseg6e32_v_u32mf2_m(...) __riscv_vlsseg6e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlsseg6e64_v_f64m1(...) __riscv_vlsseg6e64_v_f64m1(__VA_ARGS__) |
| #define | vlsseg6e64_v_f64m1_m(...) __riscv_vlsseg6e64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlsseg6e64_v_i64m1(...) __riscv_vlsseg6e64_v_i64m1(__VA_ARGS__) |
| #define | vlsseg6e64_v_i64m1_m(...) __riscv_vlsseg6e64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlsseg6e64_v_u64m1(...) __riscv_vlsseg6e64_v_u64m1(__VA_ARGS__) |
| #define | vlsseg6e64_v_u64m1_m(...) __riscv_vlsseg6e64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlsseg6e8_v_i8m1(...) __riscv_vlsseg6e8_v_i8m1(__VA_ARGS__) |
| #define | vlsseg6e8_v_i8m1_m(...) __riscv_vlsseg6e8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlsseg6e8_v_i8mf2(...) __riscv_vlsseg6e8_v_i8mf2(__VA_ARGS__) |
| #define | vlsseg6e8_v_i8mf2_m(...) __riscv_vlsseg6e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlsseg6e8_v_i8mf4(...) __riscv_vlsseg6e8_v_i8mf4(__VA_ARGS__) |
| #define | vlsseg6e8_v_i8mf4_m(...) __riscv_vlsseg6e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlsseg6e8_v_i8mf8(...) __riscv_vlsseg6e8_v_i8mf8(__VA_ARGS__) |
| #define | vlsseg6e8_v_i8mf8_m(...) __riscv_vlsseg6e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlsseg6e8_v_u8m1(...) __riscv_vlsseg6e8_v_u8m1(__VA_ARGS__) |
| #define | vlsseg6e8_v_u8m1_m(...) __riscv_vlsseg6e8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlsseg6e8_v_u8mf2(...) __riscv_vlsseg6e8_v_u8mf2(__VA_ARGS__) |
| #define | vlsseg6e8_v_u8mf2_m(...) __riscv_vlsseg6e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlsseg6e8_v_u8mf4(...) __riscv_vlsseg6e8_v_u8mf4(__VA_ARGS__) |
| #define | vlsseg6e8_v_u8mf4_m(...) __riscv_vlsseg6e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlsseg6e8_v_u8mf8(...) __riscv_vlsseg6e8_v_u8mf8(__VA_ARGS__) |
| #define | vlsseg6e8_v_u8mf8_m(...) __riscv_vlsseg6e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlsseg7e16_v_f16m1(...) __riscv_vlsseg7e16_v_f16m1(__VA_ARGS__) |
| #define | vlsseg7e16_v_f16m1_m(...) __riscv_vlsseg7e16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlsseg7e16_v_f16mf2(...) __riscv_vlsseg7e16_v_f16mf2(__VA_ARGS__) |
| #define | vlsseg7e16_v_f16mf2_m(...) __riscv_vlsseg7e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlsseg7e16_v_f16mf4(...) __riscv_vlsseg7e16_v_f16mf4(__VA_ARGS__) |
| #define | vlsseg7e16_v_f16mf4_m(...) __riscv_vlsseg7e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlsseg7e16_v_i16m1(...) __riscv_vlsseg7e16_v_i16m1(__VA_ARGS__) |
| #define | vlsseg7e16_v_i16m1_m(...) __riscv_vlsseg7e16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlsseg7e16_v_i16mf2(...) __riscv_vlsseg7e16_v_i16mf2(__VA_ARGS__) |
| #define | vlsseg7e16_v_i16mf2_m(...) __riscv_vlsseg7e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlsseg7e16_v_i16mf4(...) __riscv_vlsseg7e16_v_i16mf4(__VA_ARGS__) |
| #define | vlsseg7e16_v_i16mf4_m(...) __riscv_vlsseg7e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlsseg7e16_v_u16m1(...) __riscv_vlsseg7e16_v_u16m1(__VA_ARGS__) |
| #define | vlsseg7e16_v_u16m1_m(...) __riscv_vlsseg7e16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlsseg7e16_v_u16mf2(...) __riscv_vlsseg7e16_v_u16mf2(__VA_ARGS__) |
| #define | vlsseg7e16_v_u16mf2_m(...) __riscv_vlsseg7e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlsseg7e16_v_u16mf4(...) __riscv_vlsseg7e16_v_u16mf4(__VA_ARGS__) |
| #define | vlsseg7e16_v_u16mf4_m(...) __riscv_vlsseg7e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlsseg7e32_v_f32m1(...) __riscv_vlsseg7e32_v_f32m1(__VA_ARGS__) |
| #define | vlsseg7e32_v_f32m1_m(...) __riscv_vlsseg7e32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlsseg7e32_v_f32mf2(...) __riscv_vlsseg7e32_v_f32mf2(__VA_ARGS__) |
| #define | vlsseg7e32_v_f32mf2_m(...) __riscv_vlsseg7e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlsseg7e32_v_i32m1(...) __riscv_vlsseg7e32_v_i32m1(__VA_ARGS__) |
| #define | vlsseg7e32_v_i32m1_m(...) __riscv_vlsseg7e32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlsseg7e32_v_i32mf2(...) __riscv_vlsseg7e32_v_i32mf2(__VA_ARGS__) |
| #define | vlsseg7e32_v_i32mf2_m(...) __riscv_vlsseg7e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlsseg7e32_v_u32m1(...) __riscv_vlsseg7e32_v_u32m1(__VA_ARGS__) |
| #define | vlsseg7e32_v_u32m1_m(...) __riscv_vlsseg7e32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlsseg7e32_v_u32mf2(...) __riscv_vlsseg7e32_v_u32mf2(__VA_ARGS__) |
| #define | vlsseg7e32_v_u32mf2_m(...) __riscv_vlsseg7e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlsseg7e64_v_f64m1(...) __riscv_vlsseg7e64_v_f64m1(__VA_ARGS__) |
| #define | vlsseg7e64_v_f64m1_m(...) __riscv_vlsseg7e64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlsseg7e64_v_i64m1(...) __riscv_vlsseg7e64_v_i64m1(__VA_ARGS__) |
| #define | vlsseg7e64_v_i64m1_m(...) __riscv_vlsseg7e64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlsseg7e64_v_u64m1(...) __riscv_vlsseg7e64_v_u64m1(__VA_ARGS__) |
| #define | vlsseg7e64_v_u64m1_m(...) __riscv_vlsseg7e64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlsseg7e8_v_i8m1(...) __riscv_vlsseg7e8_v_i8m1(__VA_ARGS__) |
| #define | vlsseg7e8_v_i8m1_m(...) __riscv_vlsseg7e8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlsseg7e8_v_i8mf2(...) __riscv_vlsseg7e8_v_i8mf2(__VA_ARGS__) |
| #define | vlsseg7e8_v_i8mf2_m(...) __riscv_vlsseg7e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlsseg7e8_v_i8mf4(...) __riscv_vlsseg7e8_v_i8mf4(__VA_ARGS__) |
| #define | vlsseg7e8_v_i8mf4_m(...) __riscv_vlsseg7e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlsseg7e8_v_i8mf8(...) __riscv_vlsseg7e8_v_i8mf8(__VA_ARGS__) |
| #define | vlsseg7e8_v_i8mf8_m(...) __riscv_vlsseg7e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlsseg7e8_v_u8m1(...) __riscv_vlsseg7e8_v_u8m1(__VA_ARGS__) |
| #define | vlsseg7e8_v_u8m1_m(...) __riscv_vlsseg7e8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlsseg7e8_v_u8mf2(...) __riscv_vlsseg7e8_v_u8mf2(__VA_ARGS__) |
| #define | vlsseg7e8_v_u8mf2_m(...) __riscv_vlsseg7e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlsseg7e8_v_u8mf4(...) __riscv_vlsseg7e8_v_u8mf4(__VA_ARGS__) |
| #define | vlsseg7e8_v_u8mf4_m(...) __riscv_vlsseg7e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlsseg7e8_v_u8mf8(...) __riscv_vlsseg7e8_v_u8mf8(__VA_ARGS__) |
| #define | vlsseg7e8_v_u8mf8_m(...) __riscv_vlsseg7e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vlsseg8e16_v_f16m1(...) __riscv_vlsseg8e16_v_f16m1(__VA_ARGS__) |
| #define | vlsseg8e16_v_f16m1_m(...) __riscv_vlsseg8e16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vlsseg8e16_v_f16mf2(...) __riscv_vlsseg8e16_v_f16mf2(__VA_ARGS__) |
| #define | vlsseg8e16_v_f16mf2_m(...) __riscv_vlsseg8e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vlsseg8e16_v_f16mf4(...) __riscv_vlsseg8e16_v_f16mf4(__VA_ARGS__) |
| #define | vlsseg8e16_v_f16mf4_m(...) __riscv_vlsseg8e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vlsseg8e16_v_i16m1(...) __riscv_vlsseg8e16_v_i16m1(__VA_ARGS__) |
| #define | vlsseg8e16_v_i16m1_m(...) __riscv_vlsseg8e16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vlsseg8e16_v_i16mf2(...) __riscv_vlsseg8e16_v_i16mf2(__VA_ARGS__) |
| #define | vlsseg8e16_v_i16mf2_m(...) __riscv_vlsseg8e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vlsseg8e16_v_i16mf4(...) __riscv_vlsseg8e16_v_i16mf4(__VA_ARGS__) |
| #define | vlsseg8e16_v_i16mf4_m(...) __riscv_vlsseg8e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vlsseg8e16_v_u16m1(...) __riscv_vlsseg8e16_v_u16m1(__VA_ARGS__) |
| #define | vlsseg8e16_v_u16m1_m(...) __riscv_vlsseg8e16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vlsseg8e16_v_u16mf2(...) __riscv_vlsseg8e16_v_u16mf2(__VA_ARGS__) |
| #define | vlsseg8e16_v_u16mf2_m(...) __riscv_vlsseg8e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vlsseg8e16_v_u16mf4(...) __riscv_vlsseg8e16_v_u16mf4(__VA_ARGS__) |
| #define | vlsseg8e16_v_u16mf4_m(...) __riscv_vlsseg8e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vlsseg8e32_v_f32m1(...) __riscv_vlsseg8e32_v_f32m1(__VA_ARGS__) |
| #define | vlsseg8e32_v_f32m1_m(...) __riscv_vlsseg8e32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vlsseg8e32_v_f32mf2(...) __riscv_vlsseg8e32_v_f32mf2(__VA_ARGS__) |
| #define | vlsseg8e32_v_f32mf2_m(...) __riscv_vlsseg8e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vlsseg8e32_v_i32m1(...) __riscv_vlsseg8e32_v_i32m1(__VA_ARGS__) |
| #define | vlsseg8e32_v_i32m1_m(...) __riscv_vlsseg8e32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vlsseg8e32_v_i32mf2(...) __riscv_vlsseg8e32_v_i32mf2(__VA_ARGS__) |
| #define | vlsseg8e32_v_i32mf2_m(...) __riscv_vlsseg8e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vlsseg8e32_v_u32m1(...) __riscv_vlsseg8e32_v_u32m1(__VA_ARGS__) |
| #define | vlsseg8e32_v_u32m1_m(...) __riscv_vlsseg8e32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vlsseg8e32_v_u32mf2(...) __riscv_vlsseg8e32_v_u32mf2(__VA_ARGS__) |
| #define | vlsseg8e32_v_u32mf2_m(...) __riscv_vlsseg8e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vlsseg8e64_v_f64m1(...) __riscv_vlsseg8e64_v_f64m1(__VA_ARGS__) |
| #define | vlsseg8e64_v_f64m1_m(...) __riscv_vlsseg8e64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vlsseg8e64_v_i64m1(...) __riscv_vlsseg8e64_v_i64m1(__VA_ARGS__) |
| #define | vlsseg8e64_v_i64m1_m(...) __riscv_vlsseg8e64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vlsseg8e64_v_u64m1(...) __riscv_vlsseg8e64_v_u64m1(__VA_ARGS__) |
| #define | vlsseg8e64_v_u64m1_m(...) __riscv_vlsseg8e64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vlsseg8e8_v_i8m1(...) __riscv_vlsseg8e8_v_i8m1(__VA_ARGS__) |
| #define | vlsseg8e8_v_i8m1_m(...) __riscv_vlsseg8e8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vlsseg8e8_v_i8mf2(...) __riscv_vlsseg8e8_v_i8mf2(__VA_ARGS__) |
| #define | vlsseg8e8_v_i8mf2_m(...) __riscv_vlsseg8e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vlsseg8e8_v_i8mf4(...) __riscv_vlsseg8e8_v_i8mf4(__VA_ARGS__) |
| #define | vlsseg8e8_v_i8mf4_m(...) __riscv_vlsseg8e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vlsseg8e8_v_i8mf8(...) __riscv_vlsseg8e8_v_i8mf8(__VA_ARGS__) |
| #define | vlsseg8e8_v_i8mf8_m(...) __riscv_vlsseg8e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vlsseg8e8_v_u8m1(...) __riscv_vlsseg8e8_v_u8m1(__VA_ARGS__) |
| #define | vlsseg8e8_v_u8m1_m(...) __riscv_vlsseg8e8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vlsseg8e8_v_u8mf2(...) __riscv_vlsseg8e8_v_u8mf2(__VA_ARGS__) |
| #define | vlsseg8e8_v_u8mf2_m(...) __riscv_vlsseg8e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vlsseg8e8_v_u8mf4(...) __riscv_vlsseg8e8_v_u8mf4(__VA_ARGS__) |
| #define | vlsseg8e8_v_u8mf4_m(...) __riscv_vlsseg8e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vlsseg8e8_v_u8mf8(...) __riscv_vlsseg8e8_v_u8mf8(__VA_ARGS__) |
| #define | vlsseg8e8_v_u8mf8_m(...) __riscv_vlsseg8e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxei16_v_f16m1(...) __riscv_vluxei16_v_f16m1(__VA_ARGS__) |
| #define | vluxei16_v_f16m1_m(...) __riscv_vluxei16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxei16_v_f16m2(...) __riscv_vluxei16_v_f16m2(__VA_ARGS__) |
| #define | vluxei16_v_f16m2_m(...) __riscv_vluxei16_v_f16m2_tumu(__VA_ARGS__) |
| #define | vluxei16_v_f16m4(...) __riscv_vluxei16_v_f16m4(__VA_ARGS__) |
| #define | vluxei16_v_f16m4_m(...) __riscv_vluxei16_v_f16m4_tumu(__VA_ARGS__) |
| #define | vluxei16_v_f16m8(...) __riscv_vluxei16_v_f16m8(__VA_ARGS__) |
| #define | vluxei16_v_f16m8_m(...) __riscv_vluxei16_v_f16m8_tumu(__VA_ARGS__) |
| #define | vluxei16_v_f16mf2(...) __riscv_vluxei16_v_f16mf2(__VA_ARGS__) |
| #define | vluxei16_v_f16mf2_m(...) __riscv_vluxei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxei16_v_f16mf4(...) __riscv_vluxei16_v_f16mf4(__VA_ARGS__) |
| #define | vluxei16_v_f16mf4_m(...) __riscv_vluxei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxei16_v_f32m1(...) __riscv_vluxei16_v_f32m1(__VA_ARGS__) |
| #define | vluxei16_v_f32m1_m(...) __riscv_vluxei16_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxei16_v_f32m2(...) __riscv_vluxei16_v_f32m2(__VA_ARGS__) |
| #define | vluxei16_v_f32m2_m(...) __riscv_vluxei16_v_f32m2_tumu(__VA_ARGS__) |
| #define | vluxei16_v_f32m4(...) __riscv_vluxei16_v_f32m4(__VA_ARGS__) |
| #define | vluxei16_v_f32m4_m(...) __riscv_vluxei16_v_f32m4_tumu(__VA_ARGS__) |
| #define | vluxei16_v_f32m8(...) __riscv_vluxei16_v_f32m8(__VA_ARGS__) |
| #define | vluxei16_v_f32m8_m(...) __riscv_vluxei16_v_f32m8_tumu(__VA_ARGS__) |
| #define | vluxei16_v_f32mf2(...) __riscv_vluxei16_v_f32mf2(__VA_ARGS__) |
| #define | vluxei16_v_f32mf2_m(...) __riscv_vluxei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxei16_v_f64m1(...) __riscv_vluxei16_v_f64m1(__VA_ARGS__) |
| #define | vluxei16_v_f64m1_m(...) __riscv_vluxei16_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxei16_v_f64m2(...) __riscv_vluxei16_v_f64m2(__VA_ARGS__) |
| #define | vluxei16_v_f64m2_m(...) __riscv_vluxei16_v_f64m2_tumu(__VA_ARGS__) |
| #define | vluxei16_v_f64m4(...) __riscv_vluxei16_v_f64m4(__VA_ARGS__) |
| #define | vluxei16_v_f64m4_m(...) __riscv_vluxei16_v_f64m4_tumu(__VA_ARGS__) |
| #define | vluxei16_v_f64m8(...) __riscv_vluxei16_v_f64m8(__VA_ARGS__) |
| #define | vluxei16_v_f64m8_m(...) __riscv_vluxei16_v_f64m8_tumu(__VA_ARGS__) |
| #define | vluxei16_v_i16m1(...) __riscv_vluxei16_v_i16m1(__VA_ARGS__) |
| #define | vluxei16_v_i16m1_m(...) __riscv_vluxei16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxei16_v_i16m2(...) __riscv_vluxei16_v_i16m2(__VA_ARGS__) |
| #define | vluxei16_v_i16m2_m(...) __riscv_vluxei16_v_i16m2_tumu(__VA_ARGS__) |
| #define | vluxei16_v_i16m4(...) __riscv_vluxei16_v_i16m4(__VA_ARGS__) |
| #define | vluxei16_v_i16m4_m(...) __riscv_vluxei16_v_i16m4_tumu(__VA_ARGS__) |
| #define | vluxei16_v_i16m8(...) __riscv_vluxei16_v_i16m8(__VA_ARGS__) |
| #define | vluxei16_v_i16m8_m(...) __riscv_vluxei16_v_i16m8_tumu(__VA_ARGS__) |
| #define | vluxei16_v_i16mf2(...) __riscv_vluxei16_v_i16mf2(__VA_ARGS__) |
| #define | vluxei16_v_i16mf2_m(...) __riscv_vluxei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxei16_v_i16mf4(...) __riscv_vluxei16_v_i16mf4(__VA_ARGS__) |
| #define | vluxei16_v_i16mf4_m(...) __riscv_vluxei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxei16_v_i32m1(...) __riscv_vluxei16_v_i32m1(__VA_ARGS__) |
| #define | vluxei16_v_i32m1_m(...) __riscv_vluxei16_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxei16_v_i32m2(...) __riscv_vluxei16_v_i32m2(__VA_ARGS__) |
| #define | vluxei16_v_i32m2_m(...) __riscv_vluxei16_v_i32m2_tumu(__VA_ARGS__) |
| #define | vluxei16_v_i32m4(...) __riscv_vluxei16_v_i32m4(__VA_ARGS__) |
| #define | vluxei16_v_i32m4_m(...) __riscv_vluxei16_v_i32m4_tumu(__VA_ARGS__) |
| #define | vluxei16_v_i32m8(...) __riscv_vluxei16_v_i32m8(__VA_ARGS__) |
| #define | vluxei16_v_i32m8_m(...) __riscv_vluxei16_v_i32m8_tumu(__VA_ARGS__) |
| #define | vluxei16_v_i32mf2(...) __riscv_vluxei16_v_i32mf2(__VA_ARGS__) |
| #define | vluxei16_v_i32mf2_m(...) __riscv_vluxei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxei16_v_i64m1(...) __riscv_vluxei16_v_i64m1(__VA_ARGS__) |
| #define | vluxei16_v_i64m1_m(...) __riscv_vluxei16_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxei16_v_i64m2(...) __riscv_vluxei16_v_i64m2(__VA_ARGS__) |
| #define | vluxei16_v_i64m2_m(...) __riscv_vluxei16_v_i64m2_tumu(__VA_ARGS__) |
| #define | vluxei16_v_i64m4(...) __riscv_vluxei16_v_i64m4(__VA_ARGS__) |
| #define | vluxei16_v_i64m4_m(...) __riscv_vluxei16_v_i64m4_tumu(__VA_ARGS__) |
| #define | vluxei16_v_i64m8(...) __riscv_vluxei16_v_i64m8(__VA_ARGS__) |
| #define | vluxei16_v_i64m8_m(...) __riscv_vluxei16_v_i64m8_tumu(__VA_ARGS__) |
| #define | vluxei16_v_i8m1(...) __riscv_vluxei16_v_i8m1(__VA_ARGS__) |
| #define | vluxei16_v_i8m1_m(...) __riscv_vluxei16_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxei16_v_i8m2(...) __riscv_vluxei16_v_i8m2(__VA_ARGS__) |
| #define | vluxei16_v_i8m2_m(...) __riscv_vluxei16_v_i8m2_tumu(__VA_ARGS__) |
| #define | vluxei16_v_i8m4(...) __riscv_vluxei16_v_i8m4(__VA_ARGS__) |
| #define | vluxei16_v_i8m4_m(...) __riscv_vluxei16_v_i8m4_tumu(__VA_ARGS__) |
| #define | vluxei16_v_i8mf2(...) __riscv_vluxei16_v_i8mf2(__VA_ARGS__) |
| #define | vluxei16_v_i8mf2_m(...) __riscv_vluxei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxei16_v_i8mf4(...) __riscv_vluxei16_v_i8mf4(__VA_ARGS__) |
| #define | vluxei16_v_i8mf4_m(...) __riscv_vluxei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxei16_v_i8mf8(...) __riscv_vluxei16_v_i8mf8(__VA_ARGS__) |
| #define | vluxei16_v_i8mf8_m(...) __riscv_vluxei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxei16_v_u16m1(...) __riscv_vluxei16_v_u16m1(__VA_ARGS__) |
| #define | vluxei16_v_u16m1_m(...) __riscv_vluxei16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxei16_v_u16m2(...) __riscv_vluxei16_v_u16m2(__VA_ARGS__) |
| #define | vluxei16_v_u16m2_m(...) __riscv_vluxei16_v_u16m2_tumu(__VA_ARGS__) |
| #define | vluxei16_v_u16m4(...) __riscv_vluxei16_v_u16m4(__VA_ARGS__) |
| #define | vluxei16_v_u16m4_m(...) __riscv_vluxei16_v_u16m4_tumu(__VA_ARGS__) |
| #define | vluxei16_v_u16m8(...) __riscv_vluxei16_v_u16m8(__VA_ARGS__) |
| #define | vluxei16_v_u16m8_m(...) __riscv_vluxei16_v_u16m8_tumu(__VA_ARGS__) |
| #define | vluxei16_v_u16mf2(...) __riscv_vluxei16_v_u16mf2(__VA_ARGS__) |
| #define | vluxei16_v_u16mf2_m(...) __riscv_vluxei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxei16_v_u16mf4(...) __riscv_vluxei16_v_u16mf4(__VA_ARGS__) |
| #define | vluxei16_v_u16mf4_m(...) __riscv_vluxei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxei16_v_u32m1(...) __riscv_vluxei16_v_u32m1(__VA_ARGS__) |
| #define | vluxei16_v_u32m1_m(...) __riscv_vluxei16_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxei16_v_u32m2(...) __riscv_vluxei16_v_u32m2(__VA_ARGS__) |
| #define | vluxei16_v_u32m2_m(...) __riscv_vluxei16_v_u32m2_tumu(__VA_ARGS__) |
| #define | vluxei16_v_u32m4(...) __riscv_vluxei16_v_u32m4(__VA_ARGS__) |
| #define | vluxei16_v_u32m4_m(...) __riscv_vluxei16_v_u32m4_tumu(__VA_ARGS__) |
| #define | vluxei16_v_u32m8(...) __riscv_vluxei16_v_u32m8(__VA_ARGS__) |
| #define | vluxei16_v_u32m8_m(...) __riscv_vluxei16_v_u32m8_tumu(__VA_ARGS__) |
| #define | vluxei16_v_u32mf2(...) __riscv_vluxei16_v_u32mf2(__VA_ARGS__) |
| #define | vluxei16_v_u32mf2_m(...) __riscv_vluxei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxei16_v_u64m1(...) __riscv_vluxei16_v_u64m1(__VA_ARGS__) |
| #define | vluxei16_v_u64m1_m(...) __riscv_vluxei16_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxei16_v_u64m2(...) __riscv_vluxei16_v_u64m2(__VA_ARGS__) |
| #define | vluxei16_v_u64m2_m(...) __riscv_vluxei16_v_u64m2_tumu(__VA_ARGS__) |
| #define | vluxei16_v_u64m4(...) __riscv_vluxei16_v_u64m4(__VA_ARGS__) |
| #define | vluxei16_v_u64m4_m(...) __riscv_vluxei16_v_u64m4_tumu(__VA_ARGS__) |
| #define | vluxei16_v_u64m8(...) __riscv_vluxei16_v_u64m8(__VA_ARGS__) |
| #define | vluxei16_v_u64m8_m(...) __riscv_vluxei16_v_u64m8_tumu(__VA_ARGS__) |
| #define | vluxei16_v_u8m1(...) __riscv_vluxei16_v_u8m1(__VA_ARGS__) |
| #define | vluxei16_v_u8m1_m(...) __riscv_vluxei16_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxei16_v_u8m2(...) __riscv_vluxei16_v_u8m2(__VA_ARGS__) |
| #define | vluxei16_v_u8m2_m(...) __riscv_vluxei16_v_u8m2_tumu(__VA_ARGS__) |
| #define | vluxei16_v_u8m4(...) __riscv_vluxei16_v_u8m4(__VA_ARGS__) |
| #define | vluxei16_v_u8m4_m(...) __riscv_vluxei16_v_u8m4_tumu(__VA_ARGS__) |
| #define | vluxei16_v_u8mf2(...) __riscv_vluxei16_v_u8mf2(__VA_ARGS__) |
| #define | vluxei16_v_u8mf2_m(...) __riscv_vluxei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxei16_v_u8mf4(...) __riscv_vluxei16_v_u8mf4(__VA_ARGS__) |
| #define | vluxei16_v_u8mf4_m(...) __riscv_vluxei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxei16_v_u8mf8(...) __riscv_vluxei16_v_u8mf8(__VA_ARGS__) |
| #define | vluxei16_v_u8mf8_m(...) __riscv_vluxei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxei32_v_f16m1(...) __riscv_vluxei32_v_f16m1(__VA_ARGS__) |
| #define | vluxei32_v_f16m1_m(...) __riscv_vluxei32_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxei32_v_f16m2(...) __riscv_vluxei32_v_f16m2(__VA_ARGS__) |
| #define | vluxei32_v_f16m2_m(...) __riscv_vluxei32_v_f16m2_tumu(__VA_ARGS__) |
| #define | vluxei32_v_f16m4(...) __riscv_vluxei32_v_f16m4(__VA_ARGS__) |
| #define | vluxei32_v_f16m4_m(...) __riscv_vluxei32_v_f16m4_tumu(__VA_ARGS__) |
| #define | vluxei32_v_f16mf2(...) __riscv_vluxei32_v_f16mf2(__VA_ARGS__) |
| #define | vluxei32_v_f16mf2_m(...) __riscv_vluxei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxei32_v_f16mf4(...) __riscv_vluxei32_v_f16mf4(__VA_ARGS__) |
| #define | vluxei32_v_f16mf4_m(...) __riscv_vluxei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxei32_v_f32m1(...) __riscv_vluxei32_v_f32m1(__VA_ARGS__) |
| #define | vluxei32_v_f32m1_m(...) __riscv_vluxei32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxei32_v_f32m2(...) __riscv_vluxei32_v_f32m2(__VA_ARGS__) |
| #define | vluxei32_v_f32m2_m(...) __riscv_vluxei32_v_f32m2_tumu(__VA_ARGS__) |
| #define | vluxei32_v_f32m4(...) __riscv_vluxei32_v_f32m4(__VA_ARGS__) |
| #define | vluxei32_v_f32m4_m(...) __riscv_vluxei32_v_f32m4_tumu(__VA_ARGS__) |
| #define | vluxei32_v_f32m8(...) __riscv_vluxei32_v_f32m8(__VA_ARGS__) |
| #define | vluxei32_v_f32m8_m(...) __riscv_vluxei32_v_f32m8_tumu(__VA_ARGS__) |
| #define | vluxei32_v_f32mf2(...) __riscv_vluxei32_v_f32mf2(__VA_ARGS__) |
| #define | vluxei32_v_f32mf2_m(...) __riscv_vluxei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxei32_v_f64m1(...) __riscv_vluxei32_v_f64m1(__VA_ARGS__) |
| #define | vluxei32_v_f64m1_m(...) __riscv_vluxei32_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxei32_v_f64m2(...) __riscv_vluxei32_v_f64m2(__VA_ARGS__) |
| #define | vluxei32_v_f64m2_m(...) __riscv_vluxei32_v_f64m2_tumu(__VA_ARGS__) |
| #define | vluxei32_v_f64m4(...) __riscv_vluxei32_v_f64m4(__VA_ARGS__) |
| #define | vluxei32_v_f64m4_m(...) __riscv_vluxei32_v_f64m4_tumu(__VA_ARGS__) |
| #define | vluxei32_v_f64m8(...) __riscv_vluxei32_v_f64m8(__VA_ARGS__) |
| #define | vluxei32_v_f64m8_m(...) __riscv_vluxei32_v_f64m8_tumu(__VA_ARGS__) |
| #define | vluxei32_v_i16m1(...) __riscv_vluxei32_v_i16m1(__VA_ARGS__) |
| #define | vluxei32_v_i16m1_m(...) __riscv_vluxei32_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxei32_v_i16m2(...) __riscv_vluxei32_v_i16m2(__VA_ARGS__) |
| #define | vluxei32_v_i16m2_m(...) __riscv_vluxei32_v_i16m2_tumu(__VA_ARGS__) |
| #define | vluxei32_v_i16m4(...) __riscv_vluxei32_v_i16m4(__VA_ARGS__) |
| #define | vluxei32_v_i16m4_m(...) __riscv_vluxei32_v_i16m4_tumu(__VA_ARGS__) |
| #define | vluxei32_v_i16mf2(...) __riscv_vluxei32_v_i16mf2(__VA_ARGS__) |
| #define | vluxei32_v_i16mf2_m(...) __riscv_vluxei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxei32_v_i16mf4(...) __riscv_vluxei32_v_i16mf4(__VA_ARGS__) |
| #define | vluxei32_v_i16mf4_m(...) __riscv_vluxei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxei32_v_i32m1(...) __riscv_vluxei32_v_i32m1(__VA_ARGS__) |
| #define | vluxei32_v_i32m1_m(...) __riscv_vluxei32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxei32_v_i32m2(...) __riscv_vluxei32_v_i32m2(__VA_ARGS__) |
| #define | vluxei32_v_i32m2_m(...) __riscv_vluxei32_v_i32m2_tumu(__VA_ARGS__) |
| #define | vluxei32_v_i32m4(...) __riscv_vluxei32_v_i32m4(__VA_ARGS__) |
| #define | vluxei32_v_i32m4_m(...) __riscv_vluxei32_v_i32m4_tumu(__VA_ARGS__) |
| #define | vluxei32_v_i32m8(...) __riscv_vluxei32_v_i32m8(__VA_ARGS__) |
| #define | vluxei32_v_i32m8_m(...) __riscv_vluxei32_v_i32m8_tumu(__VA_ARGS__) |
| #define | vluxei32_v_i32mf2(...) __riscv_vluxei32_v_i32mf2(__VA_ARGS__) |
| #define | vluxei32_v_i32mf2_m(...) __riscv_vluxei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxei32_v_i64m1(...) __riscv_vluxei32_v_i64m1(__VA_ARGS__) |
| #define | vluxei32_v_i64m1_m(...) __riscv_vluxei32_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxei32_v_i64m2(...) __riscv_vluxei32_v_i64m2(__VA_ARGS__) |
| #define | vluxei32_v_i64m2_m(...) __riscv_vluxei32_v_i64m2_tumu(__VA_ARGS__) |
| #define | vluxei32_v_i64m4(...) __riscv_vluxei32_v_i64m4(__VA_ARGS__) |
| #define | vluxei32_v_i64m4_m(...) __riscv_vluxei32_v_i64m4_tumu(__VA_ARGS__) |
| #define | vluxei32_v_i64m8(...) __riscv_vluxei32_v_i64m8(__VA_ARGS__) |
| #define | vluxei32_v_i64m8_m(...) __riscv_vluxei32_v_i64m8_tumu(__VA_ARGS__) |
| #define | vluxei32_v_i8m1(...) __riscv_vluxei32_v_i8m1(__VA_ARGS__) |
| #define | vluxei32_v_i8m1_m(...) __riscv_vluxei32_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxei32_v_i8m2(...) __riscv_vluxei32_v_i8m2(__VA_ARGS__) |
| #define | vluxei32_v_i8m2_m(...) __riscv_vluxei32_v_i8m2_tumu(__VA_ARGS__) |
| #define | vluxei32_v_i8mf2(...) __riscv_vluxei32_v_i8mf2(__VA_ARGS__) |
| #define | vluxei32_v_i8mf2_m(...) __riscv_vluxei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxei32_v_i8mf4(...) __riscv_vluxei32_v_i8mf4(__VA_ARGS__) |
| #define | vluxei32_v_i8mf4_m(...) __riscv_vluxei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxei32_v_i8mf8(...) __riscv_vluxei32_v_i8mf8(__VA_ARGS__) |
| #define | vluxei32_v_i8mf8_m(...) __riscv_vluxei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxei32_v_u16m1(...) __riscv_vluxei32_v_u16m1(__VA_ARGS__) |
| #define | vluxei32_v_u16m1_m(...) __riscv_vluxei32_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxei32_v_u16m2(...) __riscv_vluxei32_v_u16m2(__VA_ARGS__) |
| #define | vluxei32_v_u16m2_m(...) __riscv_vluxei32_v_u16m2_tumu(__VA_ARGS__) |
| #define | vluxei32_v_u16m4(...) __riscv_vluxei32_v_u16m4(__VA_ARGS__) |
| #define | vluxei32_v_u16m4_m(...) __riscv_vluxei32_v_u16m4_tumu(__VA_ARGS__) |
| #define | vluxei32_v_u16mf2(...) __riscv_vluxei32_v_u16mf2(__VA_ARGS__) |
| #define | vluxei32_v_u16mf2_m(...) __riscv_vluxei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxei32_v_u16mf4(...) __riscv_vluxei32_v_u16mf4(__VA_ARGS__) |
| #define | vluxei32_v_u16mf4_m(...) __riscv_vluxei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxei32_v_u32m1(...) __riscv_vluxei32_v_u32m1(__VA_ARGS__) |
| #define | vluxei32_v_u32m1_m(...) __riscv_vluxei32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxei32_v_u32m2(...) __riscv_vluxei32_v_u32m2(__VA_ARGS__) |
| #define | vluxei32_v_u32m2_m(...) __riscv_vluxei32_v_u32m2_tumu(__VA_ARGS__) |
| #define | vluxei32_v_u32m4(...) __riscv_vluxei32_v_u32m4(__VA_ARGS__) |
| #define | vluxei32_v_u32m4_m(...) __riscv_vluxei32_v_u32m4_tumu(__VA_ARGS__) |
| #define | vluxei32_v_u32m8(...) __riscv_vluxei32_v_u32m8(__VA_ARGS__) |
| #define | vluxei32_v_u32m8_m(...) __riscv_vluxei32_v_u32m8_tumu(__VA_ARGS__) |
| #define | vluxei32_v_u32mf2(...) __riscv_vluxei32_v_u32mf2(__VA_ARGS__) |
| #define | vluxei32_v_u32mf2_m(...) __riscv_vluxei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxei32_v_u64m1(...) __riscv_vluxei32_v_u64m1(__VA_ARGS__) |
| #define | vluxei32_v_u64m1_m(...) __riscv_vluxei32_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxei32_v_u64m2(...) __riscv_vluxei32_v_u64m2(__VA_ARGS__) |
| #define | vluxei32_v_u64m2_m(...) __riscv_vluxei32_v_u64m2_tumu(__VA_ARGS__) |
| #define | vluxei32_v_u64m4(...) __riscv_vluxei32_v_u64m4(__VA_ARGS__) |
| #define | vluxei32_v_u64m4_m(...) __riscv_vluxei32_v_u64m4_tumu(__VA_ARGS__) |
| #define | vluxei32_v_u64m8(...) __riscv_vluxei32_v_u64m8(__VA_ARGS__) |
| #define | vluxei32_v_u64m8_m(...) __riscv_vluxei32_v_u64m8_tumu(__VA_ARGS__) |
| #define | vluxei32_v_u8m1(...) __riscv_vluxei32_v_u8m1(__VA_ARGS__) |
| #define | vluxei32_v_u8m1_m(...) __riscv_vluxei32_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxei32_v_u8m2(...) __riscv_vluxei32_v_u8m2(__VA_ARGS__) |
| #define | vluxei32_v_u8m2_m(...) __riscv_vluxei32_v_u8m2_tumu(__VA_ARGS__) |
| #define | vluxei32_v_u8mf2(...) __riscv_vluxei32_v_u8mf2(__VA_ARGS__) |
| #define | vluxei32_v_u8mf2_m(...) __riscv_vluxei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxei32_v_u8mf4(...) __riscv_vluxei32_v_u8mf4(__VA_ARGS__) |
| #define | vluxei32_v_u8mf4_m(...) __riscv_vluxei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxei32_v_u8mf8(...) __riscv_vluxei32_v_u8mf8(__VA_ARGS__) |
| #define | vluxei32_v_u8mf8_m(...) __riscv_vluxei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxei64_v_f16m1(...) __riscv_vluxei64_v_f16m1(__VA_ARGS__) |
| #define | vluxei64_v_f16m1_m(...) __riscv_vluxei64_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxei64_v_f16m2(...) __riscv_vluxei64_v_f16m2(__VA_ARGS__) |
| #define | vluxei64_v_f16m2_m(...) __riscv_vluxei64_v_f16m2_tumu(__VA_ARGS__) |
| #define | vluxei64_v_f16mf2(...) __riscv_vluxei64_v_f16mf2(__VA_ARGS__) |
| #define | vluxei64_v_f16mf2_m(...) __riscv_vluxei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxei64_v_f16mf4(...) __riscv_vluxei64_v_f16mf4(__VA_ARGS__) |
| #define | vluxei64_v_f16mf4_m(...) __riscv_vluxei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxei64_v_f32m1(...) __riscv_vluxei64_v_f32m1(__VA_ARGS__) |
| #define | vluxei64_v_f32m1_m(...) __riscv_vluxei64_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxei64_v_f32m2(...) __riscv_vluxei64_v_f32m2(__VA_ARGS__) |
| #define | vluxei64_v_f32m2_m(...) __riscv_vluxei64_v_f32m2_tumu(__VA_ARGS__) |
| #define | vluxei64_v_f32m4(...) __riscv_vluxei64_v_f32m4(__VA_ARGS__) |
| #define | vluxei64_v_f32m4_m(...) __riscv_vluxei64_v_f32m4_tumu(__VA_ARGS__) |
| #define | vluxei64_v_f32mf2(...) __riscv_vluxei64_v_f32mf2(__VA_ARGS__) |
| #define | vluxei64_v_f32mf2_m(...) __riscv_vluxei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxei64_v_f64m1(...) __riscv_vluxei64_v_f64m1(__VA_ARGS__) |
| #define | vluxei64_v_f64m1_m(...) __riscv_vluxei64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxei64_v_f64m2(...) __riscv_vluxei64_v_f64m2(__VA_ARGS__) |
| #define | vluxei64_v_f64m2_m(...) __riscv_vluxei64_v_f64m2_tumu(__VA_ARGS__) |
| #define | vluxei64_v_f64m4(...) __riscv_vluxei64_v_f64m4(__VA_ARGS__) |
| #define | vluxei64_v_f64m4_m(...) __riscv_vluxei64_v_f64m4_tumu(__VA_ARGS__) |
| #define | vluxei64_v_f64m8(...) __riscv_vluxei64_v_f64m8(__VA_ARGS__) |
| #define | vluxei64_v_f64m8_m(...) __riscv_vluxei64_v_f64m8_tumu(__VA_ARGS__) |
| #define | vluxei64_v_i16m1(...) __riscv_vluxei64_v_i16m1(__VA_ARGS__) |
| #define | vluxei64_v_i16m1_m(...) __riscv_vluxei64_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxei64_v_i16m2(...) __riscv_vluxei64_v_i16m2(__VA_ARGS__) |
| #define | vluxei64_v_i16m2_m(...) __riscv_vluxei64_v_i16m2_tumu(__VA_ARGS__) |
| #define | vluxei64_v_i16mf2(...) __riscv_vluxei64_v_i16mf2(__VA_ARGS__) |
| #define | vluxei64_v_i16mf2_m(...) __riscv_vluxei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxei64_v_i16mf4(...) __riscv_vluxei64_v_i16mf4(__VA_ARGS__) |
| #define | vluxei64_v_i16mf4_m(...) __riscv_vluxei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxei64_v_i32m1(...) __riscv_vluxei64_v_i32m1(__VA_ARGS__) |
| #define | vluxei64_v_i32m1_m(...) __riscv_vluxei64_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxei64_v_i32m2(...) __riscv_vluxei64_v_i32m2(__VA_ARGS__) |
| #define | vluxei64_v_i32m2_m(...) __riscv_vluxei64_v_i32m2_tumu(__VA_ARGS__) |
| #define | vluxei64_v_i32m4(...) __riscv_vluxei64_v_i32m4(__VA_ARGS__) |
| #define | vluxei64_v_i32m4_m(...) __riscv_vluxei64_v_i32m4_tumu(__VA_ARGS__) |
| #define | vluxei64_v_i32mf2(...) __riscv_vluxei64_v_i32mf2(__VA_ARGS__) |
| #define | vluxei64_v_i32mf2_m(...) __riscv_vluxei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxei64_v_i64m1(...) __riscv_vluxei64_v_i64m1(__VA_ARGS__) |
| #define | vluxei64_v_i64m1_m(...) __riscv_vluxei64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxei64_v_i64m2(...) __riscv_vluxei64_v_i64m2(__VA_ARGS__) |
| #define | vluxei64_v_i64m2_m(...) __riscv_vluxei64_v_i64m2_tumu(__VA_ARGS__) |
| #define | vluxei64_v_i64m4(...) __riscv_vluxei64_v_i64m4(__VA_ARGS__) |
| #define | vluxei64_v_i64m4_m(...) __riscv_vluxei64_v_i64m4_tumu(__VA_ARGS__) |
| #define | vluxei64_v_i64m8(...) __riscv_vluxei64_v_i64m8(__VA_ARGS__) |
| #define | vluxei64_v_i64m8_m(...) __riscv_vluxei64_v_i64m8_tumu(__VA_ARGS__) |
| #define | vluxei64_v_i8m1(...) __riscv_vluxei64_v_i8m1(__VA_ARGS__) |
| #define | vluxei64_v_i8m1_m(...) __riscv_vluxei64_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxei64_v_i8mf2(...) __riscv_vluxei64_v_i8mf2(__VA_ARGS__) |
| #define | vluxei64_v_i8mf2_m(...) __riscv_vluxei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxei64_v_i8mf4(...) __riscv_vluxei64_v_i8mf4(__VA_ARGS__) |
| #define | vluxei64_v_i8mf4_m(...) __riscv_vluxei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxei64_v_i8mf8(...) __riscv_vluxei64_v_i8mf8(__VA_ARGS__) |
| #define | vluxei64_v_i8mf8_m(...) __riscv_vluxei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxei64_v_u16m1(...) __riscv_vluxei64_v_u16m1(__VA_ARGS__) |
| #define | vluxei64_v_u16m1_m(...) __riscv_vluxei64_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxei64_v_u16m2(...) __riscv_vluxei64_v_u16m2(__VA_ARGS__) |
| #define | vluxei64_v_u16m2_m(...) __riscv_vluxei64_v_u16m2_tumu(__VA_ARGS__) |
| #define | vluxei64_v_u16mf2(...) __riscv_vluxei64_v_u16mf2(__VA_ARGS__) |
| #define | vluxei64_v_u16mf2_m(...) __riscv_vluxei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxei64_v_u16mf4(...) __riscv_vluxei64_v_u16mf4(__VA_ARGS__) |
| #define | vluxei64_v_u16mf4_m(...) __riscv_vluxei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxei64_v_u32m1(...) __riscv_vluxei64_v_u32m1(__VA_ARGS__) |
| #define | vluxei64_v_u32m1_m(...) __riscv_vluxei64_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxei64_v_u32m2(...) __riscv_vluxei64_v_u32m2(__VA_ARGS__) |
| #define | vluxei64_v_u32m2_m(...) __riscv_vluxei64_v_u32m2_tumu(__VA_ARGS__) |
| #define | vluxei64_v_u32m4(...) __riscv_vluxei64_v_u32m4(__VA_ARGS__) |
| #define | vluxei64_v_u32m4_m(...) __riscv_vluxei64_v_u32m4_tumu(__VA_ARGS__) |
| #define | vluxei64_v_u32mf2(...) __riscv_vluxei64_v_u32mf2(__VA_ARGS__) |
| #define | vluxei64_v_u32mf2_m(...) __riscv_vluxei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxei64_v_u64m1(...) __riscv_vluxei64_v_u64m1(__VA_ARGS__) |
| #define | vluxei64_v_u64m1_m(...) __riscv_vluxei64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxei64_v_u64m2(...) __riscv_vluxei64_v_u64m2(__VA_ARGS__) |
| #define | vluxei64_v_u64m2_m(...) __riscv_vluxei64_v_u64m2_tumu(__VA_ARGS__) |
| #define | vluxei64_v_u64m4(...) __riscv_vluxei64_v_u64m4(__VA_ARGS__) |
| #define | vluxei64_v_u64m4_m(...) __riscv_vluxei64_v_u64m4_tumu(__VA_ARGS__) |
| #define | vluxei64_v_u64m8(...) __riscv_vluxei64_v_u64m8(__VA_ARGS__) |
| #define | vluxei64_v_u64m8_m(...) __riscv_vluxei64_v_u64m8_tumu(__VA_ARGS__) |
| #define | vluxei64_v_u8m1(...) __riscv_vluxei64_v_u8m1(__VA_ARGS__) |
| #define | vluxei64_v_u8m1_m(...) __riscv_vluxei64_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxei64_v_u8mf2(...) __riscv_vluxei64_v_u8mf2(__VA_ARGS__) |
| #define | vluxei64_v_u8mf2_m(...) __riscv_vluxei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxei64_v_u8mf4(...) __riscv_vluxei64_v_u8mf4(__VA_ARGS__) |
| #define | vluxei64_v_u8mf4_m(...) __riscv_vluxei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxei64_v_u8mf8(...) __riscv_vluxei64_v_u8mf8(__VA_ARGS__) |
| #define | vluxei64_v_u8mf8_m(...) __riscv_vluxei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxei8_v_f16m1(...) __riscv_vluxei8_v_f16m1(__VA_ARGS__) |
| #define | vluxei8_v_f16m1_m(...) __riscv_vluxei8_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxei8_v_f16m2(...) __riscv_vluxei8_v_f16m2(__VA_ARGS__) |
| #define | vluxei8_v_f16m2_m(...) __riscv_vluxei8_v_f16m2_tumu(__VA_ARGS__) |
| #define | vluxei8_v_f16m4(...) __riscv_vluxei8_v_f16m4(__VA_ARGS__) |
| #define | vluxei8_v_f16m4_m(...) __riscv_vluxei8_v_f16m4_tumu(__VA_ARGS__) |
| #define | vluxei8_v_f16m8(...) __riscv_vluxei8_v_f16m8(__VA_ARGS__) |
| #define | vluxei8_v_f16m8_m(...) __riscv_vluxei8_v_f16m8_tumu(__VA_ARGS__) |
| #define | vluxei8_v_f16mf2(...) __riscv_vluxei8_v_f16mf2(__VA_ARGS__) |
| #define | vluxei8_v_f16mf2_m(...) __riscv_vluxei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxei8_v_f16mf4(...) __riscv_vluxei8_v_f16mf4(__VA_ARGS__) |
| #define | vluxei8_v_f16mf4_m(...) __riscv_vluxei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxei8_v_f32m1(...) __riscv_vluxei8_v_f32m1(__VA_ARGS__) |
| #define | vluxei8_v_f32m1_m(...) __riscv_vluxei8_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxei8_v_f32m2(...) __riscv_vluxei8_v_f32m2(__VA_ARGS__) |
| #define | vluxei8_v_f32m2_m(...) __riscv_vluxei8_v_f32m2_tumu(__VA_ARGS__) |
| #define | vluxei8_v_f32m4(...) __riscv_vluxei8_v_f32m4(__VA_ARGS__) |
| #define | vluxei8_v_f32m4_m(...) __riscv_vluxei8_v_f32m4_tumu(__VA_ARGS__) |
| #define | vluxei8_v_f32m8(...) __riscv_vluxei8_v_f32m8(__VA_ARGS__) |
| #define | vluxei8_v_f32m8_m(...) __riscv_vluxei8_v_f32m8_tumu(__VA_ARGS__) |
| #define | vluxei8_v_f32mf2(...) __riscv_vluxei8_v_f32mf2(__VA_ARGS__) |
| #define | vluxei8_v_f32mf2_m(...) __riscv_vluxei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxei8_v_f64m1(...) __riscv_vluxei8_v_f64m1(__VA_ARGS__) |
| #define | vluxei8_v_f64m1_m(...) __riscv_vluxei8_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxei8_v_f64m2(...) __riscv_vluxei8_v_f64m2(__VA_ARGS__) |
| #define | vluxei8_v_f64m2_m(...) __riscv_vluxei8_v_f64m2_tumu(__VA_ARGS__) |
| #define | vluxei8_v_f64m4(...) __riscv_vluxei8_v_f64m4(__VA_ARGS__) |
| #define | vluxei8_v_f64m4_m(...) __riscv_vluxei8_v_f64m4_tumu(__VA_ARGS__) |
| #define | vluxei8_v_f64m8(...) __riscv_vluxei8_v_f64m8(__VA_ARGS__) |
| #define | vluxei8_v_f64m8_m(...) __riscv_vluxei8_v_f64m8_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i16m1(...) __riscv_vluxei8_v_i16m1(__VA_ARGS__) |
| #define | vluxei8_v_i16m1_m(...) __riscv_vluxei8_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i16m2(...) __riscv_vluxei8_v_i16m2(__VA_ARGS__) |
| #define | vluxei8_v_i16m2_m(...) __riscv_vluxei8_v_i16m2_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i16m4(...) __riscv_vluxei8_v_i16m4(__VA_ARGS__) |
| #define | vluxei8_v_i16m4_m(...) __riscv_vluxei8_v_i16m4_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i16m8(...) __riscv_vluxei8_v_i16m8(__VA_ARGS__) |
| #define | vluxei8_v_i16m8_m(...) __riscv_vluxei8_v_i16m8_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i16mf2(...) __riscv_vluxei8_v_i16mf2(__VA_ARGS__) |
| #define | vluxei8_v_i16mf2_m(...) __riscv_vluxei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i16mf4(...) __riscv_vluxei8_v_i16mf4(__VA_ARGS__) |
| #define | vluxei8_v_i16mf4_m(...) __riscv_vluxei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i32m1(...) __riscv_vluxei8_v_i32m1(__VA_ARGS__) |
| #define | vluxei8_v_i32m1_m(...) __riscv_vluxei8_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i32m2(...) __riscv_vluxei8_v_i32m2(__VA_ARGS__) |
| #define | vluxei8_v_i32m2_m(...) __riscv_vluxei8_v_i32m2_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i32m4(...) __riscv_vluxei8_v_i32m4(__VA_ARGS__) |
| #define | vluxei8_v_i32m4_m(...) __riscv_vluxei8_v_i32m4_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i32m8(...) __riscv_vluxei8_v_i32m8(__VA_ARGS__) |
| #define | vluxei8_v_i32m8_m(...) __riscv_vluxei8_v_i32m8_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i32mf2(...) __riscv_vluxei8_v_i32mf2(__VA_ARGS__) |
| #define | vluxei8_v_i32mf2_m(...) __riscv_vluxei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i64m1(...) __riscv_vluxei8_v_i64m1(__VA_ARGS__) |
| #define | vluxei8_v_i64m1_m(...) __riscv_vluxei8_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i64m2(...) __riscv_vluxei8_v_i64m2(__VA_ARGS__) |
| #define | vluxei8_v_i64m2_m(...) __riscv_vluxei8_v_i64m2_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i64m4(...) __riscv_vluxei8_v_i64m4(__VA_ARGS__) |
| #define | vluxei8_v_i64m4_m(...) __riscv_vluxei8_v_i64m4_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i64m8(...) __riscv_vluxei8_v_i64m8(__VA_ARGS__) |
| #define | vluxei8_v_i64m8_m(...) __riscv_vluxei8_v_i64m8_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i8m1(...) __riscv_vluxei8_v_i8m1(__VA_ARGS__) |
| #define | vluxei8_v_i8m1_m(...) __riscv_vluxei8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i8m2(...) __riscv_vluxei8_v_i8m2(__VA_ARGS__) |
| #define | vluxei8_v_i8m2_m(...) __riscv_vluxei8_v_i8m2_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i8m4(...) __riscv_vluxei8_v_i8m4(__VA_ARGS__) |
| #define | vluxei8_v_i8m4_m(...) __riscv_vluxei8_v_i8m4_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i8m8(...) __riscv_vluxei8_v_i8m8(__VA_ARGS__) |
| #define | vluxei8_v_i8m8_m(...) __riscv_vluxei8_v_i8m8_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i8mf2(...) __riscv_vluxei8_v_i8mf2(__VA_ARGS__) |
| #define | vluxei8_v_i8mf2_m(...) __riscv_vluxei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i8mf4(...) __riscv_vluxei8_v_i8mf4(__VA_ARGS__) |
| #define | vluxei8_v_i8mf4_m(...) __riscv_vluxei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxei8_v_i8mf8(...) __riscv_vluxei8_v_i8mf8(__VA_ARGS__) |
| #define | vluxei8_v_i8mf8_m(...) __riscv_vluxei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u16m1(...) __riscv_vluxei8_v_u16m1(__VA_ARGS__) |
| #define | vluxei8_v_u16m1_m(...) __riscv_vluxei8_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u16m2(...) __riscv_vluxei8_v_u16m2(__VA_ARGS__) |
| #define | vluxei8_v_u16m2_m(...) __riscv_vluxei8_v_u16m2_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u16m4(...) __riscv_vluxei8_v_u16m4(__VA_ARGS__) |
| #define | vluxei8_v_u16m4_m(...) __riscv_vluxei8_v_u16m4_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u16m8(...) __riscv_vluxei8_v_u16m8(__VA_ARGS__) |
| #define | vluxei8_v_u16m8_m(...) __riscv_vluxei8_v_u16m8_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u16mf2(...) __riscv_vluxei8_v_u16mf2(__VA_ARGS__) |
| #define | vluxei8_v_u16mf2_m(...) __riscv_vluxei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u16mf4(...) __riscv_vluxei8_v_u16mf4(__VA_ARGS__) |
| #define | vluxei8_v_u16mf4_m(...) __riscv_vluxei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u32m1(...) __riscv_vluxei8_v_u32m1(__VA_ARGS__) |
| #define | vluxei8_v_u32m1_m(...) __riscv_vluxei8_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u32m2(...) __riscv_vluxei8_v_u32m2(__VA_ARGS__) |
| #define | vluxei8_v_u32m2_m(...) __riscv_vluxei8_v_u32m2_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u32m4(...) __riscv_vluxei8_v_u32m4(__VA_ARGS__) |
| #define | vluxei8_v_u32m4_m(...) __riscv_vluxei8_v_u32m4_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u32m8(...) __riscv_vluxei8_v_u32m8(__VA_ARGS__) |
| #define | vluxei8_v_u32m8_m(...) __riscv_vluxei8_v_u32m8_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u32mf2(...) __riscv_vluxei8_v_u32mf2(__VA_ARGS__) |
| #define | vluxei8_v_u32mf2_m(...) __riscv_vluxei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u64m1(...) __riscv_vluxei8_v_u64m1(__VA_ARGS__) |
| #define | vluxei8_v_u64m1_m(...) __riscv_vluxei8_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u64m2(...) __riscv_vluxei8_v_u64m2(__VA_ARGS__) |
| #define | vluxei8_v_u64m2_m(...) __riscv_vluxei8_v_u64m2_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u64m4(...) __riscv_vluxei8_v_u64m4(__VA_ARGS__) |
| #define | vluxei8_v_u64m4_m(...) __riscv_vluxei8_v_u64m4_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u64m8(...) __riscv_vluxei8_v_u64m8(__VA_ARGS__) |
| #define | vluxei8_v_u64m8_m(...) __riscv_vluxei8_v_u64m8_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u8m1(...) __riscv_vluxei8_v_u8m1(__VA_ARGS__) |
| #define | vluxei8_v_u8m1_m(...) __riscv_vluxei8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u8m2(...) __riscv_vluxei8_v_u8m2(__VA_ARGS__) |
| #define | vluxei8_v_u8m2_m(...) __riscv_vluxei8_v_u8m2_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u8m4(...) __riscv_vluxei8_v_u8m4(__VA_ARGS__) |
| #define | vluxei8_v_u8m4_m(...) __riscv_vluxei8_v_u8m4_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u8m8(...) __riscv_vluxei8_v_u8m8(__VA_ARGS__) |
| #define | vluxei8_v_u8m8_m(...) __riscv_vluxei8_v_u8m8_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u8mf2(...) __riscv_vluxei8_v_u8mf2(__VA_ARGS__) |
| #define | vluxei8_v_u8mf2_m(...) __riscv_vluxei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u8mf4(...) __riscv_vluxei8_v_u8mf4(__VA_ARGS__) |
| #define | vluxei8_v_u8mf4_m(...) __riscv_vluxei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxei8_v_u8mf8(...) __riscv_vluxei8_v_u8mf8(__VA_ARGS__) |
| #define | vluxei8_v_u8mf8_m(...) __riscv_vluxei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f16m1(...) __riscv_vluxseg2ei16_v_f16m1(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f16m1_m(...) __riscv_vluxseg2ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f16m2(...) __riscv_vluxseg2ei16_v_f16m2(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f16m2_m(...) __riscv_vluxseg2ei16_v_f16m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f16m4(...) __riscv_vluxseg2ei16_v_f16m4(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f16m4_m(...) __riscv_vluxseg2ei16_v_f16m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f16mf2(...) __riscv_vluxseg2ei16_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f16mf2_m(...) __riscv_vluxseg2ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f16mf4(...) __riscv_vluxseg2ei16_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f16mf4_m(...) __riscv_vluxseg2ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f32m1(...) __riscv_vluxseg2ei16_v_f32m1(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f32m1_m(...) __riscv_vluxseg2ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f32m2(...) __riscv_vluxseg2ei16_v_f32m2(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f32m2_m(...) __riscv_vluxseg2ei16_v_f32m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f32m4(...) __riscv_vluxseg2ei16_v_f32m4(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f32m4_m(...) __riscv_vluxseg2ei16_v_f32m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f32mf2(...) __riscv_vluxseg2ei16_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f32mf2_m(...) __riscv_vluxseg2ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f64m1(...) __riscv_vluxseg2ei16_v_f64m1(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f64m1_m(...) __riscv_vluxseg2ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f64m2(...) __riscv_vluxseg2ei16_v_f64m2(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f64m2_m(...) __riscv_vluxseg2ei16_v_f64m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f64m4(...) __riscv_vluxseg2ei16_v_f64m4(__VA_ARGS__) |
| #define | vluxseg2ei16_v_f64m4_m(...) __riscv_vluxseg2ei16_v_f64m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i16m1(...) __riscv_vluxseg2ei16_v_i16m1(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i16m1_m(...) __riscv_vluxseg2ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i16m2(...) __riscv_vluxseg2ei16_v_i16m2(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i16m2_m(...) __riscv_vluxseg2ei16_v_i16m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i16m4(...) __riscv_vluxseg2ei16_v_i16m4(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i16m4_m(...) __riscv_vluxseg2ei16_v_i16m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i16mf2(...) __riscv_vluxseg2ei16_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i16mf2_m(...) __riscv_vluxseg2ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i16mf4(...) __riscv_vluxseg2ei16_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i16mf4_m(...) __riscv_vluxseg2ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i32m1(...) __riscv_vluxseg2ei16_v_i32m1(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i32m1_m(...) __riscv_vluxseg2ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i32m2(...) __riscv_vluxseg2ei16_v_i32m2(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i32m2_m(...) __riscv_vluxseg2ei16_v_i32m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i32m4(...) __riscv_vluxseg2ei16_v_i32m4(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i32m4_m(...) __riscv_vluxseg2ei16_v_i32m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i32mf2(...) __riscv_vluxseg2ei16_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i32mf2_m(...) __riscv_vluxseg2ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i64m1(...) __riscv_vluxseg2ei16_v_i64m1(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i64m1_m(...) __riscv_vluxseg2ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i64m2(...) __riscv_vluxseg2ei16_v_i64m2(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i64m2_m(...) __riscv_vluxseg2ei16_v_i64m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i64m4(...) __riscv_vluxseg2ei16_v_i64m4(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i64m4_m(...) __riscv_vluxseg2ei16_v_i64m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i8m1(...) __riscv_vluxseg2ei16_v_i8m1(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i8m1_m(...) __riscv_vluxseg2ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i8m2(...) __riscv_vluxseg2ei16_v_i8m2(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i8m2_m(...) __riscv_vluxseg2ei16_v_i8m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i8m4(...) __riscv_vluxseg2ei16_v_i8m4(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i8m4_m(...) __riscv_vluxseg2ei16_v_i8m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i8mf2(...) __riscv_vluxseg2ei16_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i8mf2_m(...) __riscv_vluxseg2ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i8mf4(...) __riscv_vluxseg2ei16_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i8mf4_m(...) __riscv_vluxseg2ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i8mf8(...) __riscv_vluxseg2ei16_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg2ei16_v_i8mf8_m(...) __riscv_vluxseg2ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u16m1(...) __riscv_vluxseg2ei16_v_u16m1(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u16m1_m(...) __riscv_vluxseg2ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u16m2(...) __riscv_vluxseg2ei16_v_u16m2(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u16m2_m(...) __riscv_vluxseg2ei16_v_u16m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u16m4(...) __riscv_vluxseg2ei16_v_u16m4(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u16m4_m(...) __riscv_vluxseg2ei16_v_u16m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u16mf2(...) __riscv_vluxseg2ei16_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u16mf2_m(...) __riscv_vluxseg2ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u16mf4(...) __riscv_vluxseg2ei16_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u16mf4_m(...) __riscv_vluxseg2ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u32m1(...) __riscv_vluxseg2ei16_v_u32m1(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u32m1_m(...) __riscv_vluxseg2ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u32m2(...) __riscv_vluxseg2ei16_v_u32m2(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u32m2_m(...) __riscv_vluxseg2ei16_v_u32m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u32m4(...) __riscv_vluxseg2ei16_v_u32m4(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u32m4_m(...) __riscv_vluxseg2ei16_v_u32m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u32mf2(...) __riscv_vluxseg2ei16_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u32mf2_m(...) __riscv_vluxseg2ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u64m1(...) __riscv_vluxseg2ei16_v_u64m1(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u64m1_m(...) __riscv_vluxseg2ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u64m2(...) __riscv_vluxseg2ei16_v_u64m2(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u64m2_m(...) __riscv_vluxseg2ei16_v_u64m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u64m4(...) __riscv_vluxseg2ei16_v_u64m4(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u64m4_m(...) __riscv_vluxseg2ei16_v_u64m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u8m1(...) __riscv_vluxseg2ei16_v_u8m1(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u8m1_m(...) __riscv_vluxseg2ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u8m2(...) __riscv_vluxseg2ei16_v_u8m2(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u8m2_m(...) __riscv_vluxseg2ei16_v_u8m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u8m4(...) __riscv_vluxseg2ei16_v_u8m4(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u8m4_m(...) __riscv_vluxseg2ei16_v_u8m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u8mf2(...) __riscv_vluxseg2ei16_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u8mf2_m(...) __riscv_vluxseg2ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u8mf4(...) __riscv_vluxseg2ei16_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u8mf4_m(...) __riscv_vluxseg2ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u8mf8(...) __riscv_vluxseg2ei16_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg2ei16_v_u8mf8_m(...) __riscv_vluxseg2ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f16m1(...) __riscv_vluxseg2ei32_v_f16m1(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f16m1_m(...) __riscv_vluxseg2ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f16m2(...) __riscv_vluxseg2ei32_v_f16m2(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f16m2_m(...) __riscv_vluxseg2ei32_v_f16m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f16m4(...) __riscv_vluxseg2ei32_v_f16m4(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f16m4_m(...) __riscv_vluxseg2ei32_v_f16m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f16mf2(...) __riscv_vluxseg2ei32_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f16mf2_m(...) __riscv_vluxseg2ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f16mf4(...) __riscv_vluxseg2ei32_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f16mf4_m(...) __riscv_vluxseg2ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f32m1(...) __riscv_vluxseg2ei32_v_f32m1(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f32m1_m(...) __riscv_vluxseg2ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f32m2(...) __riscv_vluxseg2ei32_v_f32m2(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f32m2_m(...) __riscv_vluxseg2ei32_v_f32m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f32m4(...) __riscv_vluxseg2ei32_v_f32m4(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f32m4_m(...) __riscv_vluxseg2ei32_v_f32m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f32mf2(...) __riscv_vluxseg2ei32_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f32mf2_m(...) __riscv_vluxseg2ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f64m1(...) __riscv_vluxseg2ei32_v_f64m1(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f64m1_m(...) __riscv_vluxseg2ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f64m2(...) __riscv_vluxseg2ei32_v_f64m2(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f64m2_m(...) __riscv_vluxseg2ei32_v_f64m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f64m4(...) __riscv_vluxseg2ei32_v_f64m4(__VA_ARGS__) |
| #define | vluxseg2ei32_v_f64m4_m(...) __riscv_vluxseg2ei32_v_f64m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i16m1(...) __riscv_vluxseg2ei32_v_i16m1(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i16m1_m(...) __riscv_vluxseg2ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i16m2(...) __riscv_vluxseg2ei32_v_i16m2(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i16m2_m(...) __riscv_vluxseg2ei32_v_i16m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i16m4(...) __riscv_vluxseg2ei32_v_i16m4(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i16m4_m(...) __riscv_vluxseg2ei32_v_i16m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i16mf2(...) __riscv_vluxseg2ei32_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i16mf2_m(...) __riscv_vluxseg2ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i16mf4(...) __riscv_vluxseg2ei32_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i16mf4_m(...) __riscv_vluxseg2ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i32m1(...) __riscv_vluxseg2ei32_v_i32m1(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i32m1_m(...) __riscv_vluxseg2ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i32m2(...) __riscv_vluxseg2ei32_v_i32m2(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i32m2_m(...) __riscv_vluxseg2ei32_v_i32m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i32m4(...) __riscv_vluxseg2ei32_v_i32m4(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i32m4_m(...) __riscv_vluxseg2ei32_v_i32m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i32mf2(...) __riscv_vluxseg2ei32_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i32mf2_m(...) __riscv_vluxseg2ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i64m1(...) __riscv_vluxseg2ei32_v_i64m1(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i64m1_m(...) __riscv_vluxseg2ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i64m2(...) __riscv_vluxseg2ei32_v_i64m2(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i64m2_m(...) __riscv_vluxseg2ei32_v_i64m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i64m4(...) __riscv_vluxseg2ei32_v_i64m4(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i64m4_m(...) __riscv_vluxseg2ei32_v_i64m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i8m1(...) __riscv_vluxseg2ei32_v_i8m1(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i8m1_m(...) __riscv_vluxseg2ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i8m2(...) __riscv_vluxseg2ei32_v_i8m2(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i8m2_m(...) __riscv_vluxseg2ei32_v_i8m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i8mf2(...) __riscv_vluxseg2ei32_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i8mf2_m(...) __riscv_vluxseg2ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i8mf4(...) __riscv_vluxseg2ei32_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i8mf4_m(...) __riscv_vluxseg2ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i8mf8(...) __riscv_vluxseg2ei32_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg2ei32_v_i8mf8_m(...) __riscv_vluxseg2ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u16m1(...) __riscv_vluxseg2ei32_v_u16m1(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u16m1_m(...) __riscv_vluxseg2ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u16m2(...) __riscv_vluxseg2ei32_v_u16m2(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u16m2_m(...) __riscv_vluxseg2ei32_v_u16m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u16m4(...) __riscv_vluxseg2ei32_v_u16m4(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u16m4_m(...) __riscv_vluxseg2ei32_v_u16m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u16mf2(...) __riscv_vluxseg2ei32_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u16mf2_m(...) __riscv_vluxseg2ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u16mf4(...) __riscv_vluxseg2ei32_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u16mf4_m(...) __riscv_vluxseg2ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u32m1(...) __riscv_vluxseg2ei32_v_u32m1(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u32m1_m(...) __riscv_vluxseg2ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u32m2(...) __riscv_vluxseg2ei32_v_u32m2(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u32m2_m(...) __riscv_vluxseg2ei32_v_u32m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u32m4(...) __riscv_vluxseg2ei32_v_u32m4(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u32m4_m(...) __riscv_vluxseg2ei32_v_u32m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u32mf2(...) __riscv_vluxseg2ei32_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u32mf2_m(...) __riscv_vluxseg2ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u64m1(...) __riscv_vluxseg2ei32_v_u64m1(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u64m1_m(...) __riscv_vluxseg2ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u64m2(...) __riscv_vluxseg2ei32_v_u64m2(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u64m2_m(...) __riscv_vluxseg2ei32_v_u64m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u64m4(...) __riscv_vluxseg2ei32_v_u64m4(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u64m4_m(...) __riscv_vluxseg2ei32_v_u64m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u8m1(...) __riscv_vluxseg2ei32_v_u8m1(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u8m1_m(...) __riscv_vluxseg2ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u8m2(...) __riscv_vluxseg2ei32_v_u8m2(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u8m2_m(...) __riscv_vluxseg2ei32_v_u8m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u8mf2(...) __riscv_vluxseg2ei32_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u8mf2_m(...) __riscv_vluxseg2ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u8mf4(...) __riscv_vluxseg2ei32_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u8mf4_m(...) __riscv_vluxseg2ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u8mf8(...) __riscv_vluxseg2ei32_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg2ei32_v_u8mf8_m(...) __riscv_vluxseg2ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f16m1(...) __riscv_vluxseg2ei64_v_f16m1(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f16m1_m(...) __riscv_vluxseg2ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f16m2(...) __riscv_vluxseg2ei64_v_f16m2(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f16m2_m(...) __riscv_vluxseg2ei64_v_f16m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f16mf2(...) __riscv_vluxseg2ei64_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f16mf2_m(...) __riscv_vluxseg2ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f16mf4(...) __riscv_vluxseg2ei64_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f16mf4_m(...) __riscv_vluxseg2ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f32m1(...) __riscv_vluxseg2ei64_v_f32m1(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f32m1_m(...) __riscv_vluxseg2ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f32m2(...) __riscv_vluxseg2ei64_v_f32m2(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f32m2_m(...) __riscv_vluxseg2ei64_v_f32m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f32m4(...) __riscv_vluxseg2ei64_v_f32m4(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f32m4_m(...) __riscv_vluxseg2ei64_v_f32m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f32mf2(...) __riscv_vluxseg2ei64_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f32mf2_m(...) __riscv_vluxseg2ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f64m1(...) __riscv_vluxseg2ei64_v_f64m1(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f64m1_m(...) __riscv_vluxseg2ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f64m2(...) __riscv_vluxseg2ei64_v_f64m2(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f64m2_m(...) __riscv_vluxseg2ei64_v_f64m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f64m4(...) __riscv_vluxseg2ei64_v_f64m4(__VA_ARGS__) |
| #define | vluxseg2ei64_v_f64m4_m(...) __riscv_vluxseg2ei64_v_f64m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i16m1(...) __riscv_vluxseg2ei64_v_i16m1(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i16m1_m(...) __riscv_vluxseg2ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i16m2(...) __riscv_vluxseg2ei64_v_i16m2(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i16m2_m(...) __riscv_vluxseg2ei64_v_i16m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i16mf2(...) __riscv_vluxseg2ei64_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i16mf2_m(...) __riscv_vluxseg2ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i16mf4(...) __riscv_vluxseg2ei64_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i16mf4_m(...) __riscv_vluxseg2ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i32m1(...) __riscv_vluxseg2ei64_v_i32m1(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i32m1_m(...) __riscv_vluxseg2ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i32m2(...) __riscv_vluxseg2ei64_v_i32m2(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i32m2_m(...) __riscv_vluxseg2ei64_v_i32m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i32m4(...) __riscv_vluxseg2ei64_v_i32m4(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i32m4_m(...) __riscv_vluxseg2ei64_v_i32m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i32mf2(...) __riscv_vluxseg2ei64_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i32mf2_m(...) __riscv_vluxseg2ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i64m1(...) __riscv_vluxseg2ei64_v_i64m1(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i64m1_m(...) __riscv_vluxseg2ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i64m2(...) __riscv_vluxseg2ei64_v_i64m2(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i64m2_m(...) __riscv_vluxseg2ei64_v_i64m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i64m4(...) __riscv_vluxseg2ei64_v_i64m4(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i64m4_m(...) __riscv_vluxseg2ei64_v_i64m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i8m1(...) __riscv_vluxseg2ei64_v_i8m1(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i8m1_m(...) __riscv_vluxseg2ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i8mf2(...) __riscv_vluxseg2ei64_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i8mf2_m(...) __riscv_vluxseg2ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i8mf4(...) __riscv_vluxseg2ei64_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i8mf4_m(...) __riscv_vluxseg2ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i8mf8(...) __riscv_vluxseg2ei64_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg2ei64_v_i8mf8_m(...) __riscv_vluxseg2ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u16m1(...) __riscv_vluxseg2ei64_v_u16m1(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u16m1_m(...) __riscv_vluxseg2ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u16m2(...) __riscv_vluxseg2ei64_v_u16m2(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u16m2_m(...) __riscv_vluxseg2ei64_v_u16m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u16mf2(...) __riscv_vluxseg2ei64_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u16mf2_m(...) __riscv_vluxseg2ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u16mf4(...) __riscv_vluxseg2ei64_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u16mf4_m(...) __riscv_vluxseg2ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u32m1(...) __riscv_vluxseg2ei64_v_u32m1(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u32m1_m(...) __riscv_vluxseg2ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u32m2(...) __riscv_vluxseg2ei64_v_u32m2(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u32m2_m(...) __riscv_vluxseg2ei64_v_u32m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u32m4(...) __riscv_vluxseg2ei64_v_u32m4(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u32m4_m(...) __riscv_vluxseg2ei64_v_u32m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u32mf2(...) __riscv_vluxseg2ei64_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u32mf2_m(...) __riscv_vluxseg2ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u64m1(...) __riscv_vluxseg2ei64_v_u64m1(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u64m1_m(...) __riscv_vluxseg2ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u64m2(...) __riscv_vluxseg2ei64_v_u64m2(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u64m2_m(...) __riscv_vluxseg2ei64_v_u64m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u64m4(...) __riscv_vluxseg2ei64_v_u64m4(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u64m4_m(...) __riscv_vluxseg2ei64_v_u64m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u8m1(...) __riscv_vluxseg2ei64_v_u8m1(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u8m1_m(...) __riscv_vluxseg2ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u8mf2(...) __riscv_vluxseg2ei64_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u8mf2_m(...) __riscv_vluxseg2ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u8mf4(...) __riscv_vluxseg2ei64_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u8mf4_m(...) __riscv_vluxseg2ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u8mf8(...) __riscv_vluxseg2ei64_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg2ei64_v_u8mf8_m(...) __riscv_vluxseg2ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f16m1(...) __riscv_vluxseg2ei8_v_f16m1(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f16m1_m(...) __riscv_vluxseg2ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f16m2(...) __riscv_vluxseg2ei8_v_f16m2(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f16m2_m(...) __riscv_vluxseg2ei8_v_f16m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f16m4(...) __riscv_vluxseg2ei8_v_f16m4(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f16m4_m(...) __riscv_vluxseg2ei8_v_f16m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f16mf2(...) __riscv_vluxseg2ei8_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f16mf2_m(...) __riscv_vluxseg2ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f16mf4(...) __riscv_vluxseg2ei8_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f16mf4_m(...) __riscv_vluxseg2ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f32m1(...) __riscv_vluxseg2ei8_v_f32m1(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f32m1_m(...) __riscv_vluxseg2ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f32m2(...) __riscv_vluxseg2ei8_v_f32m2(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f32m2_m(...) __riscv_vluxseg2ei8_v_f32m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f32m4(...) __riscv_vluxseg2ei8_v_f32m4(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f32m4_m(...) __riscv_vluxseg2ei8_v_f32m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f32mf2(...) __riscv_vluxseg2ei8_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f32mf2_m(...) __riscv_vluxseg2ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f64m1(...) __riscv_vluxseg2ei8_v_f64m1(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f64m1_m(...) __riscv_vluxseg2ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f64m2(...) __riscv_vluxseg2ei8_v_f64m2(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f64m2_m(...) __riscv_vluxseg2ei8_v_f64m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f64m4(...) __riscv_vluxseg2ei8_v_f64m4(__VA_ARGS__) |
| #define | vluxseg2ei8_v_f64m4_m(...) __riscv_vluxseg2ei8_v_f64m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i16m1(...) __riscv_vluxseg2ei8_v_i16m1(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i16m1_m(...) __riscv_vluxseg2ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i16m2(...) __riscv_vluxseg2ei8_v_i16m2(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i16m2_m(...) __riscv_vluxseg2ei8_v_i16m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i16m4(...) __riscv_vluxseg2ei8_v_i16m4(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i16m4_m(...) __riscv_vluxseg2ei8_v_i16m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i16mf2(...) __riscv_vluxseg2ei8_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i16mf2_m(...) __riscv_vluxseg2ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i16mf4(...) __riscv_vluxseg2ei8_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i16mf4_m(...) __riscv_vluxseg2ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i32m1(...) __riscv_vluxseg2ei8_v_i32m1(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i32m1_m(...) __riscv_vluxseg2ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i32m2(...) __riscv_vluxseg2ei8_v_i32m2(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i32m2_m(...) __riscv_vluxseg2ei8_v_i32m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i32m4(...) __riscv_vluxseg2ei8_v_i32m4(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i32m4_m(...) __riscv_vluxseg2ei8_v_i32m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i32mf2(...) __riscv_vluxseg2ei8_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i32mf2_m(...) __riscv_vluxseg2ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i64m1(...) __riscv_vluxseg2ei8_v_i64m1(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i64m1_m(...) __riscv_vluxseg2ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i64m2(...) __riscv_vluxseg2ei8_v_i64m2(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i64m2_m(...) __riscv_vluxseg2ei8_v_i64m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i64m4(...) __riscv_vluxseg2ei8_v_i64m4(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i64m4_m(...) __riscv_vluxseg2ei8_v_i64m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i8m1(...) __riscv_vluxseg2ei8_v_i8m1(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i8m1_m(...) __riscv_vluxseg2ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i8m2(...) __riscv_vluxseg2ei8_v_i8m2(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i8m2_m(...) __riscv_vluxseg2ei8_v_i8m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i8m4(...) __riscv_vluxseg2ei8_v_i8m4(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i8m4_m(...) __riscv_vluxseg2ei8_v_i8m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i8mf2(...) __riscv_vluxseg2ei8_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i8mf2_m(...) __riscv_vluxseg2ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i8mf4(...) __riscv_vluxseg2ei8_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i8mf4_m(...) __riscv_vluxseg2ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i8mf8(...) __riscv_vluxseg2ei8_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg2ei8_v_i8mf8_m(...) __riscv_vluxseg2ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u16m1(...) __riscv_vluxseg2ei8_v_u16m1(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u16m1_m(...) __riscv_vluxseg2ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u16m2(...) __riscv_vluxseg2ei8_v_u16m2(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u16m2_m(...) __riscv_vluxseg2ei8_v_u16m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u16m4(...) __riscv_vluxseg2ei8_v_u16m4(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u16m4_m(...) __riscv_vluxseg2ei8_v_u16m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u16mf2(...) __riscv_vluxseg2ei8_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u16mf2_m(...) __riscv_vluxseg2ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u16mf4(...) __riscv_vluxseg2ei8_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u16mf4_m(...) __riscv_vluxseg2ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u32m1(...) __riscv_vluxseg2ei8_v_u32m1(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u32m1_m(...) __riscv_vluxseg2ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u32m2(...) __riscv_vluxseg2ei8_v_u32m2(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u32m2_m(...) __riscv_vluxseg2ei8_v_u32m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u32m4(...) __riscv_vluxseg2ei8_v_u32m4(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u32m4_m(...) __riscv_vluxseg2ei8_v_u32m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u32mf2(...) __riscv_vluxseg2ei8_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u32mf2_m(...) __riscv_vluxseg2ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u64m1(...) __riscv_vluxseg2ei8_v_u64m1(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u64m1_m(...) __riscv_vluxseg2ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u64m2(...) __riscv_vluxseg2ei8_v_u64m2(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u64m2_m(...) __riscv_vluxseg2ei8_v_u64m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u64m4(...) __riscv_vluxseg2ei8_v_u64m4(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u64m4_m(...) __riscv_vluxseg2ei8_v_u64m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u8m1(...) __riscv_vluxseg2ei8_v_u8m1(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u8m1_m(...) __riscv_vluxseg2ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u8m2(...) __riscv_vluxseg2ei8_v_u8m2(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u8m2_m(...) __riscv_vluxseg2ei8_v_u8m2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u8m4(...) __riscv_vluxseg2ei8_v_u8m4(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u8m4_m(...) __riscv_vluxseg2ei8_v_u8m4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u8mf2(...) __riscv_vluxseg2ei8_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u8mf2_m(...) __riscv_vluxseg2ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u8mf4(...) __riscv_vluxseg2ei8_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u8mf4_m(...) __riscv_vluxseg2ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u8mf8(...) __riscv_vluxseg2ei8_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg2ei8_v_u8mf8_m(...) __riscv_vluxseg2ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_f16m1(...) __riscv_vluxseg3ei16_v_f16m1(__VA_ARGS__) |
| #define | vluxseg3ei16_v_f16m1_m(...) __riscv_vluxseg3ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_f16m2(...) __riscv_vluxseg3ei16_v_f16m2(__VA_ARGS__) |
| #define | vluxseg3ei16_v_f16m2_m(...) __riscv_vluxseg3ei16_v_f16m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_f16mf2(...) __riscv_vluxseg3ei16_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg3ei16_v_f16mf2_m(...) __riscv_vluxseg3ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_f16mf4(...) __riscv_vluxseg3ei16_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg3ei16_v_f16mf4_m(...) __riscv_vluxseg3ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_f32m1(...) __riscv_vluxseg3ei16_v_f32m1(__VA_ARGS__) |
| #define | vluxseg3ei16_v_f32m1_m(...) __riscv_vluxseg3ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_f32m2(...) __riscv_vluxseg3ei16_v_f32m2(__VA_ARGS__) |
| #define | vluxseg3ei16_v_f32m2_m(...) __riscv_vluxseg3ei16_v_f32m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_f32mf2(...) __riscv_vluxseg3ei16_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg3ei16_v_f32mf2_m(...) __riscv_vluxseg3ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_f64m1(...) __riscv_vluxseg3ei16_v_f64m1(__VA_ARGS__) |
| #define | vluxseg3ei16_v_f64m1_m(...) __riscv_vluxseg3ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_f64m2(...) __riscv_vluxseg3ei16_v_f64m2(__VA_ARGS__) |
| #define | vluxseg3ei16_v_f64m2_m(...) __riscv_vluxseg3ei16_v_f64m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i16m1(...) __riscv_vluxseg3ei16_v_i16m1(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i16m1_m(...) __riscv_vluxseg3ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i16m2(...) __riscv_vluxseg3ei16_v_i16m2(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i16m2_m(...) __riscv_vluxseg3ei16_v_i16m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i16mf2(...) __riscv_vluxseg3ei16_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i16mf2_m(...) __riscv_vluxseg3ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i16mf4(...) __riscv_vluxseg3ei16_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i16mf4_m(...) __riscv_vluxseg3ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i32m1(...) __riscv_vluxseg3ei16_v_i32m1(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i32m1_m(...) __riscv_vluxseg3ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i32m2(...) __riscv_vluxseg3ei16_v_i32m2(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i32m2_m(...) __riscv_vluxseg3ei16_v_i32m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i32mf2(...) __riscv_vluxseg3ei16_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i32mf2_m(...) __riscv_vluxseg3ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i64m1(...) __riscv_vluxseg3ei16_v_i64m1(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i64m1_m(...) __riscv_vluxseg3ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i64m2(...) __riscv_vluxseg3ei16_v_i64m2(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i64m2_m(...) __riscv_vluxseg3ei16_v_i64m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i8m1(...) __riscv_vluxseg3ei16_v_i8m1(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i8m1_m(...) __riscv_vluxseg3ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i8m2(...) __riscv_vluxseg3ei16_v_i8m2(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i8m2_m(...) __riscv_vluxseg3ei16_v_i8m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i8mf2(...) __riscv_vluxseg3ei16_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i8mf2_m(...) __riscv_vluxseg3ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i8mf4(...) __riscv_vluxseg3ei16_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i8mf4_m(...) __riscv_vluxseg3ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i8mf8(...) __riscv_vluxseg3ei16_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg3ei16_v_i8mf8_m(...) __riscv_vluxseg3ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u16m1(...) __riscv_vluxseg3ei16_v_u16m1(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u16m1_m(...) __riscv_vluxseg3ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u16m2(...) __riscv_vluxseg3ei16_v_u16m2(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u16m2_m(...) __riscv_vluxseg3ei16_v_u16m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u16mf2(...) __riscv_vluxseg3ei16_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u16mf2_m(...) __riscv_vluxseg3ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u16mf4(...) __riscv_vluxseg3ei16_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u16mf4_m(...) __riscv_vluxseg3ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u32m1(...) __riscv_vluxseg3ei16_v_u32m1(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u32m1_m(...) __riscv_vluxseg3ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u32m2(...) __riscv_vluxseg3ei16_v_u32m2(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u32m2_m(...) __riscv_vluxseg3ei16_v_u32m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u32mf2(...) __riscv_vluxseg3ei16_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u32mf2_m(...) __riscv_vluxseg3ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u64m1(...) __riscv_vluxseg3ei16_v_u64m1(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u64m1_m(...) __riscv_vluxseg3ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u64m2(...) __riscv_vluxseg3ei16_v_u64m2(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u64m2_m(...) __riscv_vluxseg3ei16_v_u64m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u8m1(...) __riscv_vluxseg3ei16_v_u8m1(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u8m1_m(...) __riscv_vluxseg3ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u8m2(...) __riscv_vluxseg3ei16_v_u8m2(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u8m2_m(...) __riscv_vluxseg3ei16_v_u8m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u8mf2(...) __riscv_vluxseg3ei16_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u8mf2_m(...) __riscv_vluxseg3ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u8mf4(...) __riscv_vluxseg3ei16_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u8mf4_m(...) __riscv_vluxseg3ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u8mf8(...) __riscv_vluxseg3ei16_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg3ei16_v_u8mf8_m(...) __riscv_vluxseg3ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_f16m1(...) __riscv_vluxseg3ei32_v_f16m1(__VA_ARGS__) |
| #define | vluxseg3ei32_v_f16m1_m(...) __riscv_vluxseg3ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_f16m2(...) __riscv_vluxseg3ei32_v_f16m2(__VA_ARGS__) |
| #define | vluxseg3ei32_v_f16m2_m(...) __riscv_vluxseg3ei32_v_f16m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_f16mf2(...) __riscv_vluxseg3ei32_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg3ei32_v_f16mf2_m(...) __riscv_vluxseg3ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_f16mf4(...) __riscv_vluxseg3ei32_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg3ei32_v_f16mf4_m(...) __riscv_vluxseg3ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_f32m1(...) __riscv_vluxseg3ei32_v_f32m1(__VA_ARGS__) |
| #define | vluxseg3ei32_v_f32m1_m(...) __riscv_vluxseg3ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_f32m2(...) __riscv_vluxseg3ei32_v_f32m2(__VA_ARGS__) |
| #define | vluxseg3ei32_v_f32m2_m(...) __riscv_vluxseg3ei32_v_f32m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_f32mf2(...) __riscv_vluxseg3ei32_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg3ei32_v_f32mf2_m(...) __riscv_vluxseg3ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_f64m1(...) __riscv_vluxseg3ei32_v_f64m1(__VA_ARGS__) |
| #define | vluxseg3ei32_v_f64m1_m(...) __riscv_vluxseg3ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_f64m2(...) __riscv_vluxseg3ei32_v_f64m2(__VA_ARGS__) |
| #define | vluxseg3ei32_v_f64m2_m(...) __riscv_vluxseg3ei32_v_f64m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i16m1(...) __riscv_vluxseg3ei32_v_i16m1(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i16m1_m(...) __riscv_vluxseg3ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i16m2(...) __riscv_vluxseg3ei32_v_i16m2(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i16m2_m(...) __riscv_vluxseg3ei32_v_i16m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i16mf2(...) __riscv_vluxseg3ei32_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i16mf2_m(...) __riscv_vluxseg3ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i16mf4(...) __riscv_vluxseg3ei32_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i16mf4_m(...) __riscv_vluxseg3ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i32m1(...) __riscv_vluxseg3ei32_v_i32m1(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i32m1_m(...) __riscv_vluxseg3ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i32m2(...) __riscv_vluxseg3ei32_v_i32m2(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i32m2_m(...) __riscv_vluxseg3ei32_v_i32m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i32mf2(...) __riscv_vluxseg3ei32_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i32mf2_m(...) __riscv_vluxseg3ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i64m1(...) __riscv_vluxseg3ei32_v_i64m1(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i64m1_m(...) __riscv_vluxseg3ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i64m2(...) __riscv_vluxseg3ei32_v_i64m2(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i64m2_m(...) __riscv_vluxseg3ei32_v_i64m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i8m1(...) __riscv_vluxseg3ei32_v_i8m1(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i8m1_m(...) __riscv_vluxseg3ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i8m2(...) __riscv_vluxseg3ei32_v_i8m2(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i8m2_m(...) __riscv_vluxseg3ei32_v_i8m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i8mf2(...) __riscv_vluxseg3ei32_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i8mf2_m(...) __riscv_vluxseg3ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i8mf4(...) __riscv_vluxseg3ei32_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i8mf4_m(...) __riscv_vluxseg3ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i8mf8(...) __riscv_vluxseg3ei32_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg3ei32_v_i8mf8_m(...) __riscv_vluxseg3ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u16m1(...) __riscv_vluxseg3ei32_v_u16m1(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u16m1_m(...) __riscv_vluxseg3ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u16m2(...) __riscv_vluxseg3ei32_v_u16m2(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u16m2_m(...) __riscv_vluxseg3ei32_v_u16m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u16mf2(...) __riscv_vluxseg3ei32_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u16mf2_m(...) __riscv_vluxseg3ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u16mf4(...) __riscv_vluxseg3ei32_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u16mf4_m(...) __riscv_vluxseg3ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u32m1(...) __riscv_vluxseg3ei32_v_u32m1(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u32m1_m(...) __riscv_vluxseg3ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u32m2(...) __riscv_vluxseg3ei32_v_u32m2(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u32m2_m(...) __riscv_vluxseg3ei32_v_u32m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u32mf2(...) __riscv_vluxseg3ei32_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u32mf2_m(...) __riscv_vluxseg3ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u64m1(...) __riscv_vluxseg3ei32_v_u64m1(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u64m1_m(...) __riscv_vluxseg3ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u64m2(...) __riscv_vluxseg3ei32_v_u64m2(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u64m2_m(...) __riscv_vluxseg3ei32_v_u64m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u8m1(...) __riscv_vluxseg3ei32_v_u8m1(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u8m1_m(...) __riscv_vluxseg3ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u8m2(...) __riscv_vluxseg3ei32_v_u8m2(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u8m2_m(...) __riscv_vluxseg3ei32_v_u8m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u8mf2(...) __riscv_vluxseg3ei32_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u8mf2_m(...) __riscv_vluxseg3ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u8mf4(...) __riscv_vluxseg3ei32_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u8mf4_m(...) __riscv_vluxseg3ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u8mf8(...) __riscv_vluxseg3ei32_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg3ei32_v_u8mf8_m(...) __riscv_vluxseg3ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_f16m1(...) __riscv_vluxseg3ei64_v_f16m1(__VA_ARGS__) |
| #define | vluxseg3ei64_v_f16m1_m(...) __riscv_vluxseg3ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_f16m2(...) __riscv_vluxseg3ei64_v_f16m2(__VA_ARGS__) |
| #define | vluxseg3ei64_v_f16m2_m(...) __riscv_vluxseg3ei64_v_f16m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_f16mf2(...) __riscv_vluxseg3ei64_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg3ei64_v_f16mf2_m(...) __riscv_vluxseg3ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_f16mf4(...) __riscv_vluxseg3ei64_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg3ei64_v_f16mf4_m(...) __riscv_vluxseg3ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_f32m1(...) __riscv_vluxseg3ei64_v_f32m1(__VA_ARGS__) |
| #define | vluxseg3ei64_v_f32m1_m(...) __riscv_vluxseg3ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_f32m2(...) __riscv_vluxseg3ei64_v_f32m2(__VA_ARGS__) |
| #define | vluxseg3ei64_v_f32m2_m(...) __riscv_vluxseg3ei64_v_f32m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_f32mf2(...) __riscv_vluxseg3ei64_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg3ei64_v_f32mf2_m(...) __riscv_vluxseg3ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_f64m1(...) __riscv_vluxseg3ei64_v_f64m1(__VA_ARGS__) |
| #define | vluxseg3ei64_v_f64m1_m(...) __riscv_vluxseg3ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_f64m2(...) __riscv_vluxseg3ei64_v_f64m2(__VA_ARGS__) |
| #define | vluxseg3ei64_v_f64m2_m(...) __riscv_vluxseg3ei64_v_f64m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i16m1(...) __riscv_vluxseg3ei64_v_i16m1(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i16m1_m(...) __riscv_vluxseg3ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i16m2(...) __riscv_vluxseg3ei64_v_i16m2(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i16m2_m(...) __riscv_vluxseg3ei64_v_i16m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i16mf2(...) __riscv_vluxseg3ei64_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i16mf2_m(...) __riscv_vluxseg3ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i16mf4(...) __riscv_vluxseg3ei64_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i16mf4_m(...) __riscv_vluxseg3ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i32m1(...) __riscv_vluxseg3ei64_v_i32m1(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i32m1_m(...) __riscv_vluxseg3ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i32m2(...) __riscv_vluxseg3ei64_v_i32m2(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i32m2_m(...) __riscv_vluxseg3ei64_v_i32m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i32mf2(...) __riscv_vluxseg3ei64_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i32mf2_m(...) __riscv_vluxseg3ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i64m1(...) __riscv_vluxseg3ei64_v_i64m1(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i64m1_m(...) __riscv_vluxseg3ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i64m2(...) __riscv_vluxseg3ei64_v_i64m2(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i64m2_m(...) __riscv_vluxseg3ei64_v_i64m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i8m1(...) __riscv_vluxseg3ei64_v_i8m1(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i8m1_m(...) __riscv_vluxseg3ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i8mf2(...) __riscv_vluxseg3ei64_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i8mf2_m(...) __riscv_vluxseg3ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i8mf4(...) __riscv_vluxseg3ei64_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i8mf4_m(...) __riscv_vluxseg3ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i8mf8(...) __riscv_vluxseg3ei64_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg3ei64_v_i8mf8_m(...) __riscv_vluxseg3ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u16m1(...) __riscv_vluxseg3ei64_v_u16m1(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u16m1_m(...) __riscv_vluxseg3ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u16m2(...) __riscv_vluxseg3ei64_v_u16m2(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u16m2_m(...) __riscv_vluxseg3ei64_v_u16m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u16mf2(...) __riscv_vluxseg3ei64_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u16mf2_m(...) __riscv_vluxseg3ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u16mf4(...) __riscv_vluxseg3ei64_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u16mf4_m(...) __riscv_vluxseg3ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u32m1(...) __riscv_vluxseg3ei64_v_u32m1(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u32m1_m(...) __riscv_vluxseg3ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u32m2(...) __riscv_vluxseg3ei64_v_u32m2(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u32m2_m(...) __riscv_vluxseg3ei64_v_u32m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u32mf2(...) __riscv_vluxseg3ei64_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u32mf2_m(...) __riscv_vluxseg3ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u64m1(...) __riscv_vluxseg3ei64_v_u64m1(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u64m1_m(...) __riscv_vluxseg3ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u64m2(...) __riscv_vluxseg3ei64_v_u64m2(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u64m2_m(...) __riscv_vluxseg3ei64_v_u64m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u8m1(...) __riscv_vluxseg3ei64_v_u8m1(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u8m1_m(...) __riscv_vluxseg3ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u8mf2(...) __riscv_vluxseg3ei64_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u8mf2_m(...) __riscv_vluxseg3ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u8mf4(...) __riscv_vluxseg3ei64_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u8mf4_m(...) __riscv_vluxseg3ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u8mf8(...) __riscv_vluxseg3ei64_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg3ei64_v_u8mf8_m(...) __riscv_vluxseg3ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_f16m1(...) __riscv_vluxseg3ei8_v_f16m1(__VA_ARGS__) |
| #define | vluxseg3ei8_v_f16m1_m(...) __riscv_vluxseg3ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_f16m2(...) __riscv_vluxseg3ei8_v_f16m2(__VA_ARGS__) |
| #define | vluxseg3ei8_v_f16m2_m(...) __riscv_vluxseg3ei8_v_f16m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_f16mf2(...) __riscv_vluxseg3ei8_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg3ei8_v_f16mf2_m(...) __riscv_vluxseg3ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_f16mf4(...) __riscv_vluxseg3ei8_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg3ei8_v_f16mf4_m(...) __riscv_vluxseg3ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_f32m1(...) __riscv_vluxseg3ei8_v_f32m1(__VA_ARGS__) |
| #define | vluxseg3ei8_v_f32m1_m(...) __riscv_vluxseg3ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_f32m2(...) __riscv_vluxseg3ei8_v_f32m2(__VA_ARGS__) |
| #define | vluxseg3ei8_v_f32m2_m(...) __riscv_vluxseg3ei8_v_f32m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_f32mf2(...) __riscv_vluxseg3ei8_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg3ei8_v_f32mf2_m(...) __riscv_vluxseg3ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_f64m1(...) __riscv_vluxseg3ei8_v_f64m1(__VA_ARGS__) |
| #define | vluxseg3ei8_v_f64m1_m(...) __riscv_vluxseg3ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_f64m2(...) __riscv_vluxseg3ei8_v_f64m2(__VA_ARGS__) |
| #define | vluxseg3ei8_v_f64m2_m(...) __riscv_vluxseg3ei8_v_f64m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i16m1(...) __riscv_vluxseg3ei8_v_i16m1(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i16m1_m(...) __riscv_vluxseg3ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i16m2(...) __riscv_vluxseg3ei8_v_i16m2(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i16m2_m(...) __riscv_vluxseg3ei8_v_i16m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i16mf2(...) __riscv_vluxseg3ei8_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i16mf2_m(...) __riscv_vluxseg3ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i16mf4(...) __riscv_vluxseg3ei8_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i16mf4_m(...) __riscv_vluxseg3ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i32m1(...) __riscv_vluxseg3ei8_v_i32m1(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i32m1_m(...) __riscv_vluxseg3ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i32m2(...) __riscv_vluxseg3ei8_v_i32m2(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i32m2_m(...) __riscv_vluxseg3ei8_v_i32m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i32mf2(...) __riscv_vluxseg3ei8_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i32mf2_m(...) __riscv_vluxseg3ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i64m1(...) __riscv_vluxseg3ei8_v_i64m1(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i64m1_m(...) __riscv_vluxseg3ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i64m2(...) __riscv_vluxseg3ei8_v_i64m2(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i64m2_m(...) __riscv_vluxseg3ei8_v_i64m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i8m1(...) __riscv_vluxseg3ei8_v_i8m1(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i8m1_m(...) __riscv_vluxseg3ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i8m2(...) __riscv_vluxseg3ei8_v_i8m2(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i8m2_m(...) __riscv_vluxseg3ei8_v_i8m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i8mf2(...) __riscv_vluxseg3ei8_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i8mf2_m(...) __riscv_vluxseg3ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i8mf4(...) __riscv_vluxseg3ei8_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i8mf4_m(...) __riscv_vluxseg3ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i8mf8(...) __riscv_vluxseg3ei8_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg3ei8_v_i8mf8_m(...) __riscv_vluxseg3ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u16m1(...) __riscv_vluxseg3ei8_v_u16m1(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u16m1_m(...) __riscv_vluxseg3ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u16m2(...) __riscv_vluxseg3ei8_v_u16m2(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u16m2_m(...) __riscv_vluxseg3ei8_v_u16m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u16mf2(...) __riscv_vluxseg3ei8_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u16mf2_m(...) __riscv_vluxseg3ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u16mf4(...) __riscv_vluxseg3ei8_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u16mf4_m(...) __riscv_vluxseg3ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u32m1(...) __riscv_vluxseg3ei8_v_u32m1(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u32m1_m(...) __riscv_vluxseg3ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u32m2(...) __riscv_vluxseg3ei8_v_u32m2(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u32m2_m(...) __riscv_vluxseg3ei8_v_u32m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u32mf2(...) __riscv_vluxseg3ei8_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u32mf2_m(...) __riscv_vluxseg3ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u64m1(...) __riscv_vluxseg3ei8_v_u64m1(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u64m1_m(...) __riscv_vluxseg3ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u64m2(...) __riscv_vluxseg3ei8_v_u64m2(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u64m2_m(...) __riscv_vluxseg3ei8_v_u64m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u8m1(...) __riscv_vluxseg3ei8_v_u8m1(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u8m1_m(...) __riscv_vluxseg3ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u8m2(...) __riscv_vluxseg3ei8_v_u8m2(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u8m2_m(...) __riscv_vluxseg3ei8_v_u8m2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u8mf2(...) __riscv_vluxseg3ei8_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u8mf2_m(...) __riscv_vluxseg3ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u8mf4(...) __riscv_vluxseg3ei8_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u8mf4_m(...) __riscv_vluxseg3ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u8mf8(...) __riscv_vluxseg3ei8_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg3ei8_v_u8mf8_m(...) __riscv_vluxseg3ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_f16m1(...) __riscv_vluxseg4ei16_v_f16m1(__VA_ARGS__) |
| #define | vluxseg4ei16_v_f16m1_m(...) __riscv_vluxseg4ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_f16m2(...) __riscv_vluxseg4ei16_v_f16m2(__VA_ARGS__) |
| #define | vluxseg4ei16_v_f16m2_m(...) __riscv_vluxseg4ei16_v_f16m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_f16mf2(...) __riscv_vluxseg4ei16_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg4ei16_v_f16mf2_m(...) __riscv_vluxseg4ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_f16mf4(...) __riscv_vluxseg4ei16_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg4ei16_v_f16mf4_m(...) __riscv_vluxseg4ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_f32m1(...) __riscv_vluxseg4ei16_v_f32m1(__VA_ARGS__) |
| #define | vluxseg4ei16_v_f32m1_m(...) __riscv_vluxseg4ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_f32m2(...) __riscv_vluxseg4ei16_v_f32m2(__VA_ARGS__) |
| #define | vluxseg4ei16_v_f32m2_m(...) __riscv_vluxseg4ei16_v_f32m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_f32mf2(...) __riscv_vluxseg4ei16_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg4ei16_v_f32mf2_m(...) __riscv_vluxseg4ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_f64m1(...) __riscv_vluxseg4ei16_v_f64m1(__VA_ARGS__) |
| #define | vluxseg4ei16_v_f64m1_m(...) __riscv_vluxseg4ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_f64m2(...) __riscv_vluxseg4ei16_v_f64m2(__VA_ARGS__) |
| #define | vluxseg4ei16_v_f64m2_m(...) __riscv_vluxseg4ei16_v_f64m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i16m1(...) __riscv_vluxseg4ei16_v_i16m1(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i16m1_m(...) __riscv_vluxseg4ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i16m2(...) __riscv_vluxseg4ei16_v_i16m2(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i16m2_m(...) __riscv_vluxseg4ei16_v_i16m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i16mf2(...) __riscv_vluxseg4ei16_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i16mf2_m(...) __riscv_vluxseg4ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i16mf4(...) __riscv_vluxseg4ei16_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i16mf4_m(...) __riscv_vluxseg4ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i32m1(...) __riscv_vluxseg4ei16_v_i32m1(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i32m1_m(...) __riscv_vluxseg4ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i32m2(...) __riscv_vluxseg4ei16_v_i32m2(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i32m2_m(...) __riscv_vluxseg4ei16_v_i32m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i32mf2(...) __riscv_vluxseg4ei16_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i32mf2_m(...) __riscv_vluxseg4ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i64m1(...) __riscv_vluxseg4ei16_v_i64m1(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i64m1_m(...) __riscv_vluxseg4ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i64m2(...) __riscv_vluxseg4ei16_v_i64m2(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i64m2_m(...) __riscv_vluxseg4ei16_v_i64m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i8m1(...) __riscv_vluxseg4ei16_v_i8m1(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i8m1_m(...) __riscv_vluxseg4ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i8m2(...) __riscv_vluxseg4ei16_v_i8m2(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i8m2_m(...) __riscv_vluxseg4ei16_v_i8m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i8mf2(...) __riscv_vluxseg4ei16_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i8mf2_m(...) __riscv_vluxseg4ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i8mf4(...) __riscv_vluxseg4ei16_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i8mf4_m(...) __riscv_vluxseg4ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i8mf8(...) __riscv_vluxseg4ei16_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg4ei16_v_i8mf8_m(...) __riscv_vluxseg4ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u16m1(...) __riscv_vluxseg4ei16_v_u16m1(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u16m1_m(...) __riscv_vluxseg4ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u16m2(...) __riscv_vluxseg4ei16_v_u16m2(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u16m2_m(...) __riscv_vluxseg4ei16_v_u16m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u16mf2(...) __riscv_vluxseg4ei16_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u16mf2_m(...) __riscv_vluxseg4ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u16mf4(...) __riscv_vluxseg4ei16_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u16mf4_m(...) __riscv_vluxseg4ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u32m1(...) __riscv_vluxseg4ei16_v_u32m1(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u32m1_m(...) __riscv_vluxseg4ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u32m2(...) __riscv_vluxseg4ei16_v_u32m2(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u32m2_m(...) __riscv_vluxseg4ei16_v_u32m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u32mf2(...) __riscv_vluxseg4ei16_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u32mf2_m(...) __riscv_vluxseg4ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u64m1(...) __riscv_vluxseg4ei16_v_u64m1(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u64m1_m(...) __riscv_vluxseg4ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u64m2(...) __riscv_vluxseg4ei16_v_u64m2(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u64m2_m(...) __riscv_vluxseg4ei16_v_u64m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u8m1(...) __riscv_vluxseg4ei16_v_u8m1(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u8m1_m(...) __riscv_vluxseg4ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u8m2(...) __riscv_vluxseg4ei16_v_u8m2(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u8m2_m(...) __riscv_vluxseg4ei16_v_u8m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u8mf2(...) __riscv_vluxseg4ei16_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u8mf2_m(...) __riscv_vluxseg4ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u8mf4(...) __riscv_vluxseg4ei16_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u8mf4_m(...) __riscv_vluxseg4ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u8mf8(...) __riscv_vluxseg4ei16_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg4ei16_v_u8mf8_m(...) __riscv_vluxseg4ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_f16m1(...) __riscv_vluxseg4ei32_v_f16m1(__VA_ARGS__) |
| #define | vluxseg4ei32_v_f16m1_m(...) __riscv_vluxseg4ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_f16m2(...) __riscv_vluxseg4ei32_v_f16m2(__VA_ARGS__) |
| #define | vluxseg4ei32_v_f16m2_m(...) __riscv_vluxseg4ei32_v_f16m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_f16mf2(...) __riscv_vluxseg4ei32_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg4ei32_v_f16mf2_m(...) __riscv_vluxseg4ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_f16mf4(...) __riscv_vluxseg4ei32_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg4ei32_v_f16mf4_m(...) __riscv_vluxseg4ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_f32m1(...) __riscv_vluxseg4ei32_v_f32m1(__VA_ARGS__) |
| #define | vluxseg4ei32_v_f32m1_m(...) __riscv_vluxseg4ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_f32m2(...) __riscv_vluxseg4ei32_v_f32m2(__VA_ARGS__) |
| #define | vluxseg4ei32_v_f32m2_m(...) __riscv_vluxseg4ei32_v_f32m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_f32mf2(...) __riscv_vluxseg4ei32_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg4ei32_v_f32mf2_m(...) __riscv_vluxseg4ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_f64m1(...) __riscv_vluxseg4ei32_v_f64m1(__VA_ARGS__) |
| #define | vluxseg4ei32_v_f64m1_m(...) __riscv_vluxseg4ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_f64m2(...) __riscv_vluxseg4ei32_v_f64m2(__VA_ARGS__) |
| #define | vluxseg4ei32_v_f64m2_m(...) __riscv_vluxseg4ei32_v_f64m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i16m1(...) __riscv_vluxseg4ei32_v_i16m1(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i16m1_m(...) __riscv_vluxseg4ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i16m2(...) __riscv_vluxseg4ei32_v_i16m2(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i16m2_m(...) __riscv_vluxseg4ei32_v_i16m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i16mf2(...) __riscv_vluxseg4ei32_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i16mf2_m(...) __riscv_vluxseg4ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i16mf4(...) __riscv_vluxseg4ei32_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i16mf4_m(...) __riscv_vluxseg4ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i32m1(...) __riscv_vluxseg4ei32_v_i32m1(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i32m1_m(...) __riscv_vluxseg4ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i32m2(...) __riscv_vluxseg4ei32_v_i32m2(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i32m2_m(...) __riscv_vluxseg4ei32_v_i32m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i32mf2(...) __riscv_vluxseg4ei32_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i32mf2_m(...) __riscv_vluxseg4ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i64m1(...) __riscv_vluxseg4ei32_v_i64m1(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i64m1_m(...) __riscv_vluxseg4ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i64m2(...) __riscv_vluxseg4ei32_v_i64m2(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i64m2_m(...) __riscv_vluxseg4ei32_v_i64m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i8m1(...) __riscv_vluxseg4ei32_v_i8m1(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i8m1_m(...) __riscv_vluxseg4ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i8m2(...) __riscv_vluxseg4ei32_v_i8m2(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i8m2_m(...) __riscv_vluxseg4ei32_v_i8m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i8mf2(...) __riscv_vluxseg4ei32_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i8mf2_m(...) __riscv_vluxseg4ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i8mf4(...) __riscv_vluxseg4ei32_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i8mf4_m(...) __riscv_vluxseg4ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i8mf8(...) __riscv_vluxseg4ei32_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg4ei32_v_i8mf8_m(...) __riscv_vluxseg4ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u16m1(...) __riscv_vluxseg4ei32_v_u16m1(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u16m1_m(...) __riscv_vluxseg4ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u16m2(...) __riscv_vluxseg4ei32_v_u16m2(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u16m2_m(...) __riscv_vluxseg4ei32_v_u16m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u16mf2(...) __riscv_vluxseg4ei32_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u16mf2_m(...) __riscv_vluxseg4ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u16mf4(...) __riscv_vluxseg4ei32_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u16mf4_m(...) __riscv_vluxseg4ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u32m1(...) __riscv_vluxseg4ei32_v_u32m1(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u32m1_m(...) __riscv_vluxseg4ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u32m2(...) __riscv_vluxseg4ei32_v_u32m2(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u32m2_m(...) __riscv_vluxseg4ei32_v_u32m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u32mf2(...) __riscv_vluxseg4ei32_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u32mf2_m(...) __riscv_vluxseg4ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u64m1(...) __riscv_vluxseg4ei32_v_u64m1(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u64m1_m(...) __riscv_vluxseg4ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u64m2(...) __riscv_vluxseg4ei32_v_u64m2(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u64m2_m(...) __riscv_vluxseg4ei32_v_u64m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u8m1(...) __riscv_vluxseg4ei32_v_u8m1(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u8m1_m(...) __riscv_vluxseg4ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u8m2(...) __riscv_vluxseg4ei32_v_u8m2(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u8m2_m(...) __riscv_vluxseg4ei32_v_u8m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u8mf2(...) __riscv_vluxseg4ei32_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u8mf2_m(...) __riscv_vluxseg4ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u8mf4(...) __riscv_vluxseg4ei32_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u8mf4_m(...) __riscv_vluxseg4ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u8mf8(...) __riscv_vluxseg4ei32_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg4ei32_v_u8mf8_m(...) __riscv_vluxseg4ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_f16m1(...) __riscv_vluxseg4ei64_v_f16m1(__VA_ARGS__) |
| #define | vluxseg4ei64_v_f16m1_m(...) __riscv_vluxseg4ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_f16m2(...) __riscv_vluxseg4ei64_v_f16m2(__VA_ARGS__) |
| #define | vluxseg4ei64_v_f16m2_m(...) __riscv_vluxseg4ei64_v_f16m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_f16mf2(...) __riscv_vluxseg4ei64_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg4ei64_v_f16mf2_m(...) __riscv_vluxseg4ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_f16mf4(...) __riscv_vluxseg4ei64_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg4ei64_v_f16mf4_m(...) __riscv_vluxseg4ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_f32m1(...) __riscv_vluxseg4ei64_v_f32m1(__VA_ARGS__) |
| #define | vluxseg4ei64_v_f32m1_m(...) __riscv_vluxseg4ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_f32m2(...) __riscv_vluxseg4ei64_v_f32m2(__VA_ARGS__) |
| #define | vluxseg4ei64_v_f32m2_m(...) __riscv_vluxseg4ei64_v_f32m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_f32mf2(...) __riscv_vluxseg4ei64_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg4ei64_v_f32mf2_m(...) __riscv_vluxseg4ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_f64m1(...) __riscv_vluxseg4ei64_v_f64m1(__VA_ARGS__) |
| #define | vluxseg4ei64_v_f64m1_m(...) __riscv_vluxseg4ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_f64m2(...) __riscv_vluxseg4ei64_v_f64m2(__VA_ARGS__) |
| #define | vluxseg4ei64_v_f64m2_m(...) __riscv_vluxseg4ei64_v_f64m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i16m1(...) __riscv_vluxseg4ei64_v_i16m1(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i16m1_m(...) __riscv_vluxseg4ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i16m2(...) __riscv_vluxseg4ei64_v_i16m2(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i16m2_m(...) __riscv_vluxseg4ei64_v_i16m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i16mf2(...) __riscv_vluxseg4ei64_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i16mf2_m(...) __riscv_vluxseg4ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i16mf4(...) __riscv_vluxseg4ei64_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i16mf4_m(...) __riscv_vluxseg4ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i32m1(...) __riscv_vluxseg4ei64_v_i32m1(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i32m1_m(...) __riscv_vluxseg4ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i32m2(...) __riscv_vluxseg4ei64_v_i32m2(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i32m2_m(...) __riscv_vluxseg4ei64_v_i32m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i32mf2(...) __riscv_vluxseg4ei64_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i32mf2_m(...) __riscv_vluxseg4ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i64m1(...) __riscv_vluxseg4ei64_v_i64m1(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i64m1_m(...) __riscv_vluxseg4ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i64m2(...) __riscv_vluxseg4ei64_v_i64m2(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i64m2_m(...) __riscv_vluxseg4ei64_v_i64m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i8m1(...) __riscv_vluxseg4ei64_v_i8m1(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i8m1_m(...) __riscv_vluxseg4ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i8mf2(...) __riscv_vluxseg4ei64_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i8mf2_m(...) __riscv_vluxseg4ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i8mf4(...) __riscv_vluxseg4ei64_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i8mf4_m(...) __riscv_vluxseg4ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i8mf8(...) __riscv_vluxseg4ei64_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg4ei64_v_i8mf8_m(...) __riscv_vluxseg4ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u16m1(...) __riscv_vluxseg4ei64_v_u16m1(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u16m1_m(...) __riscv_vluxseg4ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u16m2(...) __riscv_vluxseg4ei64_v_u16m2(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u16m2_m(...) __riscv_vluxseg4ei64_v_u16m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u16mf2(...) __riscv_vluxseg4ei64_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u16mf2_m(...) __riscv_vluxseg4ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u16mf4(...) __riscv_vluxseg4ei64_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u16mf4_m(...) __riscv_vluxseg4ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u32m1(...) __riscv_vluxseg4ei64_v_u32m1(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u32m1_m(...) __riscv_vluxseg4ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u32m2(...) __riscv_vluxseg4ei64_v_u32m2(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u32m2_m(...) __riscv_vluxseg4ei64_v_u32m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u32mf2(...) __riscv_vluxseg4ei64_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u32mf2_m(...) __riscv_vluxseg4ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u64m1(...) __riscv_vluxseg4ei64_v_u64m1(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u64m1_m(...) __riscv_vluxseg4ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u64m2(...) __riscv_vluxseg4ei64_v_u64m2(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u64m2_m(...) __riscv_vluxseg4ei64_v_u64m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u8m1(...) __riscv_vluxseg4ei64_v_u8m1(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u8m1_m(...) __riscv_vluxseg4ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u8mf2(...) __riscv_vluxseg4ei64_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u8mf2_m(...) __riscv_vluxseg4ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u8mf4(...) __riscv_vluxseg4ei64_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u8mf4_m(...) __riscv_vluxseg4ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u8mf8(...) __riscv_vluxseg4ei64_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg4ei64_v_u8mf8_m(...) __riscv_vluxseg4ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_f16m1(...) __riscv_vluxseg4ei8_v_f16m1(__VA_ARGS__) |
| #define | vluxseg4ei8_v_f16m1_m(...) __riscv_vluxseg4ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_f16m2(...) __riscv_vluxseg4ei8_v_f16m2(__VA_ARGS__) |
| #define | vluxseg4ei8_v_f16m2_m(...) __riscv_vluxseg4ei8_v_f16m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_f16mf2(...) __riscv_vluxseg4ei8_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg4ei8_v_f16mf2_m(...) __riscv_vluxseg4ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_f16mf4(...) __riscv_vluxseg4ei8_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg4ei8_v_f16mf4_m(...) __riscv_vluxseg4ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_f32m1(...) __riscv_vluxseg4ei8_v_f32m1(__VA_ARGS__) |
| #define | vluxseg4ei8_v_f32m1_m(...) __riscv_vluxseg4ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_f32m2(...) __riscv_vluxseg4ei8_v_f32m2(__VA_ARGS__) |
| #define | vluxseg4ei8_v_f32m2_m(...) __riscv_vluxseg4ei8_v_f32m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_f32mf2(...) __riscv_vluxseg4ei8_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg4ei8_v_f32mf2_m(...) __riscv_vluxseg4ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_f64m1(...) __riscv_vluxseg4ei8_v_f64m1(__VA_ARGS__) |
| #define | vluxseg4ei8_v_f64m1_m(...) __riscv_vluxseg4ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_f64m2(...) __riscv_vluxseg4ei8_v_f64m2(__VA_ARGS__) |
| #define | vluxseg4ei8_v_f64m2_m(...) __riscv_vluxseg4ei8_v_f64m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i16m1(...) __riscv_vluxseg4ei8_v_i16m1(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i16m1_m(...) __riscv_vluxseg4ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i16m2(...) __riscv_vluxseg4ei8_v_i16m2(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i16m2_m(...) __riscv_vluxseg4ei8_v_i16m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i16mf2(...) __riscv_vluxseg4ei8_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i16mf2_m(...) __riscv_vluxseg4ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i16mf4(...) __riscv_vluxseg4ei8_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i16mf4_m(...) __riscv_vluxseg4ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i32m1(...) __riscv_vluxseg4ei8_v_i32m1(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i32m1_m(...) __riscv_vluxseg4ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i32m2(...) __riscv_vluxseg4ei8_v_i32m2(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i32m2_m(...) __riscv_vluxseg4ei8_v_i32m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i32mf2(...) __riscv_vluxseg4ei8_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i32mf2_m(...) __riscv_vluxseg4ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i64m1(...) __riscv_vluxseg4ei8_v_i64m1(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i64m1_m(...) __riscv_vluxseg4ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i64m2(...) __riscv_vluxseg4ei8_v_i64m2(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i64m2_m(...) __riscv_vluxseg4ei8_v_i64m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i8m1(...) __riscv_vluxseg4ei8_v_i8m1(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i8m1_m(...) __riscv_vluxseg4ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i8m2(...) __riscv_vluxseg4ei8_v_i8m2(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i8m2_m(...) __riscv_vluxseg4ei8_v_i8m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i8mf2(...) __riscv_vluxseg4ei8_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i8mf2_m(...) __riscv_vluxseg4ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i8mf4(...) __riscv_vluxseg4ei8_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i8mf4_m(...) __riscv_vluxseg4ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i8mf8(...) __riscv_vluxseg4ei8_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg4ei8_v_i8mf8_m(...) __riscv_vluxseg4ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u16m1(...) __riscv_vluxseg4ei8_v_u16m1(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u16m1_m(...) __riscv_vluxseg4ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u16m2(...) __riscv_vluxseg4ei8_v_u16m2(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u16m2_m(...) __riscv_vluxseg4ei8_v_u16m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u16mf2(...) __riscv_vluxseg4ei8_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u16mf2_m(...) __riscv_vluxseg4ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u16mf4(...) __riscv_vluxseg4ei8_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u16mf4_m(...) __riscv_vluxseg4ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u32m1(...) __riscv_vluxseg4ei8_v_u32m1(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u32m1_m(...) __riscv_vluxseg4ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u32m2(...) __riscv_vluxseg4ei8_v_u32m2(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u32m2_m(...) __riscv_vluxseg4ei8_v_u32m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u32mf2(...) __riscv_vluxseg4ei8_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u32mf2_m(...) __riscv_vluxseg4ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u64m1(...) __riscv_vluxseg4ei8_v_u64m1(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u64m1_m(...) __riscv_vluxseg4ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u64m2(...) __riscv_vluxseg4ei8_v_u64m2(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u64m2_m(...) __riscv_vluxseg4ei8_v_u64m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u8m1(...) __riscv_vluxseg4ei8_v_u8m1(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u8m1_m(...) __riscv_vluxseg4ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u8m2(...) __riscv_vluxseg4ei8_v_u8m2(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u8m2_m(...) __riscv_vluxseg4ei8_v_u8m2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u8mf2(...) __riscv_vluxseg4ei8_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u8mf2_m(...) __riscv_vluxseg4ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u8mf4(...) __riscv_vluxseg4ei8_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u8mf4_m(...) __riscv_vluxseg4ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u8mf8(...) __riscv_vluxseg4ei8_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg4ei8_v_u8mf8_m(...) __riscv_vluxseg4ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_f16m1(...) __riscv_vluxseg5ei16_v_f16m1(__VA_ARGS__) |
| #define | vluxseg5ei16_v_f16m1_m(...) __riscv_vluxseg5ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_f16mf2(...) __riscv_vluxseg5ei16_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg5ei16_v_f16mf2_m(...) __riscv_vluxseg5ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_f16mf4(...) __riscv_vluxseg5ei16_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg5ei16_v_f16mf4_m(...) __riscv_vluxseg5ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_f32m1(...) __riscv_vluxseg5ei16_v_f32m1(__VA_ARGS__) |
| #define | vluxseg5ei16_v_f32m1_m(...) __riscv_vluxseg5ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_f32mf2(...) __riscv_vluxseg5ei16_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg5ei16_v_f32mf2_m(...) __riscv_vluxseg5ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_f64m1(...) __riscv_vluxseg5ei16_v_f64m1(__VA_ARGS__) |
| #define | vluxseg5ei16_v_f64m1_m(...) __riscv_vluxseg5ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_i16m1(...) __riscv_vluxseg5ei16_v_i16m1(__VA_ARGS__) |
| #define | vluxseg5ei16_v_i16m1_m(...) __riscv_vluxseg5ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_i16mf2(...) __riscv_vluxseg5ei16_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg5ei16_v_i16mf2_m(...) __riscv_vluxseg5ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_i16mf4(...) __riscv_vluxseg5ei16_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg5ei16_v_i16mf4_m(...) __riscv_vluxseg5ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_i32m1(...) __riscv_vluxseg5ei16_v_i32m1(__VA_ARGS__) |
| #define | vluxseg5ei16_v_i32m1_m(...) __riscv_vluxseg5ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_i32mf2(...) __riscv_vluxseg5ei16_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg5ei16_v_i32mf2_m(...) __riscv_vluxseg5ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_i64m1(...) __riscv_vluxseg5ei16_v_i64m1(__VA_ARGS__) |
| #define | vluxseg5ei16_v_i64m1_m(...) __riscv_vluxseg5ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_i8m1(...) __riscv_vluxseg5ei16_v_i8m1(__VA_ARGS__) |
| #define | vluxseg5ei16_v_i8m1_m(...) __riscv_vluxseg5ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_i8mf2(...) __riscv_vluxseg5ei16_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg5ei16_v_i8mf2_m(...) __riscv_vluxseg5ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_i8mf4(...) __riscv_vluxseg5ei16_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg5ei16_v_i8mf4_m(...) __riscv_vluxseg5ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_i8mf8(...) __riscv_vluxseg5ei16_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg5ei16_v_i8mf8_m(...) __riscv_vluxseg5ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_u16m1(...) __riscv_vluxseg5ei16_v_u16m1(__VA_ARGS__) |
| #define | vluxseg5ei16_v_u16m1_m(...) __riscv_vluxseg5ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_u16mf2(...) __riscv_vluxseg5ei16_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg5ei16_v_u16mf2_m(...) __riscv_vluxseg5ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_u16mf4(...) __riscv_vluxseg5ei16_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg5ei16_v_u16mf4_m(...) __riscv_vluxseg5ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_u32m1(...) __riscv_vluxseg5ei16_v_u32m1(__VA_ARGS__) |
| #define | vluxseg5ei16_v_u32m1_m(...) __riscv_vluxseg5ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_u32mf2(...) __riscv_vluxseg5ei16_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg5ei16_v_u32mf2_m(...) __riscv_vluxseg5ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_u64m1(...) __riscv_vluxseg5ei16_v_u64m1(__VA_ARGS__) |
| #define | vluxseg5ei16_v_u64m1_m(...) __riscv_vluxseg5ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_u8m1(...) __riscv_vluxseg5ei16_v_u8m1(__VA_ARGS__) |
| #define | vluxseg5ei16_v_u8m1_m(...) __riscv_vluxseg5ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_u8mf2(...) __riscv_vluxseg5ei16_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg5ei16_v_u8mf2_m(...) __riscv_vluxseg5ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_u8mf4(...) __riscv_vluxseg5ei16_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg5ei16_v_u8mf4_m(...) __riscv_vluxseg5ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg5ei16_v_u8mf8(...) __riscv_vluxseg5ei16_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg5ei16_v_u8mf8_m(...) __riscv_vluxseg5ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_f16m1(...) __riscv_vluxseg5ei32_v_f16m1(__VA_ARGS__) |
| #define | vluxseg5ei32_v_f16m1_m(...) __riscv_vluxseg5ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_f16mf2(...) __riscv_vluxseg5ei32_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg5ei32_v_f16mf2_m(...) __riscv_vluxseg5ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_f16mf4(...) __riscv_vluxseg5ei32_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg5ei32_v_f16mf4_m(...) __riscv_vluxseg5ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_f32m1(...) __riscv_vluxseg5ei32_v_f32m1(__VA_ARGS__) |
| #define | vluxseg5ei32_v_f32m1_m(...) __riscv_vluxseg5ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_f32mf2(...) __riscv_vluxseg5ei32_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg5ei32_v_f32mf2_m(...) __riscv_vluxseg5ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_f64m1(...) __riscv_vluxseg5ei32_v_f64m1(__VA_ARGS__) |
| #define | vluxseg5ei32_v_f64m1_m(...) __riscv_vluxseg5ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_i16m1(...) __riscv_vluxseg5ei32_v_i16m1(__VA_ARGS__) |
| #define | vluxseg5ei32_v_i16m1_m(...) __riscv_vluxseg5ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_i16mf2(...) __riscv_vluxseg5ei32_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg5ei32_v_i16mf2_m(...) __riscv_vluxseg5ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_i16mf4(...) __riscv_vluxseg5ei32_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg5ei32_v_i16mf4_m(...) __riscv_vluxseg5ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_i32m1(...) __riscv_vluxseg5ei32_v_i32m1(__VA_ARGS__) |
| #define | vluxseg5ei32_v_i32m1_m(...) __riscv_vluxseg5ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_i32mf2(...) __riscv_vluxseg5ei32_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg5ei32_v_i32mf2_m(...) __riscv_vluxseg5ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_i64m1(...) __riscv_vluxseg5ei32_v_i64m1(__VA_ARGS__) |
| #define | vluxseg5ei32_v_i64m1_m(...) __riscv_vluxseg5ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_i8m1(...) __riscv_vluxseg5ei32_v_i8m1(__VA_ARGS__) |
| #define | vluxseg5ei32_v_i8m1_m(...) __riscv_vluxseg5ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_i8mf2(...) __riscv_vluxseg5ei32_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg5ei32_v_i8mf2_m(...) __riscv_vluxseg5ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_i8mf4(...) __riscv_vluxseg5ei32_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg5ei32_v_i8mf4_m(...) __riscv_vluxseg5ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_i8mf8(...) __riscv_vluxseg5ei32_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg5ei32_v_i8mf8_m(...) __riscv_vluxseg5ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_u16m1(...) __riscv_vluxseg5ei32_v_u16m1(__VA_ARGS__) |
| #define | vluxseg5ei32_v_u16m1_m(...) __riscv_vluxseg5ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_u16mf2(...) __riscv_vluxseg5ei32_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg5ei32_v_u16mf2_m(...) __riscv_vluxseg5ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_u16mf4(...) __riscv_vluxseg5ei32_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg5ei32_v_u16mf4_m(...) __riscv_vluxseg5ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_u32m1(...) __riscv_vluxseg5ei32_v_u32m1(__VA_ARGS__) |
| #define | vluxseg5ei32_v_u32m1_m(...) __riscv_vluxseg5ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_u32mf2(...) __riscv_vluxseg5ei32_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg5ei32_v_u32mf2_m(...) __riscv_vluxseg5ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_u64m1(...) __riscv_vluxseg5ei32_v_u64m1(__VA_ARGS__) |
| #define | vluxseg5ei32_v_u64m1_m(...) __riscv_vluxseg5ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_u8m1(...) __riscv_vluxseg5ei32_v_u8m1(__VA_ARGS__) |
| #define | vluxseg5ei32_v_u8m1_m(...) __riscv_vluxseg5ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_u8mf2(...) __riscv_vluxseg5ei32_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg5ei32_v_u8mf2_m(...) __riscv_vluxseg5ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_u8mf4(...) __riscv_vluxseg5ei32_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg5ei32_v_u8mf4_m(...) __riscv_vluxseg5ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg5ei32_v_u8mf8(...) __riscv_vluxseg5ei32_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg5ei32_v_u8mf8_m(...) __riscv_vluxseg5ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_f16m1(...) __riscv_vluxseg5ei64_v_f16m1(__VA_ARGS__) |
| #define | vluxseg5ei64_v_f16m1_m(...) __riscv_vluxseg5ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_f16mf2(...) __riscv_vluxseg5ei64_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg5ei64_v_f16mf2_m(...) __riscv_vluxseg5ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_f16mf4(...) __riscv_vluxseg5ei64_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg5ei64_v_f16mf4_m(...) __riscv_vluxseg5ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_f32m1(...) __riscv_vluxseg5ei64_v_f32m1(__VA_ARGS__) |
| #define | vluxseg5ei64_v_f32m1_m(...) __riscv_vluxseg5ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_f32mf2(...) __riscv_vluxseg5ei64_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg5ei64_v_f32mf2_m(...) __riscv_vluxseg5ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_f64m1(...) __riscv_vluxseg5ei64_v_f64m1(__VA_ARGS__) |
| #define | vluxseg5ei64_v_f64m1_m(...) __riscv_vluxseg5ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_i16m1(...) __riscv_vluxseg5ei64_v_i16m1(__VA_ARGS__) |
| #define | vluxseg5ei64_v_i16m1_m(...) __riscv_vluxseg5ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_i16mf2(...) __riscv_vluxseg5ei64_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg5ei64_v_i16mf2_m(...) __riscv_vluxseg5ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_i16mf4(...) __riscv_vluxseg5ei64_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg5ei64_v_i16mf4_m(...) __riscv_vluxseg5ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_i32m1(...) __riscv_vluxseg5ei64_v_i32m1(__VA_ARGS__) |
| #define | vluxseg5ei64_v_i32m1_m(...) __riscv_vluxseg5ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_i32mf2(...) __riscv_vluxseg5ei64_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg5ei64_v_i32mf2_m(...) __riscv_vluxseg5ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_i64m1(...) __riscv_vluxseg5ei64_v_i64m1(__VA_ARGS__) |
| #define | vluxseg5ei64_v_i64m1_m(...) __riscv_vluxseg5ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_i8m1(...) __riscv_vluxseg5ei64_v_i8m1(__VA_ARGS__) |
| #define | vluxseg5ei64_v_i8m1_m(...) __riscv_vluxseg5ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_i8mf2(...) __riscv_vluxseg5ei64_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg5ei64_v_i8mf2_m(...) __riscv_vluxseg5ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_i8mf4(...) __riscv_vluxseg5ei64_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg5ei64_v_i8mf4_m(...) __riscv_vluxseg5ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_i8mf8(...) __riscv_vluxseg5ei64_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg5ei64_v_i8mf8_m(...) __riscv_vluxseg5ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_u16m1(...) __riscv_vluxseg5ei64_v_u16m1(__VA_ARGS__) |
| #define | vluxseg5ei64_v_u16m1_m(...) __riscv_vluxseg5ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_u16mf2(...) __riscv_vluxseg5ei64_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg5ei64_v_u16mf2_m(...) __riscv_vluxseg5ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_u16mf4(...) __riscv_vluxseg5ei64_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg5ei64_v_u16mf4_m(...) __riscv_vluxseg5ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_u32m1(...) __riscv_vluxseg5ei64_v_u32m1(__VA_ARGS__) |
| #define | vluxseg5ei64_v_u32m1_m(...) __riscv_vluxseg5ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_u32mf2(...) __riscv_vluxseg5ei64_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg5ei64_v_u32mf2_m(...) __riscv_vluxseg5ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_u64m1(...) __riscv_vluxseg5ei64_v_u64m1(__VA_ARGS__) |
| #define | vluxseg5ei64_v_u64m1_m(...) __riscv_vluxseg5ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_u8m1(...) __riscv_vluxseg5ei64_v_u8m1(__VA_ARGS__) |
| #define | vluxseg5ei64_v_u8m1_m(...) __riscv_vluxseg5ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_u8mf2(...) __riscv_vluxseg5ei64_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg5ei64_v_u8mf2_m(...) __riscv_vluxseg5ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_u8mf4(...) __riscv_vluxseg5ei64_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg5ei64_v_u8mf4_m(...) __riscv_vluxseg5ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg5ei64_v_u8mf8(...) __riscv_vluxseg5ei64_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg5ei64_v_u8mf8_m(...) __riscv_vluxseg5ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_f16m1(...) __riscv_vluxseg5ei8_v_f16m1(__VA_ARGS__) |
| #define | vluxseg5ei8_v_f16m1_m(...) __riscv_vluxseg5ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_f16mf2(...) __riscv_vluxseg5ei8_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg5ei8_v_f16mf2_m(...) __riscv_vluxseg5ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_f16mf4(...) __riscv_vluxseg5ei8_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg5ei8_v_f16mf4_m(...) __riscv_vluxseg5ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_f32m1(...) __riscv_vluxseg5ei8_v_f32m1(__VA_ARGS__) |
| #define | vluxseg5ei8_v_f32m1_m(...) __riscv_vluxseg5ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_f32mf2(...) __riscv_vluxseg5ei8_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg5ei8_v_f32mf2_m(...) __riscv_vluxseg5ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_f64m1(...) __riscv_vluxseg5ei8_v_f64m1(__VA_ARGS__) |
| #define | vluxseg5ei8_v_f64m1_m(...) __riscv_vluxseg5ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_i16m1(...) __riscv_vluxseg5ei8_v_i16m1(__VA_ARGS__) |
| #define | vluxseg5ei8_v_i16m1_m(...) __riscv_vluxseg5ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_i16mf2(...) __riscv_vluxseg5ei8_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg5ei8_v_i16mf2_m(...) __riscv_vluxseg5ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_i16mf4(...) __riscv_vluxseg5ei8_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg5ei8_v_i16mf4_m(...) __riscv_vluxseg5ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_i32m1(...) __riscv_vluxseg5ei8_v_i32m1(__VA_ARGS__) |
| #define | vluxseg5ei8_v_i32m1_m(...) __riscv_vluxseg5ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_i32mf2(...) __riscv_vluxseg5ei8_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg5ei8_v_i32mf2_m(...) __riscv_vluxseg5ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_i64m1(...) __riscv_vluxseg5ei8_v_i64m1(__VA_ARGS__) |
| #define | vluxseg5ei8_v_i64m1_m(...) __riscv_vluxseg5ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_i8m1(...) __riscv_vluxseg5ei8_v_i8m1(__VA_ARGS__) |
| #define | vluxseg5ei8_v_i8m1_m(...) __riscv_vluxseg5ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_i8mf2(...) __riscv_vluxseg5ei8_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg5ei8_v_i8mf2_m(...) __riscv_vluxseg5ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_i8mf4(...) __riscv_vluxseg5ei8_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg5ei8_v_i8mf4_m(...) __riscv_vluxseg5ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_i8mf8(...) __riscv_vluxseg5ei8_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg5ei8_v_i8mf8_m(...) __riscv_vluxseg5ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_u16m1(...) __riscv_vluxseg5ei8_v_u16m1(__VA_ARGS__) |
| #define | vluxseg5ei8_v_u16m1_m(...) __riscv_vluxseg5ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_u16mf2(...) __riscv_vluxseg5ei8_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg5ei8_v_u16mf2_m(...) __riscv_vluxseg5ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_u16mf4(...) __riscv_vluxseg5ei8_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg5ei8_v_u16mf4_m(...) __riscv_vluxseg5ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_u32m1(...) __riscv_vluxseg5ei8_v_u32m1(__VA_ARGS__) |
| #define | vluxseg5ei8_v_u32m1_m(...) __riscv_vluxseg5ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_u32mf2(...) __riscv_vluxseg5ei8_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg5ei8_v_u32mf2_m(...) __riscv_vluxseg5ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_u64m1(...) __riscv_vluxseg5ei8_v_u64m1(__VA_ARGS__) |
| #define | vluxseg5ei8_v_u64m1_m(...) __riscv_vluxseg5ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_u8m1(...) __riscv_vluxseg5ei8_v_u8m1(__VA_ARGS__) |
| #define | vluxseg5ei8_v_u8m1_m(...) __riscv_vluxseg5ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_u8mf2(...) __riscv_vluxseg5ei8_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg5ei8_v_u8mf2_m(...) __riscv_vluxseg5ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_u8mf4(...) __riscv_vluxseg5ei8_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg5ei8_v_u8mf4_m(...) __riscv_vluxseg5ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg5ei8_v_u8mf8(...) __riscv_vluxseg5ei8_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg5ei8_v_u8mf8_m(...) __riscv_vluxseg5ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_f16m1(...) __riscv_vluxseg6ei16_v_f16m1(__VA_ARGS__) |
| #define | vluxseg6ei16_v_f16m1_m(...) __riscv_vluxseg6ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_f16mf2(...) __riscv_vluxseg6ei16_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg6ei16_v_f16mf2_m(...) __riscv_vluxseg6ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_f16mf4(...) __riscv_vluxseg6ei16_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg6ei16_v_f16mf4_m(...) __riscv_vluxseg6ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_f32m1(...) __riscv_vluxseg6ei16_v_f32m1(__VA_ARGS__) |
| #define | vluxseg6ei16_v_f32m1_m(...) __riscv_vluxseg6ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_f32mf2(...) __riscv_vluxseg6ei16_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg6ei16_v_f32mf2_m(...) __riscv_vluxseg6ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_f64m1(...) __riscv_vluxseg6ei16_v_f64m1(__VA_ARGS__) |
| #define | vluxseg6ei16_v_f64m1_m(...) __riscv_vluxseg6ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_i16m1(...) __riscv_vluxseg6ei16_v_i16m1(__VA_ARGS__) |
| #define | vluxseg6ei16_v_i16m1_m(...) __riscv_vluxseg6ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_i16mf2(...) __riscv_vluxseg6ei16_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg6ei16_v_i16mf2_m(...) __riscv_vluxseg6ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_i16mf4(...) __riscv_vluxseg6ei16_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg6ei16_v_i16mf4_m(...) __riscv_vluxseg6ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_i32m1(...) __riscv_vluxseg6ei16_v_i32m1(__VA_ARGS__) |
| #define | vluxseg6ei16_v_i32m1_m(...) __riscv_vluxseg6ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_i32mf2(...) __riscv_vluxseg6ei16_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg6ei16_v_i32mf2_m(...) __riscv_vluxseg6ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_i64m1(...) __riscv_vluxseg6ei16_v_i64m1(__VA_ARGS__) |
| #define | vluxseg6ei16_v_i64m1_m(...) __riscv_vluxseg6ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_i8m1(...) __riscv_vluxseg6ei16_v_i8m1(__VA_ARGS__) |
| #define | vluxseg6ei16_v_i8m1_m(...) __riscv_vluxseg6ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_i8mf2(...) __riscv_vluxseg6ei16_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg6ei16_v_i8mf2_m(...) __riscv_vluxseg6ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_i8mf4(...) __riscv_vluxseg6ei16_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg6ei16_v_i8mf4_m(...) __riscv_vluxseg6ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_i8mf8(...) __riscv_vluxseg6ei16_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg6ei16_v_i8mf8_m(...) __riscv_vluxseg6ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_u16m1(...) __riscv_vluxseg6ei16_v_u16m1(__VA_ARGS__) |
| #define | vluxseg6ei16_v_u16m1_m(...) __riscv_vluxseg6ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_u16mf2(...) __riscv_vluxseg6ei16_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg6ei16_v_u16mf2_m(...) __riscv_vluxseg6ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_u16mf4(...) __riscv_vluxseg6ei16_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg6ei16_v_u16mf4_m(...) __riscv_vluxseg6ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_u32m1(...) __riscv_vluxseg6ei16_v_u32m1(__VA_ARGS__) |
| #define | vluxseg6ei16_v_u32m1_m(...) __riscv_vluxseg6ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_u32mf2(...) __riscv_vluxseg6ei16_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg6ei16_v_u32mf2_m(...) __riscv_vluxseg6ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_u64m1(...) __riscv_vluxseg6ei16_v_u64m1(__VA_ARGS__) |
| #define | vluxseg6ei16_v_u64m1_m(...) __riscv_vluxseg6ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_u8m1(...) __riscv_vluxseg6ei16_v_u8m1(__VA_ARGS__) |
| #define | vluxseg6ei16_v_u8m1_m(...) __riscv_vluxseg6ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_u8mf2(...) __riscv_vluxseg6ei16_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg6ei16_v_u8mf2_m(...) __riscv_vluxseg6ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_u8mf4(...) __riscv_vluxseg6ei16_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg6ei16_v_u8mf4_m(...) __riscv_vluxseg6ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg6ei16_v_u8mf8(...) __riscv_vluxseg6ei16_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg6ei16_v_u8mf8_m(...) __riscv_vluxseg6ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_f16m1(...) __riscv_vluxseg6ei32_v_f16m1(__VA_ARGS__) |
| #define | vluxseg6ei32_v_f16m1_m(...) __riscv_vluxseg6ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_f16mf2(...) __riscv_vluxseg6ei32_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg6ei32_v_f16mf2_m(...) __riscv_vluxseg6ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_f16mf4(...) __riscv_vluxseg6ei32_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg6ei32_v_f16mf4_m(...) __riscv_vluxseg6ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_f32m1(...) __riscv_vluxseg6ei32_v_f32m1(__VA_ARGS__) |
| #define | vluxseg6ei32_v_f32m1_m(...) __riscv_vluxseg6ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_f32mf2(...) __riscv_vluxseg6ei32_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg6ei32_v_f32mf2_m(...) __riscv_vluxseg6ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_f64m1(...) __riscv_vluxseg6ei32_v_f64m1(__VA_ARGS__) |
| #define | vluxseg6ei32_v_f64m1_m(...) __riscv_vluxseg6ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_i16m1(...) __riscv_vluxseg6ei32_v_i16m1(__VA_ARGS__) |
| #define | vluxseg6ei32_v_i16m1_m(...) __riscv_vluxseg6ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_i16mf2(...) __riscv_vluxseg6ei32_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg6ei32_v_i16mf2_m(...) __riscv_vluxseg6ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_i16mf4(...) __riscv_vluxseg6ei32_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg6ei32_v_i16mf4_m(...) __riscv_vluxseg6ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_i32m1(...) __riscv_vluxseg6ei32_v_i32m1(__VA_ARGS__) |
| #define | vluxseg6ei32_v_i32m1_m(...) __riscv_vluxseg6ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_i32mf2(...) __riscv_vluxseg6ei32_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg6ei32_v_i32mf2_m(...) __riscv_vluxseg6ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_i64m1(...) __riscv_vluxseg6ei32_v_i64m1(__VA_ARGS__) |
| #define | vluxseg6ei32_v_i64m1_m(...) __riscv_vluxseg6ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_i8m1(...) __riscv_vluxseg6ei32_v_i8m1(__VA_ARGS__) |
| #define | vluxseg6ei32_v_i8m1_m(...) __riscv_vluxseg6ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_i8mf2(...) __riscv_vluxseg6ei32_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg6ei32_v_i8mf2_m(...) __riscv_vluxseg6ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_i8mf4(...) __riscv_vluxseg6ei32_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg6ei32_v_i8mf4_m(...) __riscv_vluxseg6ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_i8mf8(...) __riscv_vluxseg6ei32_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg6ei32_v_i8mf8_m(...) __riscv_vluxseg6ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_u16m1(...) __riscv_vluxseg6ei32_v_u16m1(__VA_ARGS__) |
| #define | vluxseg6ei32_v_u16m1_m(...) __riscv_vluxseg6ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_u16mf2(...) __riscv_vluxseg6ei32_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg6ei32_v_u16mf2_m(...) __riscv_vluxseg6ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_u16mf4(...) __riscv_vluxseg6ei32_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg6ei32_v_u16mf4_m(...) __riscv_vluxseg6ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_u32m1(...) __riscv_vluxseg6ei32_v_u32m1(__VA_ARGS__) |
| #define | vluxseg6ei32_v_u32m1_m(...) __riscv_vluxseg6ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_u32mf2(...) __riscv_vluxseg6ei32_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg6ei32_v_u32mf2_m(...) __riscv_vluxseg6ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_u64m1(...) __riscv_vluxseg6ei32_v_u64m1(__VA_ARGS__) |
| #define | vluxseg6ei32_v_u64m1_m(...) __riscv_vluxseg6ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_u8m1(...) __riscv_vluxseg6ei32_v_u8m1(__VA_ARGS__) |
| #define | vluxseg6ei32_v_u8m1_m(...) __riscv_vluxseg6ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_u8mf2(...) __riscv_vluxseg6ei32_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg6ei32_v_u8mf2_m(...) __riscv_vluxseg6ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_u8mf4(...) __riscv_vluxseg6ei32_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg6ei32_v_u8mf4_m(...) __riscv_vluxseg6ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg6ei32_v_u8mf8(...) __riscv_vluxseg6ei32_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg6ei32_v_u8mf8_m(...) __riscv_vluxseg6ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_f16m1(...) __riscv_vluxseg6ei64_v_f16m1(__VA_ARGS__) |
| #define | vluxseg6ei64_v_f16m1_m(...) __riscv_vluxseg6ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_f16mf2(...) __riscv_vluxseg6ei64_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg6ei64_v_f16mf2_m(...) __riscv_vluxseg6ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_f16mf4(...) __riscv_vluxseg6ei64_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg6ei64_v_f16mf4_m(...) __riscv_vluxseg6ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_f32m1(...) __riscv_vluxseg6ei64_v_f32m1(__VA_ARGS__) |
| #define | vluxseg6ei64_v_f32m1_m(...) __riscv_vluxseg6ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_f32mf2(...) __riscv_vluxseg6ei64_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg6ei64_v_f32mf2_m(...) __riscv_vluxseg6ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_f64m1(...) __riscv_vluxseg6ei64_v_f64m1(__VA_ARGS__) |
| #define | vluxseg6ei64_v_f64m1_m(...) __riscv_vluxseg6ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_i16m1(...) __riscv_vluxseg6ei64_v_i16m1(__VA_ARGS__) |
| #define | vluxseg6ei64_v_i16m1_m(...) __riscv_vluxseg6ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_i16mf2(...) __riscv_vluxseg6ei64_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg6ei64_v_i16mf2_m(...) __riscv_vluxseg6ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_i16mf4(...) __riscv_vluxseg6ei64_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg6ei64_v_i16mf4_m(...) __riscv_vluxseg6ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_i32m1(...) __riscv_vluxseg6ei64_v_i32m1(__VA_ARGS__) |
| #define | vluxseg6ei64_v_i32m1_m(...) __riscv_vluxseg6ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_i32mf2(...) __riscv_vluxseg6ei64_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg6ei64_v_i32mf2_m(...) __riscv_vluxseg6ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_i64m1(...) __riscv_vluxseg6ei64_v_i64m1(__VA_ARGS__) |
| #define | vluxseg6ei64_v_i64m1_m(...) __riscv_vluxseg6ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_i8m1(...) __riscv_vluxseg6ei64_v_i8m1(__VA_ARGS__) |
| #define | vluxseg6ei64_v_i8m1_m(...) __riscv_vluxseg6ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_i8mf2(...) __riscv_vluxseg6ei64_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg6ei64_v_i8mf2_m(...) __riscv_vluxseg6ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_i8mf4(...) __riscv_vluxseg6ei64_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg6ei64_v_i8mf4_m(...) __riscv_vluxseg6ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_i8mf8(...) __riscv_vluxseg6ei64_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg6ei64_v_i8mf8_m(...) __riscv_vluxseg6ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_u16m1(...) __riscv_vluxseg6ei64_v_u16m1(__VA_ARGS__) |
| #define | vluxseg6ei64_v_u16m1_m(...) __riscv_vluxseg6ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_u16mf2(...) __riscv_vluxseg6ei64_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg6ei64_v_u16mf2_m(...) __riscv_vluxseg6ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_u16mf4(...) __riscv_vluxseg6ei64_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg6ei64_v_u16mf4_m(...) __riscv_vluxseg6ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_u32m1(...) __riscv_vluxseg6ei64_v_u32m1(__VA_ARGS__) |
| #define | vluxseg6ei64_v_u32m1_m(...) __riscv_vluxseg6ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_u32mf2(...) __riscv_vluxseg6ei64_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg6ei64_v_u32mf2_m(...) __riscv_vluxseg6ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_u64m1(...) __riscv_vluxseg6ei64_v_u64m1(__VA_ARGS__) |
| #define | vluxseg6ei64_v_u64m1_m(...) __riscv_vluxseg6ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_u8m1(...) __riscv_vluxseg6ei64_v_u8m1(__VA_ARGS__) |
| #define | vluxseg6ei64_v_u8m1_m(...) __riscv_vluxseg6ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_u8mf2(...) __riscv_vluxseg6ei64_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg6ei64_v_u8mf2_m(...) __riscv_vluxseg6ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_u8mf4(...) __riscv_vluxseg6ei64_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg6ei64_v_u8mf4_m(...) __riscv_vluxseg6ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg6ei64_v_u8mf8(...) __riscv_vluxseg6ei64_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg6ei64_v_u8mf8_m(...) __riscv_vluxseg6ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_f16m1(...) __riscv_vluxseg6ei8_v_f16m1(__VA_ARGS__) |
| #define | vluxseg6ei8_v_f16m1_m(...) __riscv_vluxseg6ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_f16mf2(...) __riscv_vluxseg6ei8_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg6ei8_v_f16mf2_m(...) __riscv_vluxseg6ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_f16mf4(...) __riscv_vluxseg6ei8_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg6ei8_v_f16mf4_m(...) __riscv_vluxseg6ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_f32m1(...) __riscv_vluxseg6ei8_v_f32m1(__VA_ARGS__) |
| #define | vluxseg6ei8_v_f32m1_m(...) __riscv_vluxseg6ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_f32mf2(...) __riscv_vluxseg6ei8_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg6ei8_v_f32mf2_m(...) __riscv_vluxseg6ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_f64m1(...) __riscv_vluxseg6ei8_v_f64m1(__VA_ARGS__) |
| #define | vluxseg6ei8_v_f64m1_m(...) __riscv_vluxseg6ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_i16m1(...) __riscv_vluxseg6ei8_v_i16m1(__VA_ARGS__) |
| #define | vluxseg6ei8_v_i16m1_m(...) __riscv_vluxseg6ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_i16mf2(...) __riscv_vluxseg6ei8_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg6ei8_v_i16mf2_m(...) __riscv_vluxseg6ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_i16mf4(...) __riscv_vluxseg6ei8_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg6ei8_v_i16mf4_m(...) __riscv_vluxseg6ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_i32m1(...) __riscv_vluxseg6ei8_v_i32m1(__VA_ARGS__) |
| #define | vluxseg6ei8_v_i32m1_m(...) __riscv_vluxseg6ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_i32mf2(...) __riscv_vluxseg6ei8_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg6ei8_v_i32mf2_m(...) __riscv_vluxseg6ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_i64m1(...) __riscv_vluxseg6ei8_v_i64m1(__VA_ARGS__) |
| #define | vluxseg6ei8_v_i64m1_m(...) __riscv_vluxseg6ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_i8m1(...) __riscv_vluxseg6ei8_v_i8m1(__VA_ARGS__) |
| #define | vluxseg6ei8_v_i8m1_m(...) __riscv_vluxseg6ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_i8mf2(...) __riscv_vluxseg6ei8_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg6ei8_v_i8mf2_m(...) __riscv_vluxseg6ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_i8mf4(...) __riscv_vluxseg6ei8_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg6ei8_v_i8mf4_m(...) __riscv_vluxseg6ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_i8mf8(...) __riscv_vluxseg6ei8_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg6ei8_v_i8mf8_m(...) __riscv_vluxseg6ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_u16m1(...) __riscv_vluxseg6ei8_v_u16m1(__VA_ARGS__) |
| #define | vluxseg6ei8_v_u16m1_m(...) __riscv_vluxseg6ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_u16mf2(...) __riscv_vluxseg6ei8_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg6ei8_v_u16mf2_m(...) __riscv_vluxseg6ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_u16mf4(...) __riscv_vluxseg6ei8_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg6ei8_v_u16mf4_m(...) __riscv_vluxseg6ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_u32m1(...) __riscv_vluxseg6ei8_v_u32m1(__VA_ARGS__) |
| #define | vluxseg6ei8_v_u32m1_m(...) __riscv_vluxseg6ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_u32mf2(...) __riscv_vluxseg6ei8_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg6ei8_v_u32mf2_m(...) __riscv_vluxseg6ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_u64m1(...) __riscv_vluxseg6ei8_v_u64m1(__VA_ARGS__) |
| #define | vluxseg6ei8_v_u64m1_m(...) __riscv_vluxseg6ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_u8m1(...) __riscv_vluxseg6ei8_v_u8m1(__VA_ARGS__) |
| #define | vluxseg6ei8_v_u8m1_m(...) __riscv_vluxseg6ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_u8mf2(...) __riscv_vluxseg6ei8_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg6ei8_v_u8mf2_m(...) __riscv_vluxseg6ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_u8mf4(...) __riscv_vluxseg6ei8_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg6ei8_v_u8mf4_m(...) __riscv_vluxseg6ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg6ei8_v_u8mf8(...) __riscv_vluxseg6ei8_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg6ei8_v_u8mf8_m(...) __riscv_vluxseg6ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_f16m1(...) __riscv_vluxseg7ei16_v_f16m1(__VA_ARGS__) |
| #define | vluxseg7ei16_v_f16m1_m(...) __riscv_vluxseg7ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_f16mf2(...) __riscv_vluxseg7ei16_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg7ei16_v_f16mf2_m(...) __riscv_vluxseg7ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_f16mf4(...) __riscv_vluxseg7ei16_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg7ei16_v_f16mf4_m(...) __riscv_vluxseg7ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_f32m1(...) __riscv_vluxseg7ei16_v_f32m1(__VA_ARGS__) |
| #define | vluxseg7ei16_v_f32m1_m(...) __riscv_vluxseg7ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_f32mf2(...) __riscv_vluxseg7ei16_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg7ei16_v_f32mf2_m(...) __riscv_vluxseg7ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_f64m1(...) __riscv_vluxseg7ei16_v_f64m1(__VA_ARGS__) |
| #define | vluxseg7ei16_v_f64m1_m(...) __riscv_vluxseg7ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_i16m1(...) __riscv_vluxseg7ei16_v_i16m1(__VA_ARGS__) |
| #define | vluxseg7ei16_v_i16m1_m(...) __riscv_vluxseg7ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_i16mf2(...) __riscv_vluxseg7ei16_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg7ei16_v_i16mf2_m(...) __riscv_vluxseg7ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_i16mf4(...) __riscv_vluxseg7ei16_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg7ei16_v_i16mf4_m(...) __riscv_vluxseg7ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_i32m1(...) __riscv_vluxseg7ei16_v_i32m1(__VA_ARGS__) |
| #define | vluxseg7ei16_v_i32m1_m(...) __riscv_vluxseg7ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_i32mf2(...) __riscv_vluxseg7ei16_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg7ei16_v_i32mf2_m(...) __riscv_vluxseg7ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_i64m1(...) __riscv_vluxseg7ei16_v_i64m1(__VA_ARGS__) |
| #define | vluxseg7ei16_v_i64m1_m(...) __riscv_vluxseg7ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_i8m1(...) __riscv_vluxseg7ei16_v_i8m1(__VA_ARGS__) |
| #define | vluxseg7ei16_v_i8m1_m(...) __riscv_vluxseg7ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_i8mf2(...) __riscv_vluxseg7ei16_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg7ei16_v_i8mf2_m(...) __riscv_vluxseg7ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_i8mf4(...) __riscv_vluxseg7ei16_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg7ei16_v_i8mf4_m(...) __riscv_vluxseg7ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_i8mf8(...) __riscv_vluxseg7ei16_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg7ei16_v_i8mf8_m(...) __riscv_vluxseg7ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_u16m1(...) __riscv_vluxseg7ei16_v_u16m1(__VA_ARGS__) |
| #define | vluxseg7ei16_v_u16m1_m(...) __riscv_vluxseg7ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_u16mf2(...) __riscv_vluxseg7ei16_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg7ei16_v_u16mf2_m(...) __riscv_vluxseg7ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_u16mf4(...) __riscv_vluxseg7ei16_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg7ei16_v_u16mf4_m(...) __riscv_vluxseg7ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_u32m1(...) __riscv_vluxseg7ei16_v_u32m1(__VA_ARGS__) |
| #define | vluxseg7ei16_v_u32m1_m(...) __riscv_vluxseg7ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_u32mf2(...) __riscv_vluxseg7ei16_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg7ei16_v_u32mf2_m(...) __riscv_vluxseg7ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_u64m1(...) __riscv_vluxseg7ei16_v_u64m1(__VA_ARGS__) |
| #define | vluxseg7ei16_v_u64m1_m(...) __riscv_vluxseg7ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_u8m1(...) __riscv_vluxseg7ei16_v_u8m1(__VA_ARGS__) |
| #define | vluxseg7ei16_v_u8m1_m(...) __riscv_vluxseg7ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_u8mf2(...) __riscv_vluxseg7ei16_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg7ei16_v_u8mf2_m(...) __riscv_vluxseg7ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_u8mf4(...) __riscv_vluxseg7ei16_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg7ei16_v_u8mf4_m(...) __riscv_vluxseg7ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg7ei16_v_u8mf8(...) __riscv_vluxseg7ei16_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg7ei16_v_u8mf8_m(...) __riscv_vluxseg7ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_f16m1(...) __riscv_vluxseg7ei32_v_f16m1(__VA_ARGS__) |
| #define | vluxseg7ei32_v_f16m1_m(...) __riscv_vluxseg7ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_f16mf2(...) __riscv_vluxseg7ei32_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg7ei32_v_f16mf2_m(...) __riscv_vluxseg7ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_f16mf4(...) __riscv_vluxseg7ei32_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg7ei32_v_f16mf4_m(...) __riscv_vluxseg7ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_f32m1(...) __riscv_vluxseg7ei32_v_f32m1(__VA_ARGS__) |
| #define | vluxseg7ei32_v_f32m1_m(...) __riscv_vluxseg7ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_f32mf2(...) __riscv_vluxseg7ei32_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg7ei32_v_f32mf2_m(...) __riscv_vluxseg7ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_f64m1(...) __riscv_vluxseg7ei32_v_f64m1(__VA_ARGS__) |
| #define | vluxseg7ei32_v_f64m1_m(...) __riscv_vluxseg7ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_i16m1(...) __riscv_vluxseg7ei32_v_i16m1(__VA_ARGS__) |
| #define | vluxseg7ei32_v_i16m1_m(...) __riscv_vluxseg7ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_i16mf2(...) __riscv_vluxseg7ei32_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg7ei32_v_i16mf2_m(...) __riscv_vluxseg7ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_i16mf4(...) __riscv_vluxseg7ei32_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg7ei32_v_i16mf4_m(...) __riscv_vluxseg7ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_i32m1(...) __riscv_vluxseg7ei32_v_i32m1(__VA_ARGS__) |
| #define | vluxseg7ei32_v_i32m1_m(...) __riscv_vluxseg7ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_i32mf2(...) __riscv_vluxseg7ei32_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg7ei32_v_i32mf2_m(...) __riscv_vluxseg7ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_i64m1(...) __riscv_vluxseg7ei32_v_i64m1(__VA_ARGS__) |
| #define | vluxseg7ei32_v_i64m1_m(...) __riscv_vluxseg7ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_i8m1(...) __riscv_vluxseg7ei32_v_i8m1(__VA_ARGS__) |
| #define | vluxseg7ei32_v_i8m1_m(...) __riscv_vluxseg7ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_i8mf2(...) __riscv_vluxseg7ei32_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg7ei32_v_i8mf2_m(...) __riscv_vluxseg7ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_i8mf4(...) __riscv_vluxseg7ei32_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg7ei32_v_i8mf4_m(...) __riscv_vluxseg7ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_i8mf8(...) __riscv_vluxseg7ei32_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg7ei32_v_i8mf8_m(...) __riscv_vluxseg7ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_u16m1(...) __riscv_vluxseg7ei32_v_u16m1(__VA_ARGS__) |
| #define | vluxseg7ei32_v_u16m1_m(...) __riscv_vluxseg7ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_u16mf2(...) __riscv_vluxseg7ei32_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg7ei32_v_u16mf2_m(...) __riscv_vluxseg7ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_u16mf4(...) __riscv_vluxseg7ei32_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg7ei32_v_u16mf4_m(...) __riscv_vluxseg7ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_u32m1(...) __riscv_vluxseg7ei32_v_u32m1(__VA_ARGS__) |
| #define | vluxseg7ei32_v_u32m1_m(...) __riscv_vluxseg7ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_u32mf2(...) __riscv_vluxseg7ei32_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg7ei32_v_u32mf2_m(...) __riscv_vluxseg7ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_u64m1(...) __riscv_vluxseg7ei32_v_u64m1(__VA_ARGS__) |
| #define | vluxseg7ei32_v_u64m1_m(...) __riscv_vluxseg7ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_u8m1(...) __riscv_vluxseg7ei32_v_u8m1(__VA_ARGS__) |
| #define | vluxseg7ei32_v_u8m1_m(...) __riscv_vluxseg7ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_u8mf2(...) __riscv_vluxseg7ei32_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg7ei32_v_u8mf2_m(...) __riscv_vluxseg7ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_u8mf4(...) __riscv_vluxseg7ei32_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg7ei32_v_u8mf4_m(...) __riscv_vluxseg7ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg7ei32_v_u8mf8(...) __riscv_vluxseg7ei32_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg7ei32_v_u8mf8_m(...) __riscv_vluxseg7ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_f16m1(...) __riscv_vluxseg7ei64_v_f16m1(__VA_ARGS__) |
| #define | vluxseg7ei64_v_f16m1_m(...) __riscv_vluxseg7ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_f16mf2(...) __riscv_vluxseg7ei64_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg7ei64_v_f16mf2_m(...) __riscv_vluxseg7ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_f16mf4(...) __riscv_vluxseg7ei64_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg7ei64_v_f16mf4_m(...) __riscv_vluxseg7ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_f32m1(...) __riscv_vluxseg7ei64_v_f32m1(__VA_ARGS__) |
| #define | vluxseg7ei64_v_f32m1_m(...) __riscv_vluxseg7ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_f32mf2(...) __riscv_vluxseg7ei64_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg7ei64_v_f32mf2_m(...) __riscv_vluxseg7ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_f64m1(...) __riscv_vluxseg7ei64_v_f64m1(__VA_ARGS__) |
| #define | vluxseg7ei64_v_f64m1_m(...) __riscv_vluxseg7ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_i16m1(...) __riscv_vluxseg7ei64_v_i16m1(__VA_ARGS__) |
| #define | vluxseg7ei64_v_i16m1_m(...) __riscv_vluxseg7ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_i16mf2(...) __riscv_vluxseg7ei64_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg7ei64_v_i16mf2_m(...) __riscv_vluxseg7ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_i16mf4(...) __riscv_vluxseg7ei64_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg7ei64_v_i16mf4_m(...) __riscv_vluxseg7ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_i32m1(...) __riscv_vluxseg7ei64_v_i32m1(__VA_ARGS__) |
| #define | vluxseg7ei64_v_i32m1_m(...) __riscv_vluxseg7ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_i32mf2(...) __riscv_vluxseg7ei64_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg7ei64_v_i32mf2_m(...) __riscv_vluxseg7ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_i64m1(...) __riscv_vluxseg7ei64_v_i64m1(__VA_ARGS__) |
| #define | vluxseg7ei64_v_i64m1_m(...) __riscv_vluxseg7ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_i8m1(...) __riscv_vluxseg7ei64_v_i8m1(__VA_ARGS__) |
| #define | vluxseg7ei64_v_i8m1_m(...) __riscv_vluxseg7ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_i8mf2(...) __riscv_vluxseg7ei64_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg7ei64_v_i8mf2_m(...) __riscv_vluxseg7ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_i8mf4(...) __riscv_vluxseg7ei64_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg7ei64_v_i8mf4_m(...) __riscv_vluxseg7ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_i8mf8(...) __riscv_vluxseg7ei64_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg7ei64_v_i8mf8_m(...) __riscv_vluxseg7ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_u16m1(...) __riscv_vluxseg7ei64_v_u16m1(__VA_ARGS__) |
| #define | vluxseg7ei64_v_u16m1_m(...) __riscv_vluxseg7ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_u16mf2(...) __riscv_vluxseg7ei64_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg7ei64_v_u16mf2_m(...) __riscv_vluxseg7ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_u16mf4(...) __riscv_vluxseg7ei64_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg7ei64_v_u16mf4_m(...) __riscv_vluxseg7ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_u32m1(...) __riscv_vluxseg7ei64_v_u32m1(__VA_ARGS__) |
| #define | vluxseg7ei64_v_u32m1_m(...) __riscv_vluxseg7ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_u32mf2(...) __riscv_vluxseg7ei64_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg7ei64_v_u32mf2_m(...) __riscv_vluxseg7ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_u64m1(...) __riscv_vluxseg7ei64_v_u64m1(__VA_ARGS__) |
| #define | vluxseg7ei64_v_u64m1_m(...) __riscv_vluxseg7ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_u8m1(...) __riscv_vluxseg7ei64_v_u8m1(__VA_ARGS__) |
| #define | vluxseg7ei64_v_u8m1_m(...) __riscv_vluxseg7ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_u8mf2(...) __riscv_vluxseg7ei64_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg7ei64_v_u8mf2_m(...) __riscv_vluxseg7ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_u8mf4(...) __riscv_vluxseg7ei64_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg7ei64_v_u8mf4_m(...) __riscv_vluxseg7ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg7ei64_v_u8mf8(...) __riscv_vluxseg7ei64_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg7ei64_v_u8mf8_m(...) __riscv_vluxseg7ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_f16m1(...) __riscv_vluxseg7ei8_v_f16m1(__VA_ARGS__) |
| #define | vluxseg7ei8_v_f16m1_m(...) __riscv_vluxseg7ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_f16mf2(...) __riscv_vluxseg7ei8_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg7ei8_v_f16mf2_m(...) __riscv_vluxseg7ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_f16mf4(...) __riscv_vluxseg7ei8_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg7ei8_v_f16mf4_m(...) __riscv_vluxseg7ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_f32m1(...) __riscv_vluxseg7ei8_v_f32m1(__VA_ARGS__) |
| #define | vluxseg7ei8_v_f32m1_m(...) __riscv_vluxseg7ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_f32mf2(...) __riscv_vluxseg7ei8_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg7ei8_v_f32mf2_m(...) __riscv_vluxseg7ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_f64m1(...) __riscv_vluxseg7ei8_v_f64m1(__VA_ARGS__) |
| #define | vluxseg7ei8_v_f64m1_m(...) __riscv_vluxseg7ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_i16m1(...) __riscv_vluxseg7ei8_v_i16m1(__VA_ARGS__) |
| #define | vluxseg7ei8_v_i16m1_m(...) __riscv_vluxseg7ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_i16mf2(...) __riscv_vluxseg7ei8_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg7ei8_v_i16mf2_m(...) __riscv_vluxseg7ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_i16mf4(...) __riscv_vluxseg7ei8_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg7ei8_v_i16mf4_m(...) __riscv_vluxseg7ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_i32m1(...) __riscv_vluxseg7ei8_v_i32m1(__VA_ARGS__) |
| #define | vluxseg7ei8_v_i32m1_m(...) __riscv_vluxseg7ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_i32mf2(...) __riscv_vluxseg7ei8_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg7ei8_v_i32mf2_m(...) __riscv_vluxseg7ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_i64m1(...) __riscv_vluxseg7ei8_v_i64m1(__VA_ARGS__) |
| #define | vluxseg7ei8_v_i64m1_m(...) __riscv_vluxseg7ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_i8m1(...) __riscv_vluxseg7ei8_v_i8m1(__VA_ARGS__) |
| #define | vluxseg7ei8_v_i8m1_m(...) __riscv_vluxseg7ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_i8mf2(...) __riscv_vluxseg7ei8_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg7ei8_v_i8mf2_m(...) __riscv_vluxseg7ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_i8mf4(...) __riscv_vluxseg7ei8_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg7ei8_v_i8mf4_m(...) __riscv_vluxseg7ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_i8mf8(...) __riscv_vluxseg7ei8_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg7ei8_v_i8mf8_m(...) __riscv_vluxseg7ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_u16m1(...) __riscv_vluxseg7ei8_v_u16m1(__VA_ARGS__) |
| #define | vluxseg7ei8_v_u16m1_m(...) __riscv_vluxseg7ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_u16mf2(...) __riscv_vluxseg7ei8_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg7ei8_v_u16mf2_m(...) __riscv_vluxseg7ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_u16mf4(...) __riscv_vluxseg7ei8_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg7ei8_v_u16mf4_m(...) __riscv_vluxseg7ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_u32m1(...) __riscv_vluxseg7ei8_v_u32m1(__VA_ARGS__) |
| #define | vluxseg7ei8_v_u32m1_m(...) __riscv_vluxseg7ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_u32mf2(...) __riscv_vluxseg7ei8_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg7ei8_v_u32mf2_m(...) __riscv_vluxseg7ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_u64m1(...) __riscv_vluxseg7ei8_v_u64m1(__VA_ARGS__) |
| #define | vluxseg7ei8_v_u64m1_m(...) __riscv_vluxseg7ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_u8m1(...) __riscv_vluxseg7ei8_v_u8m1(__VA_ARGS__) |
| #define | vluxseg7ei8_v_u8m1_m(...) __riscv_vluxseg7ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_u8mf2(...) __riscv_vluxseg7ei8_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg7ei8_v_u8mf2_m(...) __riscv_vluxseg7ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_u8mf4(...) __riscv_vluxseg7ei8_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg7ei8_v_u8mf4_m(...) __riscv_vluxseg7ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg7ei8_v_u8mf8(...) __riscv_vluxseg7ei8_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg7ei8_v_u8mf8_m(...) __riscv_vluxseg7ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_f16m1(...) __riscv_vluxseg8ei16_v_f16m1(__VA_ARGS__) |
| #define | vluxseg8ei16_v_f16m1_m(...) __riscv_vluxseg8ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_f16mf2(...) __riscv_vluxseg8ei16_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg8ei16_v_f16mf2_m(...) __riscv_vluxseg8ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_f16mf4(...) __riscv_vluxseg8ei16_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg8ei16_v_f16mf4_m(...) __riscv_vluxseg8ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_f32m1(...) __riscv_vluxseg8ei16_v_f32m1(__VA_ARGS__) |
| #define | vluxseg8ei16_v_f32m1_m(...) __riscv_vluxseg8ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_f32mf2(...) __riscv_vluxseg8ei16_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg8ei16_v_f32mf2_m(...) __riscv_vluxseg8ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_f64m1(...) __riscv_vluxseg8ei16_v_f64m1(__VA_ARGS__) |
| #define | vluxseg8ei16_v_f64m1_m(...) __riscv_vluxseg8ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_i16m1(...) __riscv_vluxseg8ei16_v_i16m1(__VA_ARGS__) |
| #define | vluxseg8ei16_v_i16m1_m(...) __riscv_vluxseg8ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_i16mf2(...) __riscv_vluxseg8ei16_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg8ei16_v_i16mf2_m(...) __riscv_vluxseg8ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_i16mf4(...) __riscv_vluxseg8ei16_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg8ei16_v_i16mf4_m(...) __riscv_vluxseg8ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_i32m1(...) __riscv_vluxseg8ei16_v_i32m1(__VA_ARGS__) |
| #define | vluxseg8ei16_v_i32m1_m(...) __riscv_vluxseg8ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_i32mf2(...) __riscv_vluxseg8ei16_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg8ei16_v_i32mf2_m(...) __riscv_vluxseg8ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_i64m1(...) __riscv_vluxseg8ei16_v_i64m1(__VA_ARGS__) |
| #define | vluxseg8ei16_v_i64m1_m(...) __riscv_vluxseg8ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_i8m1(...) __riscv_vluxseg8ei16_v_i8m1(__VA_ARGS__) |
| #define | vluxseg8ei16_v_i8m1_m(...) __riscv_vluxseg8ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_i8mf2(...) __riscv_vluxseg8ei16_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg8ei16_v_i8mf2_m(...) __riscv_vluxseg8ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_i8mf4(...) __riscv_vluxseg8ei16_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg8ei16_v_i8mf4_m(...) __riscv_vluxseg8ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_i8mf8(...) __riscv_vluxseg8ei16_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg8ei16_v_i8mf8_m(...) __riscv_vluxseg8ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_u16m1(...) __riscv_vluxseg8ei16_v_u16m1(__VA_ARGS__) |
| #define | vluxseg8ei16_v_u16m1_m(...) __riscv_vluxseg8ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_u16mf2(...) __riscv_vluxseg8ei16_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg8ei16_v_u16mf2_m(...) __riscv_vluxseg8ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_u16mf4(...) __riscv_vluxseg8ei16_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg8ei16_v_u16mf4_m(...) __riscv_vluxseg8ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_u32m1(...) __riscv_vluxseg8ei16_v_u32m1(__VA_ARGS__) |
| #define | vluxseg8ei16_v_u32m1_m(...) __riscv_vluxseg8ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_u32mf2(...) __riscv_vluxseg8ei16_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg8ei16_v_u32mf2_m(...) __riscv_vluxseg8ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_u64m1(...) __riscv_vluxseg8ei16_v_u64m1(__VA_ARGS__) |
| #define | vluxseg8ei16_v_u64m1_m(...) __riscv_vluxseg8ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_u8m1(...) __riscv_vluxseg8ei16_v_u8m1(__VA_ARGS__) |
| #define | vluxseg8ei16_v_u8m1_m(...) __riscv_vluxseg8ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_u8mf2(...) __riscv_vluxseg8ei16_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg8ei16_v_u8mf2_m(...) __riscv_vluxseg8ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_u8mf4(...) __riscv_vluxseg8ei16_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg8ei16_v_u8mf4_m(...) __riscv_vluxseg8ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg8ei16_v_u8mf8(...) __riscv_vluxseg8ei16_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg8ei16_v_u8mf8_m(...) __riscv_vluxseg8ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_f16m1(...) __riscv_vluxseg8ei32_v_f16m1(__VA_ARGS__) |
| #define | vluxseg8ei32_v_f16m1_m(...) __riscv_vluxseg8ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_f16mf2(...) __riscv_vluxseg8ei32_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg8ei32_v_f16mf2_m(...) __riscv_vluxseg8ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_f16mf4(...) __riscv_vluxseg8ei32_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg8ei32_v_f16mf4_m(...) __riscv_vluxseg8ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_f32m1(...) __riscv_vluxseg8ei32_v_f32m1(__VA_ARGS__) |
| #define | vluxseg8ei32_v_f32m1_m(...) __riscv_vluxseg8ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_f32mf2(...) __riscv_vluxseg8ei32_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg8ei32_v_f32mf2_m(...) __riscv_vluxseg8ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_f64m1(...) __riscv_vluxseg8ei32_v_f64m1(__VA_ARGS__) |
| #define | vluxseg8ei32_v_f64m1_m(...) __riscv_vluxseg8ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_i16m1(...) __riscv_vluxseg8ei32_v_i16m1(__VA_ARGS__) |
| #define | vluxseg8ei32_v_i16m1_m(...) __riscv_vluxseg8ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_i16mf2(...) __riscv_vluxseg8ei32_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg8ei32_v_i16mf2_m(...) __riscv_vluxseg8ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_i16mf4(...) __riscv_vluxseg8ei32_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg8ei32_v_i16mf4_m(...) __riscv_vluxseg8ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_i32m1(...) __riscv_vluxseg8ei32_v_i32m1(__VA_ARGS__) |
| #define | vluxseg8ei32_v_i32m1_m(...) __riscv_vluxseg8ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_i32mf2(...) __riscv_vluxseg8ei32_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg8ei32_v_i32mf2_m(...) __riscv_vluxseg8ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_i64m1(...) __riscv_vluxseg8ei32_v_i64m1(__VA_ARGS__) |
| #define | vluxseg8ei32_v_i64m1_m(...) __riscv_vluxseg8ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_i8m1(...) __riscv_vluxseg8ei32_v_i8m1(__VA_ARGS__) |
| #define | vluxseg8ei32_v_i8m1_m(...) __riscv_vluxseg8ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_i8mf2(...) __riscv_vluxseg8ei32_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg8ei32_v_i8mf2_m(...) __riscv_vluxseg8ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_i8mf4(...) __riscv_vluxseg8ei32_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg8ei32_v_i8mf4_m(...) __riscv_vluxseg8ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_i8mf8(...) __riscv_vluxseg8ei32_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg8ei32_v_i8mf8_m(...) __riscv_vluxseg8ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_u16m1(...) __riscv_vluxseg8ei32_v_u16m1(__VA_ARGS__) |
| #define | vluxseg8ei32_v_u16m1_m(...) __riscv_vluxseg8ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_u16mf2(...) __riscv_vluxseg8ei32_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg8ei32_v_u16mf2_m(...) __riscv_vluxseg8ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_u16mf4(...) __riscv_vluxseg8ei32_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg8ei32_v_u16mf4_m(...) __riscv_vluxseg8ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_u32m1(...) __riscv_vluxseg8ei32_v_u32m1(__VA_ARGS__) |
| #define | vluxseg8ei32_v_u32m1_m(...) __riscv_vluxseg8ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_u32mf2(...) __riscv_vluxseg8ei32_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg8ei32_v_u32mf2_m(...) __riscv_vluxseg8ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_u64m1(...) __riscv_vluxseg8ei32_v_u64m1(__VA_ARGS__) |
| #define | vluxseg8ei32_v_u64m1_m(...) __riscv_vluxseg8ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_u8m1(...) __riscv_vluxseg8ei32_v_u8m1(__VA_ARGS__) |
| #define | vluxseg8ei32_v_u8m1_m(...) __riscv_vluxseg8ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_u8mf2(...) __riscv_vluxseg8ei32_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg8ei32_v_u8mf2_m(...) __riscv_vluxseg8ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_u8mf4(...) __riscv_vluxseg8ei32_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg8ei32_v_u8mf4_m(...) __riscv_vluxseg8ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg8ei32_v_u8mf8(...) __riscv_vluxseg8ei32_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg8ei32_v_u8mf8_m(...) __riscv_vluxseg8ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_f16m1(...) __riscv_vluxseg8ei64_v_f16m1(__VA_ARGS__) |
| #define | vluxseg8ei64_v_f16m1_m(...) __riscv_vluxseg8ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_f16mf2(...) __riscv_vluxseg8ei64_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg8ei64_v_f16mf2_m(...) __riscv_vluxseg8ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_f16mf4(...) __riscv_vluxseg8ei64_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg8ei64_v_f16mf4_m(...) __riscv_vluxseg8ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_f32m1(...) __riscv_vluxseg8ei64_v_f32m1(__VA_ARGS__) |
| #define | vluxseg8ei64_v_f32m1_m(...) __riscv_vluxseg8ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_f32mf2(...) __riscv_vluxseg8ei64_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg8ei64_v_f32mf2_m(...) __riscv_vluxseg8ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_f64m1(...) __riscv_vluxseg8ei64_v_f64m1(__VA_ARGS__) |
| #define | vluxseg8ei64_v_f64m1_m(...) __riscv_vluxseg8ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_i16m1(...) __riscv_vluxseg8ei64_v_i16m1(__VA_ARGS__) |
| #define | vluxseg8ei64_v_i16m1_m(...) __riscv_vluxseg8ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_i16mf2(...) __riscv_vluxseg8ei64_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg8ei64_v_i16mf2_m(...) __riscv_vluxseg8ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_i16mf4(...) __riscv_vluxseg8ei64_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg8ei64_v_i16mf4_m(...) __riscv_vluxseg8ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_i32m1(...) __riscv_vluxseg8ei64_v_i32m1(__VA_ARGS__) |
| #define | vluxseg8ei64_v_i32m1_m(...) __riscv_vluxseg8ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_i32mf2(...) __riscv_vluxseg8ei64_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg8ei64_v_i32mf2_m(...) __riscv_vluxseg8ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_i64m1(...) __riscv_vluxseg8ei64_v_i64m1(__VA_ARGS__) |
| #define | vluxseg8ei64_v_i64m1_m(...) __riscv_vluxseg8ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_i8m1(...) __riscv_vluxseg8ei64_v_i8m1(__VA_ARGS__) |
| #define | vluxseg8ei64_v_i8m1_m(...) __riscv_vluxseg8ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_i8mf2(...) __riscv_vluxseg8ei64_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg8ei64_v_i8mf2_m(...) __riscv_vluxseg8ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_i8mf4(...) __riscv_vluxseg8ei64_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg8ei64_v_i8mf4_m(...) __riscv_vluxseg8ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_i8mf8(...) __riscv_vluxseg8ei64_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg8ei64_v_i8mf8_m(...) __riscv_vluxseg8ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_u16m1(...) __riscv_vluxseg8ei64_v_u16m1(__VA_ARGS__) |
| #define | vluxseg8ei64_v_u16m1_m(...) __riscv_vluxseg8ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_u16mf2(...) __riscv_vluxseg8ei64_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg8ei64_v_u16mf2_m(...) __riscv_vluxseg8ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_u16mf4(...) __riscv_vluxseg8ei64_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg8ei64_v_u16mf4_m(...) __riscv_vluxseg8ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_u32m1(...) __riscv_vluxseg8ei64_v_u32m1(__VA_ARGS__) |
| #define | vluxseg8ei64_v_u32m1_m(...) __riscv_vluxseg8ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_u32mf2(...) __riscv_vluxseg8ei64_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg8ei64_v_u32mf2_m(...) __riscv_vluxseg8ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_u64m1(...) __riscv_vluxseg8ei64_v_u64m1(__VA_ARGS__) |
| #define | vluxseg8ei64_v_u64m1_m(...) __riscv_vluxseg8ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_u8m1(...) __riscv_vluxseg8ei64_v_u8m1(__VA_ARGS__) |
| #define | vluxseg8ei64_v_u8m1_m(...) __riscv_vluxseg8ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_u8mf2(...) __riscv_vluxseg8ei64_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg8ei64_v_u8mf2_m(...) __riscv_vluxseg8ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_u8mf4(...) __riscv_vluxseg8ei64_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg8ei64_v_u8mf4_m(...) __riscv_vluxseg8ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg8ei64_v_u8mf8(...) __riscv_vluxseg8ei64_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg8ei64_v_u8mf8_m(...) __riscv_vluxseg8ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_f16m1(...) __riscv_vluxseg8ei8_v_f16m1(__VA_ARGS__) |
| #define | vluxseg8ei8_v_f16m1_m(...) __riscv_vluxseg8ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_f16mf2(...) __riscv_vluxseg8ei8_v_f16mf2(__VA_ARGS__) |
| #define | vluxseg8ei8_v_f16mf2_m(...) __riscv_vluxseg8ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_f16mf4(...) __riscv_vluxseg8ei8_v_f16mf4(__VA_ARGS__) |
| #define | vluxseg8ei8_v_f16mf4_m(...) __riscv_vluxseg8ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_f32m1(...) __riscv_vluxseg8ei8_v_f32m1(__VA_ARGS__) |
| #define | vluxseg8ei8_v_f32m1_m(...) __riscv_vluxseg8ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_f32mf2(...) __riscv_vluxseg8ei8_v_f32mf2(__VA_ARGS__) |
| #define | vluxseg8ei8_v_f32mf2_m(...) __riscv_vluxseg8ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_f64m1(...) __riscv_vluxseg8ei8_v_f64m1(__VA_ARGS__) |
| #define | vluxseg8ei8_v_f64m1_m(...) __riscv_vluxseg8ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_i16m1(...) __riscv_vluxseg8ei8_v_i16m1(__VA_ARGS__) |
| #define | vluxseg8ei8_v_i16m1_m(...) __riscv_vluxseg8ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_i16mf2(...) __riscv_vluxseg8ei8_v_i16mf2(__VA_ARGS__) |
| #define | vluxseg8ei8_v_i16mf2_m(...) __riscv_vluxseg8ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_i16mf4(...) __riscv_vluxseg8ei8_v_i16mf4(__VA_ARGS__) |
| #define | vluxseg8ei8_v_i16mf4_m(...) __riscv_vluxseg8ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_i32m1(...) __riscv_vluxseg8ei8_v_i32m1(__VA_ARGS__) |
| #define | vluxseg8ei8_v_i32m1_m(...) __riscv_vluxseg8ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_i32mf2(...) __riscv_vluxseg8ei8_v_i32mf2(__VA_ARGS__) |
| #define | vluxseg8ei8_v_i32mf2_m(...) __riscv_vluxseg8ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_i64m1(...) __riscv_vluxseg8ei8_v_i64m1(__VA_ARGS__) |
| #define | vluxseg8ei8_v_i64m1_m(...) __riscv_vluxseg8ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_i8m1(...) __riscv_vluxseg8ei8_v_i8m1(__VA_ARGS__) |
| #define | vluxseg8ei8_v_i8m1_m(...) __riscv_vluxseg8ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_i8mf2(...) __riscv_vluxseg8ei8_v_i8mf2(__VA_ARGS__) |
| #define | vluxseg8ei8_v_i8mf2_m(...) __riscv_vluxseg8ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_i8mf4(...) __riscv_vluxseg8ei8_v_i8mf4(__VA_ARGS__) |
| #define | vluxseg8ei8_v_i8mf4_m(...) __riscv_vluxseg8ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_i8mf8(...) __riscv_vluxseg8ei8_v_i8mf8(__VA_ARGS__) |
| #define | vluxseg8ei8_v_i8mf8_m(...) __riscv_vluxseg8ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_u16m1(...) __riscv_vluxseg8ei8_v_u16m1(__VA_ARGS__) |
| #define | vluxseg8ei8_v_u16m1_m(...) __riscv_vluxseg8ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_u16mf2(...) __riscv_vluxseg8ei8_v_u16mf2(__VA_ARGS__) |
| #define | vluxseg8ei8_v_u16mf2_m(...) __riscv_vluxseg8ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_u16mf4(...) __riscv_vluxseg8ei8_v_u16mf4(__VA_ARGS__) |
| #define | vluxseg8ei8_v_u16mf4_m(...) __riscv_vluxseg8ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_u32m1(...) __riscv_vluxseg8ei8_v_u32m1(__VA_ARGS__) |
| #define | vluxseg8ei8_v_u32m1_m(...) __riscv_vluxseg8ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_u32mf2(...) __riscv_vluxseg8ei8_v_u32mf2(__VA_ARGS__) |
| #define | vluxseg8ei8_v_u32mf2_m(...) __riscv_vluxseg8ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_u64m1(...) __riscv_vluxseg8ei8_v_u64m1(__VA_ARGS__) |
| #define | vluxseg8ei8_v_u64m1_m(...) __riscv_vluxseg8ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_u8m1(...) __riscv_vluxseg8ei8_v_u8m1(__VA_ARGS__) |
| #define | vluxseg8ei8_v_u8m1_m(...) __riscv_vluxseg8ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_u8mf2(...) __riscv_vluxseg8ei8_v_u8mf2(__VA_ARGS__) |
| #define | vluxseg8ei8_v_u8mf2_m(...) __riscv_vluxseg8ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_u8mf4(...) __riscv_vluxseg8ei8_v_u8mf4(__VA_ARGS__) |
| #define | vluxseg8ei8_v_u8mf4_m(...) __riscv_vluxseg8ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vluxseg8ei8_v_u8mf8(...) __riscv_vluxseg8ei8_v_u8mf8(__VA_ARGS__) |
| #define | vluxseg8ei8_v_u8mf8_m(...) __riscv_vluxseg8ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i16m1(...) __riscv_vmacc_vv_i16m1_tu(__VA_ARGS__) |
| #define | vmacc_vv_i16m1_m(...) __riscv_vmacc_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i16m2(...) __riscv_vmacc_vv_i16m2_tu(__VA_ARGS__) |
| #define | vmacc_vv_i16m2_m(...) __riscv_vmacc_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i16m4(...) __riscv_vmacc_vv_i16m4_tu(__VA_ARGS__) |
| #define | vmacc_vv_i16m4_m(...) __riscv_vmacc_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i16m8(...) __riscv_vmacc_vv_i16m8_tu(__VA_ARGS__) |
| #define | vmacc_vv_i16m8_m(...) __riscv_vmacc_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i16mf2(...) __riscv_vmacc_vv_i16mf2_tu(__VA_ARGS__) |
| #define | vmacc_vv_i16mf2_m(...) __riscv_vmacc_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i16mf4(...) __riscv_vmacc_vv_i16mf4_tu(__VA_ARGS__) |
| #define | vmacc_vv_i16mf4_m(...) __riscv_vmacc_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i32m1(...) __riscv_vmacc_vv_i32m1_tu(__VA_ARGS__) |
| #define | vmacc_vv_i32m1_m(...) __riscv_vmacc_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i32m2(...) __riscv_vmacc_vv_i32m2_tu(__VA_ARGS__) |
| #define | vmacc_vv_i32m2_m(...) __riscv_vmacc_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i32m4(...) __riscv_vmacc_vv_i32m4_tu(__VA_ARGS__) |
| #define | vmacc_vv_i32m4_m(...) __riscv_vmacc_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i32m8(...) __riscv_vmacc_vv_i32m8_tu(__VA_ARGS__) |
| #define | vmacc_vv_i32m8_m(...) __riscv_vmacc_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i32mf2(...) __riscv_vmacc_vv_i32mf2_tu(__VA_ARGS__) |
| #define | vmacc_vv_i32mf2_m(...) __riscv_vmacc_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i64m1(...) __riscv_vmacc_vv_i64m1_tu(__VA_ARGS__) |
| #define | vmacc_vv_i64m1_m(...) __riscv_vmacc_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i64m2(...) __riscv_vmacc_vv_i64m2_tu(__VA_ARGS__) |
| #define | vmacc_vv_i64m2_m(...) __riscv_vmacc_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i64m4(...) __riscv_vmacc_vv_i64m4_tu(__VA_ARGS__) |
| #define | vmacc_vv_i64m4_m(...) __riscv_vmacc_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i64m8(...) __riscv_vmacc_vv_i64m8_tu(__VA_ARGS__) |
| #define | vmacc_vv_i64m8_m(...) __riscv_vmacc_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i8m1(...) __riscv_vmacc_vv_i8m1_tu(__VA_ARGS__) |
| #define | vmacc_vv_i8m1_m(...) __riscv_vmacc_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i8m2(...) __riscv_vmacc_vv_i8m2_tu(__VA_ARGS__) |
| #define | vmacc_vv_i8m2_m(...) __riscv_vmacc_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i8m4(...) __riscv_vmacc_vv_i8m4_tu(__VA_ARGS__) |
| #define | vmacc_vv_i8m4_m(...) __riscv_vmacc_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i8m8(...) __riscv_vmacc_vv_i8m8_tu(__VA_ARGS__) |
| #define | vmacc_vv_i8m8_m(...) __riscv_vmacc_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i8mf2(...) __riscv_vmacc_vv_i8mf2_tu(__VA_ARGS__) |
| #define | vmacc_vv_i8mf2_m(...) __riscv_vmacc_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i8mf4(...) __riscv_vmacc_vv_i8mf4_tu(__VA_ARGS__) |
| #define | vmacc_vv_i8mf4_m(...) __riscv_vmacc_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vmacc_vv_i8mf8(...) __riscv_vmacc_vv_i8mf8_tu(__VA_ARGS__) |
| #define | vmacc_vv_i8mf8_m(...) __riscv_vmacc_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u16m1(...) __riscv_vmacc_vv_u16m1_tu(__VA_ARGS__) |
| #define | vmacc_vv_u16m1_m(...) __riscv_vmacc_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u16m2(...) __riscv_vmacc_vv_u16m2_tu(__VA_ARGS__) |
| #define | vmacc_vv_u16m2_m(...) __riscv_vmacc_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u16m4(...) __riscv_vmacc_vv_u16m4_tu(__VA_ARGS__) |
| #define | vmacc_vv_u16m4_m(...) __riscv_vmacc_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u16m8(...) __riscv_vmacc_vv_u16m8_tu(__VA_ARGS__) |
| #define | vmacc_vv_u16m8_m(...) __riscv_vmacc_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u16mf2(...) __riscv_vmacc_vv_u16mf2_tu(__VA_ARGS__) |
| #define | vmacc_vv_u16mf2_m(...) __riscv_vmacc_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u16mf4(...) __riscv_vmacc_vv_u16mf4_tu(__VA_ARGS__) |
| #define | vmacc_vv_u16mf4_m(...) __riscv_vmacc_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u32m1(...) __riscv_vmacc_vv_u32m1_tu(__VA_ARGS__) |
| #define | vmacc_vv_u32m1_m(...) __riscv_vmacc_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u32m2(...) __riscv_vmacc_vv_u32m2_tu(__VA_ARGS__) |
| #define | vmacc_vv_u32m2_m(...) __riscv_vmacc_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u32m4(...) __riscv_vmacc_vv_u32m4_tu(__VA_ARGS__) |
| #define | vmacc_vv_u32m4_m(...) __riscv_vmacc_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u32m8(...) __riscv_vmacc_vv_u32m8_tu(__VA_ARGS__) |
| #define | vmacc_vv_u32m8_m(...) __riscv_vmacc_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u32mf2(...) __riscv_vmacc_vv_u32mf2_tu(__VA_ARGS__) |
| #define | vmacc_vv_u32mf2_m(...) __riscv_vmacc_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u64m1(...) __riscv_vmacc_vv_u64m1_tu(__VA_ARGS__) |
| #define | vmacc_vv_u64m1_m(...) __riscv_vmacc_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u64m2(...) __riscv_vmacc_vv_u64m2_tu(__VA_ARGS__) |
| #define | vmacc_vv_u64m2_m(...) __riscv_vmacc_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u64m4(...) __riscv_vmacc_vv_u64m4_tu(__VA_ARGS__) |
| #define | vmacc_vv_u64m4_m(...) __riscv_vmacc_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u64m8(...) __riscv_vmacc_vv_u64m8_tu(__VA_ARGS__) |
| #define | vmacc_vv_u64m8_m(...) __riscv_vmacc_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u8m1(...) __riscv_vmacc_vv_u8m1_tu(__VA_ARGS__) |
| #define | vmacc_vv_u8m1_m(...) __riscv_vmacc_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u8m2(...) __riscv_vmacc_vv_u8m2_tu(__VA_ARGS__) |
| #define | vmacc_vv_u8m2_m(...) __riscv_vmacc_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u8m4(...) __riscv_vmacc_vv_u8m4_tu(__VA_ARGS__) |
| #define | vmacc_vv_u8m4_m(...) __riscv_vmacc_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u8m8(...) __riscv_vmacc_vv_u8m8_tu(__VA_ARGS__) |
| #define | vmacc_vv_u8m8_m(...) __riscv_vmacc_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u8mf2(...) __riscv_vmacc_vv_u8mf2_tu(__VA_ARGS__) |
| #define | vmacc_vv_u8mf2_m(...) __riscv_vmacc_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u8mf4(...) __riscv_vmacc_vv_u8mf4_tu(__VA_ARGS__) |
| #define | vmacc_vv_u8mf4_m(...) __riscv_vmacc_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vmacc_vv_u8mf8(...) __riscv_vmacc_vv_u8mf8_tu(__VA_ARGS__) |
| #define | vmacc_vv_u8mf8_m(...) __riscv_vmacc_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i16m1(...) __riscv_vmacc_vx_i16m1_tu(__VA_ARGS__) |
| #define | vmacc_vx_i16m1_m(...) __riscv_vmacc_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i16m2(...) __riscv_vmacc_vx_i16m2_tu(__VA_ARGS__) |
| #define | vmacc_vx_i16m2_m(...) __riscv_vmacc_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i16m4(...) __riscv_vmacc_vx_i16m4_tu(__VA_ARGS__) |
| #define | vmacc_vx_i16m4_m(...) __riscv_vmacc_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i16m8(...) __riscv_vmacc_vx_i16m8_tu(__VA_ARGS__) |
| #define | vmacc_vx_i16m8_m(...) __riscv_vmacc_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i16mf2(...) __riscv_vmacc_vx_i16mf2_tu(__VA_ARGS__) |
| #define | vmacc_vx_i16mf2_m(...) __riscv_vmacc_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i16mf4(...) __riscv_vmacc_vx_i16mf4_tu(__VA_ARGS__) |
| #define | vmacc_vx_i16mf4_m(...) __riscv_vmacc_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i32m1(...) __riscv_vmacc_vx_i32m1_tu(__VA_ARGS__) |
| #define | vmacc_vx_i32m1_m(...) __riscv_vmacc_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i32m2(...) __riscv_vmacc_vx_i32m2_tu(__VA_ARGS__) |
| #define | vmacc_vx_i32m2_m(...) __riscv_vmacc_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i32m4(...) __riscv_vmacc_vx_i32m4_tu(__VA_ARGS__) |
| #define | vmacc_vx_i32m4_m(...) __riscv_vmacc_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i32m8(...) __riscv_vmacc_vx_i32m8_tu(__VA_ARGS__) |
| #define | vmacc_vx_i32m8_m(...) __riscv_vmacc_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i32mf2(...) __riscv_vmacc_vx_i32mf2_tu(__VA_ARGS__) |
| #define | vmacc_vx_i32mf2_m(...) __riscv_vmacc_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i64m1(...) __riscv_vmacc_vx_i64m1_tu(__VA_ARGS__) |
| #define | vmacc_vx_i64m1_m(...) __riscv_vmacc_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i64m2(...) __riscv_vmacc_vx_i64m2_tu(__VA_ARGS__) |
| #define | vmacc_vx_i64m2_m(...) __riscv_vmacc_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i64m4(...) __riscv_vmacc_vx_i64m4_tu(__VA_ARGS__) |
| #define | vmacc_vx_i64m4_m(...) __riscv_vmacc_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i64m8(...) __riscv_vmacc_vx_i64m8_tu(__VA_ARGS__) |
| #define | vmacc_vx_i64m8_m(...) __riscv_vmacc_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i8m1(...) __riscv_vmacc_vx_i8m1_tu(__VA_ARGS__) |
| #define | vmacc_vx_i8m1_m(...) __riscv_vmacc_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i8m2(...) __riscv_vmacc_vx_i8m2_tu(__VA_ARGS__) |
| #define | vmacc_vx_i8m2_m(...) __riscv_vmacc_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i8m4(...) __riscv_vmacc_vx_i8m4_tu(__VA_ARGS__) |
| #define | vmacc_vx_i8m4_m(...) __riscv_vmacc_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i8m8(...) __riscv_vmacc_vx_i8m8_tu(__VA_ARGS__) |
| #define | vmacc_vx_i8m8_m(...) __riscv_vmacc_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i8mf2(...) __riscv_vmacc_vx_i8mf2_tu(__VA_ARGS__) |
| #define | vmacc_vx_i8mf2_m(...) __riscv_vmacc_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i8mf4(...) __riscv_vmacc_vx_i8mf4_tu(__VA_ARGS__) |
| #define | vmacc_vx_i8mf4_m(...) __riscv_vmacc_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vmacc_vx_i8mf8(...) __riscv_vmacc_vx_i8mf8_tu(__VA_ARGS__) |
| #define | vmacc_vx_i8mf8_m(...) __riscv_vmacc_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u16m1(...) __riscv_vmacc_vx_u16m1_tu(__VA_ARGS__) |
| #define | vmacc_vx_u16m1_m(...) __riscv_vmacc_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u16m2(...) __riscv_vmacc_vx_u16m2_tu(__VA_ARGS__) |
| #define | vmacc_vx_u16m2_m(...) __riscv_vmacc_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u16m4(...) __riscv_vmacc_vx_u16m4_tu(__VA_ARGS__) |
| #define | vmacc_vx_u16m4_m(...) __riscv_vmacc_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u16m8(...) __riscv_vmacc_vx_u16m8_tu(__VA_ARGS__) |
| #define | vmacc_vx_u16m8_m(...) __riscv_vmacc_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u16mf2(...) __riscv_vmacc_vx_u16mf2_tu(__VA_ARGS__) |
| #define | vmacc_vx_u16mf2_m(...) __riscv_vmacc_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u16mf4(...) __riscv_vmacc_vx_u16mf4_tu(__VA_ARGS__) |
| #define | vmacc_vx_u16mf4_m(...) __riscv_vmacc_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u32m1(...) __riscv_vmacc_vx_u32m1_tu(__VA_ARGS__) |
| #define | vmacc_vx_u32m1_m(...) __riscv_vmacc_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u32m2(...) __riscv_vmacc_vx_u32m2_tu(__VA_ARGS__) |
| #define | vmacc_vx_u32m2_m(...) __riscv_vmacc_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u32m4(...) __riscv_vmacc_vx_u32m4_tu(__VA_ARGS__) |
| #define | vmacc_vx_u32m4_m(...) __riscv_vmacc_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u32m8(...) __riscv_vmacc_vx_u32m8_tu(__VA_ARGS__) |
| #define | vmacc_vx_u32m8_m(...) __riscv_vmacc_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u32mf2(...) __riscv_vmacc_vx_u32mf2_tu(__VA_ARGS__) |
| #define | vmacc_vx_u32mf2_m(...) __riscv_vmacc_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u64m1(...) __riscv_vmacc_vx_u64m1_tu(__VA_ARGS__) |
| #define | vmacc_vx_u64m1_m(...) __riscv_vmacc_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u64m2(...) __riscv_vmacc_vx_u64m2_tu(__VA_ARGS__) |
| #define | vmacc_vx_u64m2_m(...) __riscv_vmacc_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u64m4(...) __riscv_vmacc_vx_u64m4_tu(__VA_ARGS__) |
| #define | vmacc_vx_u64m4_m(...) __riscv_vmacc_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u64m8(...) __riscv_vmacc_vx_u64m8_tu(__VA_ARGS__) |
| #define | vmacc_vx_u64m8_m(...) __riscv_vmacc_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u8m1(...) __riscv_vmacc_vx_u8m1_tu(__VA_ARGS__) |
| #define | vmacc_vx_u8m1_m(...) __riscv_vmacc_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u8m2(...) __riscv_vmacc_vx_u8m2_tu(__VA_ARGS__) |
| #define | vmacc_vx_u8m2_m(...) __riscv_vmacc_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u8m4(...) __riscv_vmacc_vx_u8m4_tu(__VA_ARGS__) |
| #define | vmacc_vx_u8m4_m(...) __riscv_vmacc_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u8m8(...) __riscv_vmacc_vx_u8m8_tu(__VA_ARGS__) |
| #define | vmacc_vx_u8m8_m(...) __riscv_vmacc_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u8mf2(...) __riscv_vmacc_vx_u8mf2_tu(__VA_ARGS__) |
| #define | vmacc_vx_u8mf2_m(...) __riscv_vmacc_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u8mf4(...) __riscv_vmacc_vx_u8mf4_tu(__VA_ARGS__) |
| #define | vmacc_vx_u8mf4_m(...) __riscv_vmacc_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vmacc_vx_u8mf8(...) __riscv_vmacc_vx_u8mf8_tu(__VA_ARGS__) |
| #define | vmacc_vx_u8mf8_m(...) __riscv_vmacc_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vmadc_vv_i16m1_b16(...) __riscv_vmadc_vv_i16m1_b16(__VA_ARGS__) |
| #define | vmadc_vv_i16m2_b8(...) __riscv_vmadc_vv_i16m2_b8(__VA_ARGS__) |
| #define | vmadc_vv_i16m4_b4(...) __riscv_vmadc_vv_i16m4_b4(__VA_ARGS__) |
| #define | vmadc_vv_i16m8_b2(...) __riscv_vmadc_vv_i16m8_b2(__VA_ARGS__) |
| #define | vmadc_vv_i16mf2_b32(...) __riscv_vmadc_vv_i16mf2_b32(__VA_ARGS__) |
| #define | vmadc_vv_i16mf4_b64(...) __riscv_vmadc_vv_i16mf4_b64(__VA_ARGS__) |
| #define | vmadc_vv_i32m1_b32(...) __riscv_vmadc_vv_i32m1_b32(__VA_ARGS__) |
| #define | vmadc_vv_i32m2_b16(...) __riscv_vmadc_vv_i32m2_b16(__VA_ARGS__) |
| #define | vmadc_vv_i32m4_b8(...) __riscv_vmadc_vv_i32m4_b8(__VA_ARGS__) |
| #define | vmadc_vv_i32m8_b4(...) __riscv_vmadc_vv_i32m8_b4(__VA_ARGS__) |
| #define | vmadc_vv_i32mf2_b64(...) __riscv_vmadc_vv_i32mf2_b64(__VA_ARGS__) |
| #define | vmadc_vv_i64m1_b64(...) __riscv_vmadc_vv_i64m1_b64(__VA_ARGS__) |
| #define | vmadc_vv_i64m2_b32(...) __riscv_vmadc_vv_i64m2_b32(__VA_ARGS__) |
| #define | vmadc_vv_i64m4_b16(...) __riscv_vmadc_vv_i64m4_b16(__VA_ARGS__) |
| #define | vmadc_vv_i64m8_b8(...) __riscv_vmadc_vv_i64m8_b8(__VA_ARGS__) |
| #define | vmadc_vv_i8m1_b8(...) __riscv_vmadc_vv_i8m1_b8(__VA_ARGS__) |
| #define | vmadc_vv_i8m2_b4(...) __riscv_vmadc_vv_i8m2_b4(__VA_ARGS__) |
| #define | vmadc_vv_i8m4_b2(...) __riscv_vmadc_vv_i8m4_b2(__VA_ARGS__) |
| #define | vmadc_vv_i8m8_b1(...) __riscv_vmadc_vv_i8m8_b1(__VA_ARGS__) |
| #define | vmadc_vv_i8mf2_b16(...) __riscv_vmadc_vv_i8mf2_b16(__VA_ARGS__) |
| #define | vmadc_vv_i8mf4_b32(...) __riscv_vmadc_vv_i8mf4_b32(__VA_ARGS__) |
| #define | vmadc_vv_i8mf8_b64(...) __riscv_vmadc_vv_i8mf8_b64(__VA_ARGS__) |
| #define | vmadc_vv_u16m1_b16(...) __riscv_vmadc_vv_u16m1_b16(__VA_ARGS__) |
| #define | vmadc_vv_u16m2_b8(...) __riscv_vmadc_vv_u16m2_b8(__VA_ARGS__) |
| #define | vmadc_vv_u16m4_b4(...) __riscv_vmadc_vv_u16m4_b4(__VA_ARGS__) |
| #define | vmadc_vv_u16m8_b2(...) __riscv_vmadc_vv_u16m8_b2(__VA_ARGS__) |
| #define | vmadc_vv_u16mf2_b32(...) __riscv_vmadc_vv_u16mf2_b32(__VA_ARGS__) |
| #define | vmadc_vv_u16mf4_b64(...) __riscv_vmadc_vv_u16mf4_b64(__VA_ARGS__) |
| #define | vmadc_vv_u32m1_b32(...) __riscv_vmadc_vv_u32m1_b32(__VA_ARGS__) |
| #define | vmadc_vv_u32m2_b16(...) __riscv_vmadc_vv_u32m2_b16(__VA_ARGS__) |
| #define | vmadc_vv_u32m4_b8(...) __riscv_vmadc_vv_u32m4_b8(__VA_ARGS__) |
| #define | vmadc_vv_u32m8_b4(...) __riscv_vmadc_vv_u32m8_b4(__VA_ARGS__) |
| #define | vmadc_vv_u32mf2_b64(...) __riscv_vmadc_vv_u32mf2_b64(__VA_ARGS__) |
| #define | vmadc_vv_u64m1_b64(...) __riscv_vmadc_vv_u64m1_b64(__VA_ARGS__) |
| #define | vmadc_vv_u64m2_b32(...) __riscv_vmadc_vv_u64m2_b32(__VA_ARGS__) |
| #define | vmadc_vv_u64m4_b16(...) __riscv_vmadc_vv_u64m4_b16(__VA_ARGS__) |
| #define | vmadc_vv_u64m8_b8(...) __riscv_vmadc_vv_u64m8_b8(__VA_ARGS__) |
| #define | vmadc_vv_u8m1_b8(...) __riscv_vmadc_vv_u8m1_b8(__VA_ARGS__) |
| #define | vmadc_vv_u8m2_b4(...) __riscv_vmadc_vv_u8m2_b4(__VA_ARGS__) |
| #define | vmadc_vv_u8m4_b2(...) __riscv_vmadc_vv_u8m4_b2(__VA_ARGS__) |
| #define | vmadc_vv_u8m8_b1(...) __riscv_vmadc_vv_u8m8_b1(__VA_ARGS__) |
| #define | vmadc_vv_u8mf2_b16(...) __riscv_vmadc_vv_u8mf2_b16(__VA_ARGS__) |
| #define | vmadc_vv_u8mf4_b32(...) __riscv_vmadc_vv_u8mf4_b32(__VA_ARGS__) |
| #define | vmadc_vv_u8mf8_b64(...) __riscv_vmadc_vv_u8mf8_b64(__VA_ARGS__) |
| #define | vmadc_vvm_i16m1_b16(...) __riscv_vmadc_vvm_i16m1_b16(__VA_ARGS__) |
| #define | vmadc_vvm_i16m2_b8(...) __riscv_vmadc_vvm_i16m2_b8(__VA_ARGS__) |
| #define | vmadc_vvm_i16m4_b4(...) __riscv_vmadc_vvm_i16m4_b4(__VA_ARGS__) |
| #define | vmadc_vvm_i16m8_b2(...) __riscv_vmadc_vvm_i16m8_b2(__VA_ARGS__) |
| #define | vmadc_vvm_i16mf2_b32(...) __riscv_vmadc_vvm_i16mf2_b32(__VA_ARGS__) |
| #define | vmadc_vvm_i16mf4_b64(...) __riscv_vmadc_vvm_i16mf4_b64(__VA_ARGS__) |
| #define | vmadc_vvm_i32m1_b32(...) __riscv_vmadc_vvm_i32m1_b32(__VA_ARGS__) |
| #define | vmadc_vvm_i32m2_b16(...) __riscv_vmadc_vvm_i32m2_b16(__VA_ARGS__) |
| #define | vmadc_vvm_i32m4_b8(...) __riscv_vmadc_vvm_i32m4_b8(__VA_ARGS__) |
| #define | vmadc_vvm_i32m8_b4(...) __riscv_vmadc_vvm_i32m8_b4(__VA_ARGS__) |
| #define | vmadc_vvm_i32mf2_b64(...) __riscv_vmadc_vvm_i32mf2_b64(__VA_ARGS__) |
| #define | vmadc_vvm_i64m1_b64(...) __riscv_vmadc_vvm_i64m1_b64(__VA_ARGS__) |
| #define | vmadc_vvm_i64m2_b32(...) __riscv_vmadc_vvm_i64m2_b32(__VA_ARGS__) |
| #define | vmadc_vvm_i64m4_b16(...) __riscv_vmadc_vvm_i64m4_b16(__VA_ARGS__) |
| #define | vmadc_vvm_i64m8_b8(...) __riscv_vmadc_vvm_i64m8_b8(__VA_ARGS__) |
| #define | vmadc_vvm_i8m1_b8(...) __riscv_vmadc_vvm_i8m1_b8(__VA_ARGS__) |
| #define | vmadc_vvm_i8m2_b4(...) __riscv_vmadc_vvm_i8m2_b4(__VA_ARGS__) |
| #define | vmadc_vvm_i8m4_b2(...) __riscv_vmadc_vvm_i8m4_b2(__VA_ARGS__) |
| #define | vmadc_vvm_i8m8_b1(...) __riscv_vmadc_vvm_i8m8_b1(__VA_ARGS__) |
| #define | vmadc_vvm_i8mf2_b16(...) __riscv_vmadc_vvm_i8mf2_b16(__VA_ARGS__) |
| #define | vmadc_vvm_i8mf4_b32(...) __riscv_vmadc_vvm_i8mf4_b32(__VA_ARGS__) |
| #define | vmadc_vvm_i8mf8_b64(...) __riscv_vmadc_vvm_i8mf8_b64(__VA_ARGS__) |
| #define | vmadc_vvm_u16m1_b16(...) __riscv_vmadc_vvm_u16m1_b16(__VA_ARGS__) |
| #define | vmadc_vvm_u16m2_b8(...) __riscv_vmadc_vvm_u16m2_b8(__VA_ARGS__) |
| #define | vmadc_vvm_u16m4_b4(...) __riscv_vmadc_vvm_u16m4_b4(__VA_ARGS__) |
| #define | vmadc_vvm_u16m8_b2(...) __riscv_vmadc_vvm_u16m8_b2(__VA_ARGS__) |
| #define | vmadc_vvm_u16mf2_b32(...) __riscv_vmadc_vvm_u16mf2_b32(__VA_ARGS__) |
| #define | vmadc_vvm_u16mf4_b64(...) __riscv_vmadc_vvm_u16mf4_b64(__VA_ARGS__) |
| #define | vmadc_vvm_u32m1_b32(...) __riscv_vmadc_vvm_u32m1_b32(__VA_ARGS__) |
| #define | vmadc_vvm_u32m2_b16(...) __riscv_vmadc_vvm_u32m2_b16(__VA_ARGS__) |
| #define | vmadc_vvm_u32m4_b8(...) __riscv_vmadc_vvm_u32m4_b8(__VA_ARGS__) |
| #define | vmadc_vvm_u32m8_b4(...) __riscv_vmadc_vvm_u32m8_b4(__VA_ARGS__) |
| #define | vmadc_vvm_u32mf2_b64(...) __riscv_vmadc_vvm_u32mf2_b64(__VA_ARGS__) |
| #define | vmadc_vvm_u64m1_b64(...) __riscv_vmadc_vvm_u64m1_b64(__VA_ARGS__) |
| #define | vmadc_vvm_u64m2_b32(...) __riscv_vmadc_vvm_u64m2_b32(__VA_ARGS__) |
| #define | vmadc_vvm_u64m4_b16(...) __riscv_vmadc_vvm_u64m4_b16(__VA_ARGS__) |
| #define | vmadc_vvm_u64m8_b8(...) __riscv_vmadc_vvm_u64m8_b8(__VA_ARGS__) |
| #define | vmadc_vvm_u8m1_b8(...) __riscv_vmadc_vvm_u8m1_b8(__VA_ARGS__) |
| #define | vmadc_vvm_u8m2_b4(...) __riscv_vmadc_vvm_u8m2_b4(__VA_ARGS__) |
| #define | vmadc_vvm_u8m4_b2(...) __riscv_vmadc_vvm_u8m4_b2(__VA_ARGS__) |
| #define | vmadc_vvm_u8m8_b1(...) __riscv_vmadc_vvm_u8m8_b1(__VA_ARGS__) |
| #define | vmadc_vvm_u8mf2_b16(...) __riscv_vmadc_vvm_u8mf2_b16(__VA_ARGS__) |
| #define | vmadc_vvm_u8mf4_b32(...) __riscv_vmadc_vvm_u8mf4_b32(__VA_ARGS__) |
| #define | vmadc_vvm_u8mf8_b64(...) __riscv_vmadc_vvm_u8mf8_b64(__VA_ARGS__) |
| #define | vmadc_vx_i16m1_b16(...) __riscv_vmadc_vx_i16m1_b16(__VA_ARGS__) |
| #define | vmadc_vx_i16m2_b8(...) __riscv_vmadc_vx_i16m2_b8(__VA_ARGS__) |
| #define | vmadc_vx_i16m4_b4(...) __riscv_vmadc_vx_i16m4_b4(__VA_ARGS__) |
| #define | vmadc_vx_i16m8_b2(...) __riscv_vmadc_vx_i16m8_b2(__VA_ARGS__) |
| #define | vmadc_vx_i16mf2_b32(...) __riscv_vmadc_vx_i16mf2_b32(__VA_ARGS__) |
| #define | vmadc_vx_i16mf4_b64(...) __riscv_vmadc_vx_i16mf4_b64(__VA_ARGS__) |
| #define | vmadc_vx_i32m1_b32(...) __riscv_vmadc_vx_i32m1_b32(__VA_ARGS__) |
| #define | vmadc_vx_i32m2_b16(...) __riscv_vmadc_vx_i32m2_b16(__VA_ARGS__) |
| #define | vmadc_vx_i32m4_b8(...) __riscv_vmadc_vx_i32m4_b8(__VA_ARGS__) |
| #define | vmadc_vx_i32m8_b4(...) __riscv_vmadc_vx_i32m8_b4(__VA_ARGS__) |
| #define | vmadc_vx_i32mf2_b64(...) __riscv_vmadc_vx_i32mf2_b64(__VA_ARGS__) |
| #define | vmadc_vx_i64m1_b64(...) __riscv_vmadc_vx_i64m1_b64(__VA_ARGS__) |
| #define | vmadc_vx_i64m2_b32(...) __riscv_vmadc_vx_i64m2_b32(__VA_ARGS__) |
| #define | vmadc_vx_i64m4_b16(...) __riscv_vmadc_vx_i64m4_b16(__VA_ARGS__) |
| #define | vmadc_vx_i64m8_b8(...) __riscv_vmadc_vx_i64m8_b8(__VA_ARGS__) |
| #define | vmadc_vx_i8m1_b8(...) __riscv_vmadc_vx_i8m1_b8(__VA_ARGS__) |
| #define | vmadc_vx_i8m2_b4(...) __riscv_vmadc_vx_i8m2_b4(__VA_ARGS__) |
| #define | vmadc_vx_i8m4_b2(...) __riscv_vmadc_vx_i8m4_b2(__VA_ARGS__) |
| #define | vmadc_vx_i8m8_b1(...) __riscv_vmadc_vx_i8m8_b1(__VA_ARGS__) |
| #define | vmadc_vx_i8mf2_b16(...) __riscv_vmadc_vx_i8mf2_b16(__VA_ARGS__) |
| #define | vmadc_vx_i8mf4_b32(...) __riscv_vmadc_vx_i8mf4_b32(__VA_ARGS__) |
| #define | vmadc_vx_i8mf8_b64(...) __riscv_vmadc_vx_i8mf8_b64(__VA_ARGS__) |
| #define | vmadc_vx_u16m1_b16(...) __riscv_vmadc_vx_u16m1_b16(__VA_ARGS__) |
| #define | vmadc_vx_u16m2_b8(...) __riscv_vmadc_vx_u16m2_b8(__VA_ARGS__) |
| #define | vmadc_vx_u16m4_b4(...) __riscv_vmadc_vx_u16m4_b4(__VA_ARGS__) |
| #define | vmadc_vx_u16m8_b2(...) __riscv_vmadc_vx_u16m8_b2(__VA_ARGS__) |
| #define | vmadc_vx_u16mf2_b32(...) __riscv_vmadc_vx_u16mf2_b32(__VA_ARGS__) |
| #define | vmadc_vx_u16mf4_b64(...) __riscv_vmadc_vx_u16mf4_b64(__VA_ARGS__) |
| #define | vmadc_vx_u32m1_b32(...) __riscv_vmadc_vx_u32m1_b32(__VA_ARGS__) |
| #define | vmadc_vx_u32m2_b16(...) __riscv_vmadc_vx_u32m2_b16(__VA_ARGS__) |
| #define | vmadc_vx_u32m4_b8(...) __riscv_vmadc_vx_u32m4_b8(__VA_ARGS__) |
| #define | vmadc_vx_u32m8_b4(...) __riscv_vmadc_vx_u32m8_b4(__VA_ARGS__) |
| #define | vmadc_vx_u32mf2_b64(...) __riscv_vmadc_vx_u32mf2_b64(__VA_ARGS__) |
| #define | vmadc_vx_u64m1_b64(...) __riscv_vmadc_vx_u64m1_b64(__VA_ARGS__) |
| #define | vmadc_vx_u64m2_b32(...) __riscv_vmadc_vx_u64m2_b32(__VA_ARGS__) |
| #define | vmadc_vx_u64m4_b16(...) __riscv_vmadc_vx_u64m4_b16(__VA_ARGS__) |
| #define | vmadc_vx_u64m8_b8(...) __riscv_vmadc_vx_u64m8_b8(__VA_ARGS__) |
| #define | vmadc_vx_u8m1_b8(...) __riscv_vmadc_vx_u8m1_b8(__VA_ARGS__) |
| #define | vmadc_vx_u8m2_b4(...) __riscv_vmadc_vx_u8m2_b4(__VA_ARGS__) |
| #define | vmadc_vx_u8m4_b2(...) __riscv_vmadc_vx_u8m4_b2(__VA_ARGS__) |
| #define | vmadc_vx_u8m8_b1(...) __riscv_vmadc_vx_u8m8_b1(__VA_ARGS__) |
| #define | vmadc_vx_u8mf2_b16(...) __riscv_vmadc_vx_u8mf2_b16(__VA_ARGS__) |
| #define | vmadc_vx_u8mf4_b32(...) __riscv_vmadc_vx_u8mf4_b32(__VA_ARGS__) |
| #define | vmadc_vx_u8mf8_b64(...) __riscv_vmadc_vx_u8mf8_b64(__VA_ARGS__) |
| #define | vmadc_vxm_i16m1_b16(...) __riscv_vmadc_vxm_i16m1_b16(__VA_ARGS__) |
| #define | vmadc_vxm_i16m2_b8(...) __riscv_vmadc_vxm_i16m2_b8(__VA_ARGS__) |
| #define | vmadc_vxm_i16m4_b4(...) __riscv_vmadc_vxm_i16m4_b4(__VA_ARGS__) |
| #define | vmadc_vxm_i16m8_b2(...) __riscv_vmadc_vxm_i16m8_b2(__VA_ARGS__) |
| #define | vmadc_vxm_i16mf2_b32(...) __riscv_vmadc_vxm_i16mf2_b32(__VA_ARGS__) |
| #define | vmadc_vxm_i16mf4_b64(...) __riscv_vmadc_vxm_i16mf4_b64(__VA_ARGS__) |
| #define | vmadc_vxm_i32m1_b32(...) __riscv_vmadc_vxm_i32m1_b32(__VA_ARGS__) |
| #define | vmadc_vxm_i32m2_b16(...) __riscv_vmadc_vxm_i32m2_b16(__VA_ARGS__) |
| #define | vmadc_vxm_i32m4_b8(...) __riscv_vmadc_vxm_i32m4_b8(__VA_ARGS__) |
| #define | vmadc_vxm_i32m8_b4(...) __riscv_vmadc_vxm_i32m8_b4(__VA_ARGS__) |
| #define | vmadc_vxm_i32mf2_b64(...) __riscv_vmadc_vxm_i32mf2_b64(__VA_ARGS__) |
| #define | vmadc_vxm_i64m1_b64(...) __riscv_vmadc_vxm_i64m1_b64(__VA_ARGS__) |
| #define | vmadc_vxm_i64m2_b32(...) __riscv_vmadc_vxm_i64m2_b32(__VA_ARGS__) |
| #define | vmadc_vxm_i64m4_b16(...) __riscv_vmadc_vxm_i64m4_b16(__VA_ARGS__) |
| #define | vmadc_vxm_i64m8_b8(...) __riscv_vmadc_vxm_i64m8_b8(__VA_ARGS__) |
| #define | vmadc_vxm_i8m1_b8(...) __riscv_vmadc_vxm_i8m1_b8(__VA_ARGS__) |
| #define | vmadc_vxm_i8m2_b4(...) __riscv_vmadc_vxm_i8m2_b4(__VA_ARGS__) |
| #define | vmadc_vxm_i8m4_b2(...) __riscv_vmadc_vxm_i8m4_b2(__VA_ARGS__) |
| #define | vmadc_vxm_i8m8_b1(...) __riscv_vmadc_vxm_i8m8_b1(__VA_ARGS__) |
| #define | vmadc_vxm_i8mf2_b16(...) __riscv_vmadc_vxm_i8mf2_b16(__VA_ARGS__) |
| #define | vmadc_vxm_i8mf4_b32(...) __riscv_vmadc_vxm_i8mf4_b32(__VA_ARGS__) |
| #define | vmadc_vxm_i8mf8_b64(...) __riscv_vmadc_vxm_i8mf8_b64(__VA_ARGS__) |
| #define | vmadc_vxm_u16m1_b16(...) __riscv_vmadc_vxm_u16m1_b16(__VA_ARGS__) |
| #define | vmadc_vxm_u16m2_b8(...) __riscv_vmadc_vxm_u16m2_b8(__VA_ARGS__) |
| #define | vmadc_vxm_u16m4_b4(...) __riscv_vmadc_vxm_u16m4_b4(__VA_ARGS__) |
| #define | vmadc_vxm_u16m8_b2(...) __riscv_vmadc_vxm_u16m8_b2(__VA_ARGS__) |
| #define | vmadc_vxm_u16mf2_b32(...) __riscv_vmadc_vxm_u16mf2_b32(__VA_ARGS__) |
| #define | vmadc_vxm_u16mf4_b64(...) __riscv_vmadc_vxm_u16mf4_b64(__VA_ARGS__) |
| #define | vmadc_vxm_u32m1_b32(...) __riscv_vmadc_vxm_u32m1_b32(__VA_ARGS__) |
| #define | vmadc_vxm_u32m2_b16(...) __riscv_vmadc_vxm_u32m2_b16(__VA_ARGS__) |
| #define | vmadc_vxm_u32m4_b8(...) __riscv_vmadc_vxm_u32m4_b8(__VA_ARGS__) |
| #define | vmadc_vxm_u32m8_b4(...) __riscv_vmadc_vxm_u32m8_b4(__VA_ARGS__) |
| #define | vmadc_vxm_u32mf2_b64(...) __riscv_vmadc_vxm_u32mf2_b64(__VA_ARGS__) |
| #define | vmadc_vxm_u64m1_b64(...) __riscv_vmadc_vxm_u64m1_b64(__VA_ARGS__) |
| #define | vmadc_vxm_u64m2_b32(...) __riscv_vmadc_vxm_u64m2_b32(__VA_ARGS__) |
| #define | vmadc_vxm_u64m4_b16(...) __riscv_vmadc_vxm_u64m4_b16(__VA_ARGS__) |
| #define | vmadc_vxm_u64m8_b8(...) __riscv_vmadc_vxm_u64m8_b8(__VA_ARGS__) |
| #define | vmadc_vxm_u8m1_b8(...) __riscv_vmadc_vxm_u8m1_b8(__VA_ARGS__) |
| #define | vmadc_vxm_u8m2_b4(...) __riscv_vmadc_vxm_u8m2_b4(__VA_ARGS__) |
| #define | vmadc_vxm_u8m4_b2(...) __riscv_vmadc_vxm_u8m4_b2(__VA_ARGS__) |
| #define | vmadc_vxm_u8m8_b1(...) __riscv_vmadc_vxm_u8m8_b1(__VA_ARGS__) |
| #define | vmadc_vxm_u8mf2_b16(...) __riscv_vmadc_vxm_u8mf2_b16(__VA_ARGS__) |
| #define | vmadc_vxm_u8mf4_b32(...) __riscv_vmadc_vxm_u8mf4_b32(__VA_ARGS__) |
| #define | vmadc_vxm_u8mf8_b64(...) __riscv_vmadc_vxm_u8mf8_b64(__VA_ARGS__) |
| #define | vmadd_vv_i16m1(...) __riscv_vmadd_vv_i16m1_tu(__VA_ARGS__) |
| #define | vmadd_vv_i16m1_m(...) __riscv_vmadd_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vmadd_vv_i16m2(...) __riscv_vmadd_vv_i16m2_tu(__VA_ARGS__) |
| #define | vmadd_vv_i16m2_m(...) __riscv_vmadd_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vmadd_vv_i16m4(...) __riscv_vmadd_vv_i16m4_tu(__VA_ARGS__) |
| #define | vmadd_vv_i16m4_m(...) __riscv_vmadd_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vmadd_vv_i16m8(...) __riscv_vmadd_vv_i16m8_tu(__VA_ARGS__) |
| #define | vmadd_vv_i16m8_m(...) __riscv_vmadd_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vmadd_vv_i16mf2(...) __riscv_vmadd_vv_i16mf2_tu(__VA_ARGS__) |
| #define | vmadd_vv_i16mf2_m(...) __riscv_vmadd_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vmadd_vv_i16mf4(...) __riscv_vmadd_vv_i16mf4_tu(__VA_ARGS__) |
| #define | vmadd_vv_i16mf4_m(...) __riscv_vmadd_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vmadd_vv_i32m1(...) __riscv_vmadd_vv_i32m1_tu(__VA_ARGS__) |
| #define | vmadd_vv_i32m1_m(...) __riscv_vmadd_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vmadd_vv_i32m2(...) __riscv_vmadd_vv_i32m2_tu(__VA_ARGS__) |
| #define | vmadd_vv_i32m2_m(...) __riscv_vmadd_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vmadd_vv_i32m4(...) __riscv_vmadd_vv_i32m4_tu(__VA_ARGS__) |
| #define | vmadd_vv_i32m4_m(...) __riscv_vmadd_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vmadd_vv_i32m8(...) __riscv_vmadd_vv_i32m8_tu(__VA_ARGS__) |
| #define | vmadd_vv_i32m8_m(...) __riscv_vmadd_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vmadd_vv_i32mf2(...) __riscv_vmadd_vv_i32mf2_tu(__VA_ARGS__) |
| #define | vmadd_vv_i32mf2_m(...) __riscv_vmadd_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vmadd_vv_i64m1(...) __riscv_vmadd_vv_i64m1_tu(__VA_ARGS__) |
| #define | vmadd_vv_i64m1_m(...) __riscv_vmadd_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vmadd_vv_i64m2(...) __riscv_vmadd_vv_i64m2_tu(__VA_ARGS__) |
| #define | vmadd_vv_i64m2_m(...) __riscv_vmadd_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vmadd_vv_i64m4(...) __riscv_vmadd_vv_i64m4_tu(__VA_ARGS__) |
| #define | vmadd_vv_i64m4_m(...) __riscv_vmadd_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vmadd_vv_i64m8(...) __riscv_vmadd_vv_i64m8_tu(__VA_ARGS__) |
| #define | vmadd_vv_i64m8_m(...) __riscv_vmadd_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vmadd_vv_i8m1(...) __riscv_vmadd_vv_i8m1_tu(__VA_ARGS__) |
| #define | vmadd_vv_i8m1_m(...) __riscv_vmadd_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vmadd_vv_i8m2(...) __riscv_vmadd_vv_i8m2_tu(__VA_ARGS__) |
| #define | vmadd_vv_i8m2_m(...) __riscv_vmadd_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vmadd_vv_i8m4(...) __riscv_vmadd_vv_i8m4_tu(__VA_ARGS__) |
| #define | vmadd_vv_i8m4_m(...) __riscv_vmadd_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vmadd_vv_i8m8(...) __riscv_vmadd_vv_i8m8_tu(__VA_ARGS__) |
| #define | vmadd_vv_i8m8_m(...) __riscv_vmadd_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vmadd_vv_i8mf2(...) __riscv_vmadd_vv_i8mf2_tu(__VA_ARGS__) |
| #define | vmadd_vv_i8mf2_m(...) __riscv_vmadd_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vmadd_vv_i8mf4(...) __riscv_vmadd_vv_i8mf4_tu(__VA_ARGS__) |
| #define | vmadd_vv_i8mf4_m(...) __riscv_vmadd_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vmadd_vv_i8mf8(...) __riscv_vmadd_vv_i8mf8_tu(__VA_ARGS__) |
| #define | vmadd_vv_i8mf8_m(...) __riscv_vmadd_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u16m1(...) __riscv_vmadd_vv_u16m1_tu(__VA_ARGS__) |
| #define | vmadd_vv_u16m1_m(...) __riscv_vmadd_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u16m2(...) __riscv_vmadd_vv_u16m2_tu(__VA_ARGS__) |
| #define | vmadd_vv_u16m2_m(...) __riscv_vmadd_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u16m4(...) __riscv_vmadd_vv_u16m4_tu(__VA_ARGS__) |
| #define | vmadd_vv_u16m4_m(...) __riscv_vmadd_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u16m8(...) __riscv_vmadd_vv_u16m8_tu(__VA_ARGS__) |
| #define | vmadd_vv_u16m8_m(...) __riscv_vmadd_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u16mf2(...) __riscv_vmadd_vv_u16mf2_tu(__VA_ARGS__) |
| #define | vmadd_vv_u16mf2_m(...) __riscv_vmadd_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u16mf4(...) __riscv_vmadd_vv_u16mf4_tu(__VA_ARGS__) |
| #define | vmadd_vv_u16mf4_m(...) __riscv_vmadd_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u32m1(...) __riscv_vmadd_vv_u32m1_tu(__VA_ARGS__) |
| #define | vmadd_vv_u32m1_m(...) __riscv_vmadd_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u32m2(...) __riscv_vmadd_vv_u32m2_tu(__VA_ARGS__) |
| #define | vmadd_vv_u32m2_m(...) __riscv_vmadd_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u32m4(...) __riscv_vmadd_vv_u32m4_tu(__VA_ARGS__) |
| #define | vmadd_vv_u32m4_m(...) __riscv_vmadd_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u32m8(...) __riscv_vmadd_vv_u32m8_tu(__VA_ARGS__) |
| #define | vmadd_vv_u32m8_m(...) __riscv_vmadd_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u32mf2(...) __riscv_vmadd_vv_u32mf2_tu(__VA_ARGS__) |
| #define | vmadd_vv_u32mf2_m(...) __riscv_vmadd_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u64m1(...) __riscv_vmadd_vv_u64m1_tu(__VA_ARGS__) |
| #define | vmadd_vv_u64m1_m(...) __riscv_vmadd_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u64m2(...) __riscv_vmadd_vv_u64m2_tu(__VA_ARGS__) |
| #define | vmadd_vv_u64m2_m(...) __riscv_vmadd_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u64m4(...) __riscv_vmadd_vv_u64m4_tu(__VA_ARGS__) |
| #define | vmadd_vv_u64m4_m(...) __riscv_vmadd_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u64m8(...) __riscv_vmadd_vv_u64m8_tu(__VA_ARGS__) |
| #define | vmadd_vv_u64m8_m(...) __riscv_vmadd_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u8m1(...) __riscv_vmadd_vv_u8m1_tu(__VA_ARGS__) |
| #define | vmadd_vv_u8m1_m(...) __riscv_vmadd_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u8m2(...) __riscv_vmadd_vv_u8m2_tu(__VA_ARGS__) |
| #define | vmadd_vv_u8m2_m(...) __riscv_vmadd_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u8m4(...) __riscv_vmadd_vv_u8m4_tu(__VA_ARGS__) |
| #define | vmadd_vv_u8m4_m(...) __riscv_vmadd_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u8m8(...) __riscv_vmadd_vv_u8m8_tu(__VA_ARGS__) |
| #define | vmadd_vv_u8m8_m(...) __riscv_vmadd_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u8mf2(...) __riscv_vmadd_vv_u8mf2_tu(__VA_ARGS__) |
| #define | vmadd_vv_u8mf2_m(...) __riscv_vmadd_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u8mf4(...) __riscv_vmadd_vv_u8mf4_tu(__VA_ARGS__) |
| #define | vmadd_vv_u8mf4_m(...) __riscv_vmadd_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vmadd_vv_u8mf8(...) __riscv_vmadd_vv_u8mf8_tu(__VA_ARGS__) |
| #define | vmadd_vv_u8mf8_m(...) __riscv_vmadd_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i16m1(...) __riscv_vmadd_vx_i16m1_tu(__VA_ARGS__) |
| #define | vmadd_vx_i16m1_m(...) __riscv_vmadd_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i16m2(...) __riscv_vmadd_vx_i16m2_tu(__VA_ARGS__) |
| #define | vmadd_vx_i16m2_m(...) __riscv_vmadd_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i16m4(...) __riscv_vmadd_vx_i16m4_tu(__VA_ARGS__) |
| #define | vmadd_vx_i16m4_m(...) __riscv_vmadd_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i16m8(...) __riscv_vmadd_vx_i16m8_tu(__VA_ARGS__) |
| #define | vmadd_vx_i16m8_m(...) __riscv_vmadd_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i16mf2(...) __riscv_vmadd_vx_i16mf2_tu(__VA_ARGS__) |
| #define | vmadd_vx_i16mf2_m(...) __riscv_vmadd_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i16mf4(...) __riscv_vmadd_vx_i16mf4_tu(__VA_ARGS__) |
| #define | vmadd_vx_i16mf4_m(...) __riscv_vmadd_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i32m1(...) __riscv_vmadd_vx_i32m1_tu(__VA_ARGS__) |
| #define | vmadd_vx_i32m1_m(...) __riscv_vmadd_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i32m2(...) __riscv_vmadd_vx_i32m2_tu(__VA_ARGS__) |
| #define | vmadd_vx_i32m2_m(...) __riscv_vmadd_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i32m4(...) __riscv_vmadd_vx_i32m4_tu(__VA_ARGS__) |
| #define | vmadd_vx_i32m4_m(...) __riscv_vmadd_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i32m8(...) __riscv_vmadd_vx_i32m8_tu(__VA_ARGS__) |
| #define | vmadd_vx_i32m8_m(...) __riscv_vmadd_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i32mf2(...) __riscv_vmadd_vx_i32mf2_tu(__VA_ARGS__) |
| #define | vmadd_vx_i32mf2_m(...) __riscv_vmadd_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i64m1(...) __riscv_vmadd_vx_i64m1_tu(__VA_ARGS__) |
| #define | vmadd_vx_i64m1_m(...) __riscv_vmadd_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i64m2(...) __riscv_vmadd_vx_i64m2_tu(__VA_ARGS__) |
| #define | vmadd_vx_i64m2_m(...) __riscv_vmadd_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i64m4(...) __riscv_vmadd_vx_i64m4_tu(__VA_ARGS__) |
| #define | vmadd_vx_i64m4_m(...) __riscv_vmadd_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i64m8(...) __riscv_vmadd_vx_i64m8_tu(__VA_ARGS__) |
| #define | vmadd_vx_i64m8_m(...) __riscv_vmadd_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i8m1(...) __riscv_vmadd_vx_i8m1_tu(__VA_ARGS__) |
| #define | vmadd_vx_i8m1_m(...) __riscv_vmadd_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i8m2(...) __riscv_vmadd_vx_i8m2_tu(__VA_ARGS__) |
| #define | vmadd_vx_i8m2_m(...) __riscv_vmadd_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i8m4(...) __riscv_vmadd_vx_i8m4_tu(__VA_ARGS__) |
| #define | vmadd_vx_i8m4_m(...) __riscv_vmadd_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i8m8(...) __riscv_vmadd_vx_i8m8_tu(__VA_ARGS__) |
| #define | vmadd_vx_i8m8_m(...) __riscv_vmadd_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i8mf2(...) __riscv_vmadd_vx_i8mf2_tu(__VA_ARGS__) |
| #define | vmadd_vx_i8mf2_m(...) __riscv_vmadd_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i8mf4(...) __riscv_vmadd_vx_i8mf4_tu(__VA_ARGS__) |
| #define | vmadd_vx_i8mf4_m(...) __riscv_vmadd_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vmadd_vx_i8mf8(...) __riscv_vmadd_vx_i8mf8_tu(__VA_ARGS__) |
| #define | vmadd_vx_i8mf8_m(...) __riscv_vmadd_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u16m1(...) __riscv_vmadd_vx_u16m1_tu(__VA_ARGS__) |
| #define | vmadd_vx_u16m1_m(...) __riscv_vmadd_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u16m2(...) __riscv_vmadd_vx_u16m2_tu(__VA_ARGS__) |
| #define | vmadd_vx_u16m2_m(...) __riscv_vmadd_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u16m4(...) __riscv_vmadd_vx_u16m4_tu(__VA_ARGS__) |
| #define | vmadd_vx_u16m4_m(...) __riscv_vmadd_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u16m8(...) __riscv_vmadd_vx_u16m8_tu(__VA_ARGS__) |
| #define | vmadd_vx_u16m8_m(...) __riscv_vmadd_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u16mf2(...) __riscv_vmadd_vx_u16mf2_tu(__VA_ARGS__) |
| #define | vmadd_vx_u16mf2_m(...) __riscv_vmadd_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u16mf4(...) __riscv_vmadd_vx_u16mf4_tu(__VA_ARGS__) |
| #define | vmadd_vx_u16mf4_m(...) __riscv_vmadd_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u32m1(...) __riscv_vmadd_vx_u32m1_tu(__VA_ARGS__) |
| #define | vmadd_vx_u32m1_m(...) __riscv_vmadd_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u32m2(...) __riscv_vmadd_vx_u32m2_tu(__VA_ARGS__) |
| #define | vmadd_vx_u32m2_m(...) __riscv_vmadd_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u32m4(...) __riscv_vmadd_vx_u32m4_tu(__VA_ARGS__) |
| #define | vmadd_vx_u32m4_m(...) __riscv_vmadd_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u32m8(...) __riscv_vmadd_vx_u32m8_tu(__VA_ARGS__) |
| #define | vmadd_vx_u32m8_m(...) __riscv_vmadd_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u32mf2(...) __riscv_vmadd_vx_u32mf2_tu(__VA_ARGS__) |
| #define | vmadd_vx_u32mf2_m(...) __riscv_vmadd_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u64m1(...) __riscv_vmadd_vx_u64m1_tu(__VA_ARGS__) |
| #define | vmadd_vx_u64m1_m(...) __riscv_vmadd_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u64m2(...) __riscv_vmadd_vx_u64m2_tu(__VA_ARGS__) |
| #define | vmadd_vx_u64m2_m(...) __riscv_vmadd_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u64m4(...) __riscv_vmadd_vx_u64m4_tu(__VA_ARGS__) |
| #define | vmadd_vx_u64m4_m(...) __riscv_vmadd_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u64m8(...) __riscv_vmadd_vx_u64m8_tu(__VA_ARGS__) |
| #define | vmadd_vx_u64m8_m(...) __riscv_vmadd_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u8m1(...) __riscv_vmadd_vx_u8m1_tu(__VA_ARGS__) |
| #define | vmadd_vx_u8m1_m(...) __riscv_vmadd_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u8m2(...) __riscv_vmadd_vx_u8m2_tu(__VA_ARGS__) |
| #define | vmadd_vx_u8m2_m(...) __riscv_vmadd_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u8m4(...) __riscv_vmadd_vx_u8m4_tu(__VA_ARGS__) |
| #define | vmadd_vx_u8m4_m(...) __riscv_vmadd_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u8m8(...) __riscv_vmadd_vx_u8m8_tu(__VA_ARGS__) |
| #define | vmadd_vx_u8m8_m(...) __riscv_vmadd_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u8mf2(...) __riscv_vmadd_vx_u8mf2_tu(__VA_ARGS__) |
| #define | vmadd_vx_u8mf2_m(...) __riscv_vmadd_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u8mf4(...) __riscv_vmadd_vx_u8mf4_tu(__VA_ARGS__) |
| #define | vmadd_vx_u8mf4_m(...) __riscv_vmadd_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vmadd_vx_u8mf8(...) __riscv_vmadd_vx_u8mf8_tu(__VA_ARGS__) |
| #define | vmadd_vx_u8mf8_m(...) __riscv_vmadd_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vmand_mm_b1(...) __riscv_vmand_mm_b1(__VA_ARGS__) |
| #define | vmand_mm_b16(...) __riscv_vmand_mm_b16(__VA_ARGS__) |
| #define | vmand_mm_b2(...) __riscv_vmand_mm_b2(__VA_ARGS__) |
| #define | vmand_mm_b32(...) __riscv_vmand_mm_b32(__VA_ARGS__) |
| #define | vmand_mm_b4(...) __riscv_vmand_mm_b4(__VA_ARGS__) |
| #define | vmand_mm_b64(...) __riscv_vmand_mm_b64(__VA_ARGS__) |
| #define | vmand_mm_b8(...) __riscv_vmand_mm_b8(__VA_ARGS__) |
| #define | vmandn_mm_b1(...) __riscv_vmandn_mm_b1(__VA_ARGS__) |
| #define | vmandn_mm_b16(...) __riscv_vmandn_mm_b16(__VA_ARGS__) |
| #define | vmandn_mm_b2(...) __riscv_vmandn_mm_b2(__VA_ARGS__) |
| #define | vmandn_mm_b32(...) __riscv_vmandn_mm_b32(__VA_ARGS__) |
| #define | vmandn_mm_b4(...) __riscv_vmandn_mm_b4(__VA_ARGS__) |
| #define | vmandn_mm_b64(...) __riscv_vmandn_mm_b64(__VA_ARGS__) |
| #define | vmandn_mm_b8(...) __riscv_vmandn_mm_b8(__VA_ARGS__) |
| #define | vmax_vv_i16m1(...) __riscv_vmax_vv_i16m1(__VA_ARGS__) |
| #define | vmax_vv_i16m1_m(...) __riscv_vmax_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vmax_vv_i16m2(...) __riscv_vmax_vv_i16m2(__VA_ARGS__) |
| #define | vmax_vv_i16m2_m(...) __riscv_vmax_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vmax_vv_i16m4(...) __riscv_vmax_vv_i16m4(__VA_ARGS__) |
| #define | vmax_vv_i16m4_m(...) __riscv_vmax_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vmax_vv_i16m8(...) __riscv_vmax_vv_i16m8(__VA_ARGS__) |
| #define | vmax_vv_i16m8_m(...) __riscv_vmax_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vmax_vv_i16mf2(...) __riscv_vmax_vv_i16mf2(__VA_ARGS__) |
| #define | vmax_vv_i16mf2_m(...) __riscv_vmax_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vmax_vv_i16mf4(...) __riscv_vmax_vv_i16mf4(__VA_ARGS__) |
| #define | vmax_vv_i16mf4_m(...) __riscv_vmax_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vmax_vv_i32m1(...) __riscv_vmax_vv_i32m1(__VA_ARGS__) |
| #define | vmax_vv_i32m1_m(...) __riscv_vmax_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vmax_vv_i32m2(...) __riscv_vmax_vv_i32m2(__VA_ARGS__) |
| #define | vmax_vv_i32m2_m(...) __riscv_vmax_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vmax_vv_i32m4(...) __riscv_vmax_vv_i32m4(__VA_ARGS__) |
| #define | vmax_vv_i32m4_m(...) __riscv_vmax_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vmax_vv_i32m8(...) __riscv_vmax_vv_i32m8(__VA_ARGS__) |
| #define | vmax_vv_i32m8_m(...) __riscv_vmax_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vmax_vv_i32mf2(...) __riscv_vmax_vv_i32mf2(__VA_ARGS__) |
| #define | vmax_vv_i32mf2_m(...) __riscv_vmax_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vmax_vv_i64m1(...) __riscv_vmax_vv_i64m1(__VA_ARGS__) |
| #define | vmax_vv_i64m1_m(...) __riscv_vmax_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vmax_vv_i64m2(...) __riscv_vmax_vv_i64m2(__VA_ARGS__) |
| #define | vmax_vv_i64m2_m(...) __riscv_vmax_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vmax_vv_i64m4(...) __riscv_vmax_vv_i64m4(__VA_ARGS__) |
| #define | vmax_vv_i64m4_m(...) __riscv_vmax_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vmax_vv_i64m8(...) __riscv_vmax_vv_i64m8(__VA_ARGS__) |
| #define | vmax_vv_i64m8_m(...) __riscv_vmax_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vmax_vv_i8m1(...) __riscv_vmax_vv_i8m1(__VA_ARGS__) |
| #define | vmax_vv_i8m1_m(...) __riscv_vmax_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vmax_vv_i8m2(...) __riscv_vmax_vv_i8m2(__VA_ARGS__) |
| #define | vmax_vv_i8m2_m(...) __riscv_vmax_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vmax_vv_i8m4(...) __riscv_vmax_vv_i8m4(__VA_ARGS__) |
| #define | vmax_vv_i8m4_m(...) __riscv_vmax_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vmax_vv_i8m8(...) __riscv_vmax_vv_i8m8(__VA_ARGS__) |
| #define | vmax_vv_i8m8_m(...) __riscv_vmax_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vmax_vv_i8mf2(...) __riscv_vmax_vv_i8mf2(__VA_ARGS__) |
| #define | vmax_vv_i8mf2_m(...) __riscv_vmax_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vmax_vv_i8mf4(...) __riscv_vmax_vv_i8mf4(__VA_ARGS__) |
| #define | vmax_vv_i8mf4_m(...) __riscv_vmax_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vmax_vv_i8mf8(...) __riscv_vmax_vv_i8mf8(__VA_ARGS__) |
| #define | vmax_vv_i8mf8_m(...) __riscv_vmax_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vmax_vx_i16m1(...) __riscv_vmax_vx_i16m1(__VA_ARGS__) |
| #define | vmax_vx_i16m1_m(...) __riscv_vmax_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vmax_vx_i16m2(...) __riscv_vmax_vx_i16m2(__VA_ARGS__) |
| #define | vmax_vx_i16m2_m(...) __riscv_vmax_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vmax_vx_i16m4(...) __riscv_vmax_vx_i16m4(__VA_ARGS__) |
| #define | vmax_vx_i16m4_m(...) __riscv_vmax_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vmax_vx_i16m8(...) __riscv_vmax_vx_i16m8(__VA_ARGS__) |
| #define | vmax_vx_i16m8_m(...) __riscv_vmax_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vmax_vx_i16mf2(...) __riscv_vmax_vx_i16mf2(__VA_ARGS__) |
| #define | vmax_vx_i16mf2_m(...) __riscv_vmax_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vmax_vx_i16mf4(...) __riscv_vmax_vx_i16mf4(__VA_ARGS__) |
| #define | vmax_vx_i16mf4_m(...) __riscv_vmax_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vmax_vx_i32m1(...) __riscv_vmax_vx_i32m1(__VA_ARGS__) |
| #define | vmax_vx_i32m1_m(...) __riscv_vmax_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vmax_vx_i32m2(...) __riscv_vmax_vx_i32m2(__VA_ARGS__) |
| #define | vmax_vx_i32m2_m(...) __riscv_vmax_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vmax_vx_i32m4(...) __riscv_vmax_vx_i32m4(__VA_ARGS__) |
| #define | vmax_vx_i32m4_m(...) __riscv_vmax_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vmax_vx_i32m8(...) __riscv_vmax_vx_i32m8(__VA_ARGS__) |
| #define | vmax_vx_i32m8_m(...) __riscv_vmax_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vmax_vx_i32mf2(...) __riscv_vmax_vx_i32mf2(__VA_ARGS__) |
| #define | vmax_vx_i32mf2_m(...) __riscv_vmax_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vmax_vx_i64m1(...) __riscv_vmax_vx_i64m1(__VA_ARGS__) |
| #define | vmax_vx_i64m1_m(...) __riscv_vmax_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vmax_vx_i64m2(...) __riscv_vmax_vx_i64m2(__VA_ARGS__) |
| #define | vmax_vx_i64m2_m(...) __riscv_vmax_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vmax_vx_i64m4(...) __riscv_vmax_vx_i64m4(__VA_ARGS__) |
| #define | vmax_vx_i64m4_m(...) __riscv_vmax_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vmax_vx_i64m8(...) __riscv_vmax_vx_i64m8(__VA_ARGS__) |
| #define | vmax_vx_i64m8_m(...) __riscv_vmax_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vmax_vx_i8m1(...) __riscv_vmax_vx_i8m1(__VA_ARGS__) |
| #define | vmax_vx_i8m1_m(...) __riscv_vmax_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vmax_vx_i8m2(...) __riscv_vmax_vx_i8m2(__VA_ARGS__) |
| #define | vmax_vx_i8m2_m(...) __riscv_vmax_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vmax_vx_i8m4(...) __riscv_vmax_vx_i8m4(__VA_ARGS__) |
| #define | vmax_vx_i8m4_m(...) __riscv_vmax_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vmax_vx_i8m8(...) __riscv_vmax_vx_i8m8(__VA_ARGS__) |
| #define | vmax_vx_i8m8_m(...) __riscv_vmax_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vmax_vx_i8mf2(...) __riscv_vmax_vx_i8mf2(__VA_ARGS__) |
| #define | vmax_vx_i8mf2_m(...) __riscv_vmax_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vmax_vx_i8mf4(...) __riscv_vmax_vx_i8mf4(__VA_ARGS__) |
| #define | vmax_vx_i8mf4_m(...) __riscv_vmax_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vmax_vx_i8mf8(...) __riscv_vmax_vx_i8mf8(__VA_ARGS__) |
| #define | vmax_vx_i8mf8_m(...) __riscv_vmax_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u16m1(...) __riscv_vmaxu_vv_u16m1(__VA_ARGS__) |
| #define | vmaxu_vv_u16m1_m(...) __riscv_vmaxu_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u16m2(...) __riscv_vmaxu_vv_u16m2(__VA_ARGS__) |
| #define | vmaxu_vv_u16m2_m(...) __riscv_vmaxu_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u16m4(...) __riscv_vmaxu_vv_u16m4(__VA_ARGS__) |
| #define | vmaxu_vv_u16m4_m(...) __riscv_vmaxu_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u16m8(...) __riscv_vmaxu_vv_u16m8(__VA_ARGS__) |
| #define | vmaxu_vv_u16m8_m(...) __riscv_vmaxu_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u16mf2(...) __riscv_vmaxu_vv_u16mf2(__VA_ARGS__) |
| #define | vmaxu_vv_u16mf2_m(...) __riscv_vmaxu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u16mf4(...) __riscv_vmaxu_vv_u16mf4(__VA_ARGS__) |
| #define | vmaxu_vv_u16mf4_m(...) __riscv_vmaxu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u32m1(...) __riscv_vmaxu_vv_u32m1(__VA_ARGS__) |
| #define | vmaxu_vv_u32m1_m(...) __riscv_vmaxu_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u32m2(...) __riscv_vmaxu_vv_u32m2(__VA_ARGS__) |
| #define | vmaxu_vv_u32m2_m(...) __riscv_vmaxu_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u32m4(...) __riscv_vmaxu_vv_u32m4(__VA_ARGS__) |
| #define | vmaxu_vv_u32m4_m(...) __riscv_vmaxu_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u32m8(...) __riscv_vmaxu_vv_u32m8(__VA_ARGS__) |
| #define | vmaxu_vv_u32m8_m(...) __riscv_vmaxu_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u32mf2(...) __riscv_vmaxu_vv_u32mf2(__VA_ARGS__) |
| #define | vmaxu_vv_u32mf2_m(...) __riscv_vmaxu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u64m1(...) __riscv_vmaxu_vv_u64m1(__VA_ARGS__) |
| #define | vmaxu_vv_u64m1_m(...) __riscv_vmaxu_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u64m2(...) __riscv_vmaxu_vv_u64m2(__VA_ARGS__) |
| #define | vmaxu_vv_u64m2_m(...) __riscv_vmaxu_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u64m4(...) __riscv_vmaxu_vv_u64m4(__VA_ARGS__) |
| #define | vmaxu_vv_u64m4_m(...) __riscv_vmaxu_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u64m8(...) __riscv_vmaxu_vv_u64m8(__VA_ARGS__) |
| #define | vmaxu_vv_u64m8_m(...) __riscv_vmaxu_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u8m1(...) __riscv_vmaxu_vv_u8m1(__VA_ARGS__) |
| #define | vmaxu_vv_u8m1_m(...) __riscv_vmaxu_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u8m2(...) __riscv_vmaxu_vv_u8m2(__VA_ARGS__) |
| #define | vmaxu_vv_u8m2_m(...) __riscv_vmaxu_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u8m4(...) __riscv_vmaxu_vv_u8m4(__VA_ARGS__) |
| #define | vmaxu_vv_u8m4_m(...) __riscv_vmaxu_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u8m8(...) __riscv_vmaxu_vv_u8m8(__VA_ARGS__) |
| #define | vmaxu_vv_u8m8_m(...) __riscv_vmaxu_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u8mf2(...) __riscv_vmaxu_vv_u8mf2(__VA_ARGS__) |
| #define | vmaxu_vv_u8mf2_m(...) __riscv_vmaxu_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u8mf4(...) __riscv_vmaxu_vv_u8mf4(__VA_ARGS__) |
| #define | vmaxu_vv_u8mf4_m(...) __riscv_vmaxu_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vmaxu_vv_u8mf8(...) __riscv_vmaxu_vv_u8mf8(__VA_ARGS__) |
| #define | vmaxu_vv_u8mf8_m(...) __riscv_vmaxu_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u16m1(...) __riscv_vmaxu_vx_u16m1(__VA_ARGS__) |
| #define | vmaxu_vx_u16m1_m(...) __riscv_vmaxu_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u16m2(...) __riscv_vmaxu_vx_u16m2(__VA_ARGS__) |
| #define | vmaxu_vx_u16m2_m(...) __riscv_vmaxu_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u16m4(...) __riscv_vmaxu_vx_u16m4(__VA_ARGS__) |
| #define | vmaxu_vx_u16m4_m(...) __riscv_vmaxu_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u16m8(...) __riscv_vmaxu_vx_u16m8(__VA_ARGS__) |
| #define | vmaxu_vx_u16m8_m(...) __riscv_vmaxu_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u16mf2(...) __riscv_vmaxu_vx_u16mf2(__VA_ARGS__) |
| #define | vmaxu_vx_u16mf2_m(...) __riscv_vmaxu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u16mf4(...) __riscv_vmaxu_vx_u16mf4(__VA_ARGS__) |
| #define | vmaxu_vx_u16mf4_m(...) __riscv_vmaxu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u32m1(...) __riscv_vmaxu_vx_u32m1(__VA_ARGS__) |
| #define | vmaxu_vx_u32m1_m(...) __riscv_vmaxu_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u32m2(...) __riscv_vmaxu_vx_u32m2(__VA_ARGS__) |
| #define | vmaxu_vx_u32m2_m(...) __riscv_vmaxu_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u32m4(...) __riscv_vmaxu_vx_u32m4(__VA_ARGS__) |
| #define | vmaxu_vx_u32m4_m(...) __riscv_vmaxu_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u32m8(...) __riscv_vmaxu_vx_u32m8(__VA_ARGS__) |
| #define | vmaxu_vx_u32m8_m(...) __riscv_vmaxu_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u32mf2(...) __riscv_vmaxu_vx_u32mf2(__VA_ARGS__) |
| #define | vmaxu_vx_u32mf2_m(...) __riscv_vmaxu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u64m1(...) __riscv_vmaxu_vx_u64m1(__VA_ARGS__) |
| #define | vmaxu_vx_u64m1_m(...) __riscv_vmaxu_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u64m2(...) __riscv_vmaxu_vx_u64m2(__VA_ARGS__) |
| #define | vmaxu_vx_u64m2_m(...) __riscv_vmaxu_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u64m4(...) __riscv_vmaxu_vx_u64m4(__VA_ARGS__) |
| #define | vmaxu_vx_u64m4_m(...) __riscv_vmaxu_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u64m8(...) __riscv_vmaxu_vx_u64m8(__VA_ARGS__) |
| #define | vmaxu_vx_u64m8_m(...) __riscv_vmaxu_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u8m1(...) __riscv_vmaxu_vx_u8m1(__VA_ARGS__) |
| #define | vmaxu_vx_u8m1_m(...) __riscv_vmaxu_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u8m2(...) __riscv_vmaxu_vx_u8m2(__VA_ARGS__) |
| #define | vmaxu_vx_u8m2_m(...) __riscv_vmaxu_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u8m4(...) __riscv_vmaxu_vx_u8m4(__VA_ARGS__) |
| #define | vmaxu_vx_u8m4_m(...) __riscv_vmaxu_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u8m8(...) __riscv_vmaxu_vx_u8m8(__VA_ARGS__) |
| #define | vmaxu_vx_u8m8_m(...) __riscv_vmaxu_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u8mf2(...) __riscv_vmaxu_vx_u8mf2(__VA_ARGS__) |
| #define | vmaxu_vx_u8mf2_m(...) __riscv_vmaxu_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u8mf4(...) __riscv_vmaxu_vx_u8mf4(__VA_ARGS__) |
| #define | vmaxu_vx_u8mf4_m(...) __riscv_vmaxu_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vmaxu_vx_u8mf8(...) __riscv_vmaxu_vx_u8mf8(__VA_ARGS__) |
| #define | vmaxu_vx_u8mf8_m(...) __riscv_vmaxu_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vmclr_m_b1(...) __riscv_vmclr_m_b1(__VA_ARGS__) |
| #define | vmclr_m_b16(...) __riscv_vmclr_m_b16(__VA_ARGS__) |
| #define | vmclr_m_b2(...) __riscv_vmclr_m_b2(__VA_ARGS__) |
| #define | vmclr_m_b32(...) __riscv_vmclr_m_b32(__VA_ARGS__) |
| #define | vmclr_m_b4(...) __riscv_vmclr_m_b4(__VA_ARGS__) |
| #define | vmclr_m_b64(...) __riscv_vmclr_m_b64(__VA_ARGS__) |
| #define | vmclr_m_b8(...) __riscv_vmclr_m_b8(__VA_ARGS__) |
| #define | vmerge_vvm_f16m1(mask, op1, op2, vl) __riscv_vmerge_vvm_f16m1((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_f16m2(mask, op1, op2, vl) __riscv_vmerge_vvm_f16m2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_f16m4(mask, op1, op2, vl) __riscv_vmerge_vvm_f16m4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_f16m8(mask, op1, op2, vl) __riscv_vmerge_vvm_f16m8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_f16mf2(mask, op1, op2, vl) __riscv_vmerge_vvm_f16mf2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_f16mf4(mask, op1, op2, vl) __riscv_vmerge_vvm_f16mf4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_f32m1(mask, op1, op2, vl) __riscv_vmerge_vvm_f32m1((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_f32m2(mask, op1, op2, vl) __riscv_vmerge_vvm_f32m2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_f32m4(mask, op1, op2, vl) __riscv_vmerge_vvm_f32m4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_f32m8(mask, op1, op2, vl) __riscv_vmerge_vvm_f32m8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_f32mf2(mask, op1, op2, vl) __riscv_vmerge_vvm_f32mf2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_f64m1(mask, op1, op2, vl) __riscv_vmerge_vvm_f64m1((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_f64m2(mask, op1, op2, vl) __riscv_vmerge_vvm_f64m2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_f64m4(mask, op1, op2, vl) __riscv_vmerge_vvm_f64m4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_f64m8(mask, op1, op2, vl) __riscv_vmerge_vvm_f64m8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i16m1(mask, op1, op2, vl) __riscv_vmerge_vvm_i16m1((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i16m2(mask, op1, op2, vl) __riscv_vmerge_vvm_i16m2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i16m4(mask, op1, op2, vl) __riscv_vmerge_vvm_i16m4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i16m8(mask, op1, op2, vl) __riscv_vmerge_vvm_i16m8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i16mf2(mask, op1, op2, vl) __riscv_vmerge_vvm_i16mf2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i16mf4(mask, op1, op2, vl) __riscv_vmerge_vvm_i16mf4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i32m1(mask, op1, op2, vl) __riscv_vmerge_vvm_i32m1((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i32m2(mask, op1, op2, vl) __riscv_vmerge_vvm_i32m2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i32m4(mask, op1, op2, vl) __riscv_vmerge_vvm_i32m4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i32m8(mask, op1, op2, vl) __riscv_vmerge_vvm_i32m8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i32mf2(mask, op1, op2, vl) __riscv_vmerge_vvm_i32mf2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i64m1(mask, op1, op2, vl) __riscv_vmerge_vvm_i64m1((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i64m2(mask, op1, op2, vl) __riscv_vmerge_vvm_i64m2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i64m4(mask, op1, op2, vl) __riscv_vmerge_vvm_i64m4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i64m8(mask, op1, op2, vl) __riscv_vmerge_vvm_i64m8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i8m1(mask, op1, op2, vl) __riscv_vmerge_vvm_i8m1((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i8m2(mask, op1, op2, vl) __riscv_vmerge_vvm_i8m2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i8m4(mask, op1, op2, vl) __riscv_vmerge_vvm_i8m4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i8m8(mask, op1, op2, vl) __riscv_vmerge_vvm_i8m8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i8mf2(mask, op1, op2, vl) __riscv_vmerge_vvm_i8mf2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i8mf4(mask, op1, op2, vl) __riscv_vmerge_vvm_i8mf4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_i8mf8(mask, op1, op2, vl) __riscv_vmerge_vvm_i8mf8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u16m1(mask, op1, op2, vl) __riscv_vmerge_vvm_u16m1((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u16m2(mask, op1, op2, vl) __riscv_vmerge_vvm_u16m2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u16m4(mask, op1, op2, vl) __riscv_vmerge_vvm_u16m4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u16m8(mask, op1, op2, vl) __riscv_vmerge_vvm_u16m8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u16mf2(mask, op1, op2, vl) __riscv_vmerge_vvm_u16mf2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u16mf4(mask, op1, op2, vl) __riscv_vmerge_vvm_u16mf4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u32m1(mask, op1, op2, vl) __riscv_vmerge_vvm_u32m1((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u32m2(mask, op1, op2, vl) __riscv_vmerge_vvm_u32m2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u32m4(mask, op1, op2, vl) __riscv_vmerge_vvm_u32m4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u32m8(mask, op1, op2, vl) __riscv_vmerge_vvm_u32m8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u32mf2(mask, op1, op2, vl) __riscv_vmerge_vvm_u32mf2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u64m1(mask, op1, op2, vl) __riscv_vmerge_vvm_u64m1((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u64m2(mask, op1, op2, vl) __riscv_vmerge_vvm_u64m2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u64m4(mask, op1, op2, vl) __riscv_vmerge_vvm_u64m4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u64m8(mask, op1, op2, vl) __riscv_vmerge_vvm_u64m8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u8m1(mask, op1, op2, vl) __riscv_vmerge_vvm_u8m1((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u8m2(mask, op1, op2, vl) __riscv_vmerge_vvm_u8m2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u8m4(mask, op1, op2, vl) __riscv_vmerge_vvm_u8m4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u8m8(mask, op1, op2, vl) __riscv_vmerge_vvm_u8m8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u8mf2(mask, op1, op2, vl) __riscv_vmerge_vvm_u8mf2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u8mf4(mask, op1, op2, vl) __riscv_vmerge_vvm_u8mf4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vvm_u8mf8(mask, op1, op2, vl) __riscv_vmerge_vvm_u8mf8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i16m1(mask, op1, op2, vl) __riscv_vmerge_vxm_i16m1((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i16m2(mask, op1, op2, vl) __riscv_vmerge_vxm_i16m2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i16m4(mask, op1, op2, vl) __riscv_vmerge_vxm_i16m4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i16m8(mask, op1, op2, vl) __riscv_vmerge_vxm_i16m8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i16mf2(mask, op1, op2, vl) __riscv_vmerge_vxm_i16mf2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i16mf4(mask, op1, op2, vl) __riscv_vmerge_vxm_i16mf4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i32m1(mask, op1, op2, vl) __riscv_vmerge_vxm_i32m1((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i32m2(mask, op1, op2, vl) __riscv_vmerge_vxm_i32m2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i32m4(mask, op1, op2, vl) __riscv_vmerge_vxm_i32m4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i32m8(mask, op1, op2, vl) __riscv_vmerge_vxm_i32m8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i32mf2(mask, op1, op2, vl) __riscv_vmerge_vxm_i32mf2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i64m1(mask, op1, op2, vl) __riscv_vmerge_vxm_i64m1((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i64m2(mask, op1, op2, vl) __riscv_vmerge_vxm_i64m2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i64m4(mask, op1, op2, vl) __riscv_vmerge_vxm_i64m4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i64m8(mask, op1, op2, vl) __riscv_vmerge_vxm_i64m8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i8m1(mask, op1, op2, vl) __riscv_vmerge_vxm_i8m1((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i8m2(mask, op1, op2, vl) __riscv_vmerge_vxm_i8m2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i8m4(mask, op1, op2, vl) __riscv_vmerge_vxm_i8m4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i8m8(mask, op1, op2, vl) __riscv_vmerge_vxm_i8m8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i8mf2(mask, op1, op2, vl) __riscv_vmerge_vxm_i8mf2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i8mf4(mask, op1, op2, vl) __riscv_vmerge_vxm_i8mf4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_i8mf8(mask, op1, op2, vl) __riscv_vmerge_vxm_i8mf8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u16m1(mask, op1, op2, vl) __riscv_vmerge_vxm_u16m1((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u16m2(mask, op1, op2, vl) __riscv_vmerge_vxm_u16m2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u16m4(mask, op1, op2, vl) __riscv_vmerge_vxm_u16m4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u16m8(mask, op1, op2, vl) __riscv_vmerge_vxm_u16m8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u16mf2(mask, op1, op2, vl) __riscv_vmerge_vxm_u16mf2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u16mf4(mask, op1, op2, vl) __riscv_vmerge_vxm_u16mf4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u32m1(mask, op1, op2, vl) __riscv_vmerge_vxm_u32m1((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u32m2(mask, op1, op2, vl) __riscv_vmerge_vxm_u32m2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u32m4(mask, op1, op2, vl) __riscv_vmerge_vxm_u32m4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u32m8(mask, op1, op2, vl) __riscv_vmerge_vxm_u32m8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u32mf2(mask, op1, op2, vl) __riscv_vmerge_vxm_u32mf2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u64m1(mask, op1, op2, vl) __riscv_vmerge_vxm_u64m1((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u64m2(mask, op1, op2, vl) __riscv_vmerge_vxm_u64m2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u64m4(mask, op1, op2, vl) __riscv_vmerge_vxm_u64m4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u64m8(mask, op1, op2, vl) __riscv_vmerge_vxm_u64m8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u8m1(mask, op1, op2, vl) __riscv_vmerge_vxm_u8m1((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u8m2(mask, op1, op2, vl) __riscv_vmerge_vxm_u8m2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u8m4(mask, op1, op2, vl) __riscv_vmerge_vxm_u8m4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u8m8(mask, op1, op2, vl) __riscv_vmerge_vxm_u8m8((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u8mf2(mask, op1, op2, vl) __riscv_vmerge_vxm_u8mf2((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u8mf4(mask, op1, op2, vl) __riscv_vmerge_vxm_u8mf4((op1), (op2), (mask), (vl)) |
| #define | vmerge_vxm_u8mf8(mask, op1, op2, vl) __riscv_vmerge_vxm_u8mf8((op1), (op2), (mask), (vl)) |
| #define | vmfeq_vf_f16m1_b16(...) __riscv_vmfeq_vf_f16m1_b16(__VA_ARGS__) |
| #define | vmfeq_vf_f16m1_b16_m(...) __riscv_vmfeq_vf_f16m1_b16_mu(__VA_ARGS__) |
| #define | vmfeq_vf_f16m2_b8(...) __riscv_vmfeq_vf_f16m2_b8(__VA_ARGS__) |
| #define | vmfeq_vf_f16m2_b8_m(...) __riscv_vmfeq_vf_f16m2_b8_mu(__VA_ARGS__) |
| #define | vmfeq_vf_f16m4_b4(...) __riscv_vmfeq_vf_f16m4_b4(__VA_ARGS__) |
| #define | vmfeq_vf_f16m4_b4_m(...) __riscv_vmfeq_vf_f16m4_b4_mu(__VA_ARGS__) |
| #define | vmfeq_vf_f16m8_b2(...) __riscv_vmfeq_vf_f16m8_b2(__VA_ARGS__) |
| #define | vmfeq_vf_f16m8_b2_m(...) __riscv_vmfeq_vf_f16m8_b2_mu(__VA_ARGS__) |
| #define | vmfeq_vf_f16mf2_b32(...) __riscv_vmfeq_vf_f16mf2_b32(__VA_ARGS__) |
| #define | vmfeq_vf_f16mf2_b32_m(...) __riscv_vmfeq_vf_f16mf2_b32_mu(__VA_ARGS__) |
| #define | vmfeq_vf_f16mf4_b64(...) __riscv_vmfeq_vf_f16mf4_b64(__VA_ARGS__) |
| #define | vmfeq_vf_f16mf4_b64_m(...) __riscv_vmfeq_vf_f16mf4_b64_mu(__VA_ARGS__) |
| #define | vmfeq_vf_f32m1_b32(...) __riscv_vmfeq_vf_f32m1_b32(__VA_ARGS__) |
| #define | vmfeq_vf_f32m1_b32_m(...) __riscv_vmfeq_vf_f32m1_b32_mu(__VA_ARGS__) |
| #define | vmfeq_vf_f32m2_b16(...) __riscv_vmfeq_vf_f32m2_b16(__VA_ARGS__) |
| #define | vmfeq_vf_f32m2_b16_m(...) __riscv_vmfeq_vf_f32m2_b16_mu(__VA_ARGS__) |
| #define | vmfeq_vf_f32m4_b8(...) __riscv_vmfeq_vf_f32m4_b8(__VA_ARGS__) |
| #define | vmfeq_vf_f32m4_b8_m(...) __riscv_vmfeq_vf_f32m4_b8_mu(__VA_ARGS__) |
| #define | vmfeq_vf_f32m8_b4(...) __riscv_vmfeq_vf_f32m8_b4(__VA_ARGS__) |
| #define | vmfeq_vf_f32m8_b4_m(...) __riscv_vmfeq_vf_f32m8_b4_mu(__VA_ARGS__) |
| #define | vmfeq_vf_f32mf2_b64(...) __riscv_vmfeq_vf_f32mf2_b64(__VA_ARGS__) |
| #define | vmfeq_vf_f32mf2_b64_m(...) __riscv_vmfeq_vf_f32mf2_b64_mu(__VA_ARGS__) |
| #define | vmfeq_vf_f64m1_b64(...) __riscv_vmfeq_vf_f64m1_b64(__VA_ARGS__) |
| #define | vmfeq_vf_f64m1_b64_m(...) __riscv_vmfeq_vf_f64m1_b64_mu(__VA_ARGS__) |
| #define | vmfeq_vf_f64m2_b32(...) __riscv_vmfeq_vf_f64m2_b32(__VA_ARGS__) |
| #define | vmfeq_vf_f64m2_b32_m(...) __riscv_vmfeq_vf_f64m2_b32_mu(__VA_ARGS__) |
| #define | vmfeq_vf_f64m4_b16(...) __riscv_vmfeq_vf_f64m4_b16(__VA_ARGS__) |
| #define | vmfeq_vf_f64m4_b16_m(...) __riscv_vmfeq_vf_f64m4_b16_mu(__VA_ARGS__) |
| #define | vmfeq_vf_f64m8_b8(...) __riscv_vmfeq_vf_f64m8_b8(__VA_ARGS__) |
| #define | vmfeq_vf_f64m8_b8_m(...) __riscv_vmfeq_vf_f64m8_b8_mu(__VA_ARGS__) |
| #define | vmfeq_vv_f16m1_b16(...) __riscv_vmfeq_vv_f16m1_b16(__VA_ARGS__) |
| #define | vmfeq_vv_f16m1_b16_m(...) __riscv_vmfeq_vv_f16m1_b16_mu(__VA_ARGS__) |
| #define | vmfeq_vv_f16m2_b8(...) __riscv_vmfeq_vv_f16m2_b8(__VA_ARGS__) |
| #define | vmfeq_vv_f16m2_b8_m(...) __riscv_vmfeq_vv_f16m2_b8_mu(__VA_ARGS__) |
| #define | vmfeq_vv_f16m4_b4(...) __riscv_vmfeq_vv_f16m4_b4(__VA_ARGS__) |
| #define | vmfeq_vv_f16m4_b4_m(...) __riscv_vmfeq_vv_f16m4_b4_mu(__VA_ARGS__) |
| #define | vmfeq_vv_f16m8_b2(...) __riscv_vmfeq_vv_f16m8_b2(__VA_ARGS__) |
| #define | vmfeq_vv_f16m8_b2_m(...) __riscv_vmfeq_vv_f16m8_b2_mu(__VA_ARGS__) |
| #define | vmfeq_vv_f16mf2_b32(...) __riscv_vmfeq_vv_f16mf2_b32(__VA_ARGS__) |
| #define | vmfeq_vv_f16mf2_b32_m(...) __riscv_vmfeq_vv_f16mf2_b32_mu(__VA_ARGS__) |
| #define | vmfeq_vv_f16mf4_b64(...) __riscv_vmfeq_vv_f16mf4_b64(__VA_ARGS__) |
| #define | vmfeq_vv_f16mf4_b64_m(...) __riscv_vmfeq_vv_f16mf4_b64_mu(__VA_ARGS__) |
| #define | vmfeq_vv_f32m1_b32(...) __riscv_vmfeq_vv_f32m1_b32(__VA_ARGS__) |
| #define | vmfeq_vv_f32m1_b32_m(...) __riscv_vmfeq_vv_f32m1_b32_mu(__VA_ARGS__) |
| #define | vmfeq_vv_f32m2_b16(...) __riscv_vmfeq_vv_f32m2_b16(__VA_ARGS__) |
| #define | vmfeq_vv_f32m2_b16_m(...) __riscv_vmfeq_vv_f32m2_b16_mu(__VA_ARGS__) |
| #define | vmfeq_vv_f32m4_b8(...) __riscv_vmfeq_vv_f32m4_b8(__VA_ARGS__) |
| #define | vmfeq_vv_f32m4_b8_m(...) __riscv_vmfeq_vv_f32m4_b8_mu(__VA_ARGS__) |
| #define | vmfeq_vv_f32m8_b4(...) __riscv_vmfeq_vv_f32m8_b4(__VA_ARGS__) |
| #define | vmfeq_vv_f32m8_b4_m(...) __riscv_vmfeq_vv_f32m8_b4_mu(__VA_ARGS__) |
| #define | vmfeq_vv_f32mf2_b64(...) __riscv_vmfeq_vv_f32mf2_b64(__VA_ARGS__) |
| #define | vmfeq_vv_f32mf2_b64_m(...) __riscv_vmfeq_vv_f32mf2_b64_mu(__VA_ARGS__) |
| #define | vmfeq_vv_f64m1_b64(...) __riscv_vmfeq_vv_f64m1_b64(__VA_ARGS__) |
| #define | vmfeq_vv_f64m1_b64_m(...) __riscv_vmfeq_vv_f64m1_b64_mu(__VA_ARGS__) |
| #define | vmfeq_vv_f64m2_b32(...) __riscv_vmfeq_vv_f64m2_b32(__VA_ARGS__) |
| #define | vmfeq_vv_f64m2_b32_m(...) __riscv_vmfeq_vv_f64m2_b32_mu(__VA_ARGS__) |
| #define | vmfeq_vv_f64m4_b16(...) __riscv_vmfeq_vv_f64m4_b16(__VA_ARGS__) |
| #define | vmfeq_vv_f64m4_b16_m(...) __riscv_vmfeq_vv_f64m4_b16_mu(__VA_ARGS__) |
| #define | vmfeq_vv_f64m8_b8(...) __riscv_vmfeq_vv_f64m8_b8(__VA_ARGS__) |
| #define | vmfeq_vv_f64m8_b8_m(...) __riscv_vmfeq_vv_f64m8_b8_mu(__VA_ARGS__) |
| #define | vmfge_vf_f16m1_b16(...) __riscv_vmfge_vf_f16m1_b16(__VA_ARGS__) |
| #define | vmfge_vf_f16m1_b16_m(...) __riscv_vmfge_vf_f16m1_b16_mu(__VA_ARGS__) |
| #define | vmfge_vf_f16m2_b8(...) __riscv_vmfge_vf_f16m2_b8(__VA_ARGS__) |
| #define | vmfge_vf_f16m2_b8_m(...) __riscv_vmfge_vf_f16m2_b8_mu(__VA_ARGS__) |
| #define | vmfge_vf_f16m4_b4(...) __riscv_vmfge_vf_f16m4_b4(__VA_ARGS__) |
| #define | vmfge_vf_f16m4_b4_m(...) __riscv_vmfge_vf_f16m4_b4_mu(__VA_ARGS__) |
| #define | vmfge_vf_f16m8_b2(...) __riscv_vmfge_vf_f16m8_b2(__VA_ARGS__) |
| #define | vmfge_vf_f16m8_b2_m(...) __riscv_vmfge_vf_f16m8_b2_mu(__VA_ARGS__) |
| #define | vmfge_vf_f16mf2_b32(...) __riscv_vmfge_vf_f16mf2_b32(__VA_ARGS__) |
| #define | vmfge_vf_f16mf2_b32_m(...) __riscv_vmfge_vf_f16mf2_b32_mu(__VA_ARGS__) |
| #define | vmfge_vf_f16mf4_b64(...) __riscv_vmfge_vf_f16mf4_b64(__VA_ARGS__) |
| #define | vmfge_vf_f16mf4_b64_m(...) __riscv_vmfge_vf_f16mf4_b64_mu(__VA_ARGS__) |
| #define | vmfge_vf_f32m1_b32(...) __riscv_vmfge_vf_f32m1_b32(__VA_ARGS__) |
| #define | vmfge_vf_f32m1_b32_m(...) __riscv_vmfge_vf_f32m1_b32_mu(__VA_ARGS__) |
| #define | vmfge_vf_f32m2_b16(...) __riscv_vmfge_vf_f32m2_b16(__VA_ARGS__) |
| #define | vmfge_vf_f32m2_b16_m(...) __riscv_vmfge_vf_f32m2_b16_mu(__VA_ARGS__) |
| #define | vmfge_vf_f32m4_b8(...) __riscv_vmfge_vf_f32m4_b8(__VA_ARGS__) |
| #define | vmfge_vf_f32m4_b8_m(...) __riscv_vmfge_vf_f32m4_b8_mu(__VA_ARGS__) |
| #define | vmfge_vf_f32m8_b4(...) __riscv_vmfge_vf_f32m8_b4(__VA_ARGS__) |
| #define | vmfge_vf_f32m8_b4_m(...) __riscv_vmfge_vf_f32m8_b4_mu(__VA_ARGS__) |
| #define | vmfge_vf_f32mf2_b64(...) __riscv_vmfge_vf_f32mf2_b64(__VA_ARGS__) |
| #define | vmfge_vf_f32mf2_b64_m(...) __riscv_vmfge_vf_f32mf2_b64_mu(__VA_ARGS__) |
| #define | vmfge_vf_f64m1_b64(...) __riscv_vmfge_vf_f64m1_b64(__VA_ARGS__) |
| #define | vmfge_vf_f64m1_b64_m(...) __riscv_vmfge_vf_f64m1_b64_mu(__VA_ARGS__) |
| #define | vmfge_vf_f64m2_b32(...) __riscv_vmfge_vf_f64m2_b32(__VA_ARGS__) |
| #define | vmfge_vf_f64m2_b32_m(...) __riscv_vmfge_vf_f64m2_b32_mu(__VA_ARGS__) |
| #define | vmfge_vf_f64m4_b16(...) __riscv_vmfge_vf_f64m4_b16(__VA_ARGS__) |
| #define | vmfge_vf_f64m4_b16_m(...) __riscv_vmfge_vf_f64m4_b16_mu(__VA_ARGS__) |
| #define | vmfge_vf_f64m8_b8(...) __riscv_vmfge_vf_f64m8_b8(__VA_ARGS__) |
| #define | vmfge_vf_f64m8_b8_m(...) __riscv_vmfge_vf_f64m8_b8_mu(__VA_ARGS__) |
| #define | vmfge_vv_f16m1_b16(...) __riscv_vmfge_vv_f16m1_b16(__VA_ARGS__) |
| #define | vmfge_vv_f16m1_b16_m(...) __riscv_vmfge_vv_f16m1_b16_mu(__VA_ARGS__) |
| #define | vmfge_vv_f16m2_b8(...) __riscv_vmfge_vv_f16m2_b8(__VA_ARGS__) |
| #define | vmfge_vv_f16m2_b8_m(...) __riscv_vmfge_vv_f16m2_b8_mu(__VA_ARGS__) |
| #define | vmfge_vv_f16m4_b4(...) __riscv_vmfge_vv_f16m4_b4(__VA_ARGS__) |
| #define | vmfge_vv_f16m4_b4_m(...) __riscv_vmfge_vv_f16m4_b4_mu(__VA_ARGS__) |
| #define | vmfge_vv_f16m8_b2(...) __riscv_vmfge_vv_f16m8_b2(__VA_ARGS__) |
| #define | vmfge_vv_f16m8_b2_m(...) __riscv_vmfge_vv_f16m8_b2_mu(__VA_ARGS__) |
| #define | vmfge_vv_f16mf2_b32(...) __riscv_vmfge_vv_f16mf2_b32(__VA_ARGS__) |
| #define | vmfge_vv_f16mf2_b32_m(...) __riscv_vmfge_vv_f16mf2_b32_mu(__VA_ARGS__) |
| #define | vmfge_vv_f16mf4_b64(...) __riscv_vmfge_vv_f16mf4_b64(__VA_ARGS__) |
| #define | vmfge_vv_f16mf4_b64_m(...) __riscv_vmfge_vv_f16mf4_b64_mu(__VA_ARGS__) |
| #define | vmfge_vv_f32m1_b32(...) __riscv_vmfge_vv_f32m1_b32(__VA_ARGS__) |
| #define | vmfge_vv_f32m1_b32_m(...) __riscv_vmfge_vv_f32m1_b32_mu(__VA_ARGS__) |
| #define | vmfge_vv_f32m2_b16(...) __riscv_vmfge_vv_f32m2_b16(__VA_ARGS__) |
| #define | vmfge_vv_f32m2_b16_m(...) __riscv_vmfge_vv_f32m2_b16_mu(__VA_ARGS__) |
| #define | vmfge_vv_f32m4_b8(...) __riscv_vmfge_vv_f32m4_b8(__VA_ARGS__) |
| #define | vmfge_vv_f32m4_b8_m(...) __riscv_vmfge_vv_f32m4_b8_mu(__VA_ARGS__) |
| #define | vmfge_vv_f32m8_b4(...) __riscv_vmfge_vv_f32m8_b4(__VA_ARGS__) |
| #define | vmfge_vv_f32m8_b4_m(...) __riscv_vmfge_vv_f32m8_b4_mu(__VA_ARGS__) |
| #define | vmfge_vv_f32mf2_b64(...) __riscv_vmfge_vv_f32mf2_b64(__VA_ARGS__) |
| #define | vmfge_vv_f32mf2_b64_m(...) __riscv_vmfge_vv_f32mf2_b64_mu(__VA_ARGS__) |
| #define | vmfge_vv_f64m1_b64(...) __riscv_vmfge_vv_f64m1_b64(__VA_ARGS__) |
| #define | vmfge_vv_f64m1_b64_m(...) __riscv_vmfge_vv_f64m1_b64_mu(__VA_ARGS__) |
| #define | vmfge_vv_f64m2_b32(...) __riscv_vmfge_vv_f64m2_b32(__VA_ARGS__) |
| #define | vmfge_vv_f64m2_b32_m(...) __riscv_vmfge_vv_f64m2_b32_mu(__VA_ARGS__) |
| #define | vmfge_vv_f64m4_b16(...) __riscv_vmfge_vv_f64m4_b16(__VA_ARGS__) |
| #define | vmfge_vv_f64m4_b16_m(...) __riscv_vmfge_vv_f64m4_b16_mu(__VA_ARGS__) |
| #define | vmfge_vv_f64m8_b8(...) __riscv_vmfge_vv_f64m8_b8(__VA_ARGS__) |
| #define | vmfge_vv_f64m8_b8_m(...) __riscv_vmfge_vv_f64m8_b8_mu(__VA_ARGS__) |
| #define | vmfgt_vf_f16m1_b16(...) __riscv_vmfgt_vf_f16m1_b16(__VA_ARGS__) |
| #define | vmfgt_vf_f16m1_b16_m(...) __riscv_vmfgt_vf_f16m1_b16_mu(__VA_ARGS__) |
| #define | vmfgt_vf_f16m2_b8(...) __riscv_vmfgt_vf_f16m2_b8(__VA_ARGS__) |
| #define | vmfgt_vf_f16m2_b8_m(...) __riscv_vmfgt_vf_f16m2_b8_mu(__VA_ARGS__) |
| #define | vmfgt_vf_f16m4_b4(...) __riscv_vmfgt_vf_f16m4_b4(__VA_ARGS__) |
| #define | vmfgt_vf_f16m4_b4_m(...) __riscv_vmfgt_vf_f16m4_b4_mu(__VA_ARGS__) |
| #define | vmfgt_vf_f16m8_b2(...) __riscv_vmfgt_vf_f16m8_b2(__VA_ARGS__) |
| #define | vmfgt_vf_f16m8_b2_m(...) __riscv_vmfgt_vf_f16m8_b2_mu(__VA_ARGS__) |
| #define | vmfgt_vf_f16mf2_b32(...) __riscv_vmfgt_vf_f16mf2_b32(__VA_ARGS__) |
| #define | vmfgt_vf_f16mf2_b32_m(...) __riscv_vmfgt_vf_f16mf2_b32_mu(__VA_ARGS__) |
| #define | vmfgt_vf_f16mf4_b64(...) __riscv_vmfgt_vf_f16mf4_b64(__VA_ARGS__) |
| #define | vmfgt_vf_f16mf4_b64_m(...) __riscv_vmfgt_vf_f16mf4_b64_mu(__VA_ARGS__) |
| #define | vmfgt_vf_f32m1_b32(...) __riscv_vmfgt_vf_f32m1_b32(__VA_ARGS__) |
| #define | vmfgt_vf_f32m1_b32_m(...) __riscv_vmfgt_vf_f32m1_b32_mu(__VA_ARGS__) |
| #define | vmfgt_vf_f32m2_b16(...) __riscv_vmfgt_vf_f32m2_b16(__VA_ARGS__) |
| #define | vmfgt_vf_f32m2_b16_m(...) __riscv_vmfgt_vf_f32m2_b16_mu(__VA_ARGS__) |
| #define | vmfgt_vf_f32m4_b8(...) __riscv_vmfgt_vf_f32m4_b8(__VA_ARGS__) |
| #define | vmfgt_vf_f32m4_b8_m(...) __riscv_vmfgt_vf_f32m4_b8_mu(__VA_ARGS__) |
| #define | vmfgt_vf_f32m8_b4(...) __riscv_vmfgt_vf_f32m8_b4(__VA_ARGS__) |
| #define | vmfgt_vf_f32m8_b4_m(...) __riscv_vmfgt_vf_f32m8_b4_mu(__VA_ARGS__) |
| #define | vmfgt_vf_f32mf2_b64(...) __riscv_vmfgt_vf_f32mf2_b64(__VA_ARGS__) |
| #define | vmfgt_vf_f32mf2_b64_m(...) __riscv_vmfgt_vf_f32mf2_b64_mu(__VA_ARGS__) |
| #define | vmfgt_vf_f64m1_b64(...) __riscv_vmfgt_vf_f64m1_b64(__VA_ARGS__) |
| #define | vmfgt_vf_f64m1_b64_m(...) __riscv_vmfgt_vf_f64m1_b64_mu(__VA_ARGS__) |
| #define | vmfgt_vf_f64m2_b32(...) __riscv_vmfgt_vf_f64m2_b32(__VA_ARGS__) |
| #define | vmfgt_vf_f64m2_b32_m(...) __riscv_vmfgt_vf_f64m2_b32_mu(__VA_ARGS__) |
| #define | vmfgt_vf_f64m4_b16(...) __riscv_vmfgt_vf_f64m4_b16(__VA_ARGS__) |
| #define | vmfgt_vf_f64m4_b16_m(...) __riscv_vmfgt_vf_f64m4_b16_mu(__VA_ARGS__) |
| #define | vmfgt_vf_f64m8_b8(...) __riscv_vmfgt_vf_f64m8_b8(__VA_ARGS__) |
| #define | vmfgt_vf_f64m8_b8_m(...) __riscv_vmfgt_vf_f64m8_b8_mu(__VA_ARGS__) |
| #define | vmfgt_vv_f16m1_b16(...) __riscv_vmfgt_vv_f16m1_b16(__VA_ARGS__) |
| #define | vmfgt_vv_f16m1_b16_m(...) __riscv_vmfgt_vv_f16m1_b16_mu(__VA_ARGS__) |
| #define | vmfgt_vv_f16m2_b8(...) __riscv_vmfgt_vv_f16m2_b8(__VA_ARGS__) |
| #define | vmfgt_vv_f16m2_b8_m(...) __riscv_vmfgt_vv_f16m2_b8_mu(__VA_ARGS__) |
| #define | vmfgt_vv_f16m4_b4(...) __riscv_vmfgt_vv_f16m4_b4(__VA_ARGS__) |
| #define | vmfgt_vv_f16m4_b4_m(...) __riscv_vmfgt_vv_f16m4_b4_mu(__VA_ARGS__) |
| #define | vmfgt_vv_f16m8_b2(...) __riscv_vmfgt_vv_f16m8_b2(__VA_ARGS__) |
| #define | vmfgt_vv_f16m8_b2_m(...) __riscv_vmfgt_vv_f16m8_b2_mu(__VA_ARGS__) |
| #define | vmfgt_vv_f16mf2_b32(...) __riscv_vmfgt_vv_f16mf2_b32(__VA_ARGS__) |
| #define | vmfgt_vv_f16mf2_b32_m(...) __riscv_vmfgt_vv_f16mf2_b32_mu(__VA_ARGS__) |
| #define | vmfgt_vv_f16mf4_b64(...) __riscv_vmfgt_vv_f16mf4_b64(__VA_ARGS__) |
| #define | vmfgt_vv_f16mf4_b64_m(...) __riscv_vmfgt_vv_f16mf4_b64_mu(__VA_ARGS__) |
| #define | vmfgt_vv_f32m1_b32(...) __riscv_vmfgt_vv_f32m1_b32(__VA_ARGS__) |
| #define | vmfgt_vv_f32m1_b32_m(...) __riscv_vmfgt_vv_f32m1_b32_mu(__VA_ARGS__) |
| #define | vmfgt_vv_f32m2_b16(...) __riscv_vmfgt_vv_f32m2_b16(__VA_ARGS__) |
| #define | vmfgt_vv_f32m2_b16_m(...) __riscv_vmfgt_vv_f32m2_b16_mu(__VA_ARGS__) |
| #define | vmfgt_vv_f32m4_b8(...) __riscv_vmfgt_vv_f32m4_b8(__VA_ARGS__) |
| #define | vmfgt_vv_f32m4_b8_m(...) __riscv_vmfgt_vv_f32m4_b8_mu(__VA_ARGS__) |
| #define | vmfgt_vv_f32m8_b4(...) __riscv_vmfgt_vv_f32m8_b4(__VA_ARGS__) |
| #define | vmfgt_vv_f32m8_b4_m(...) __riscv_vmfgt_vv_f32m8_b4_mu(__VA_ARGS__) |
| #define | vmfgt_vv_f32mf2_b64(...) __riscv_vmfgt_vv_f32mf2_b64(__VA_ARGS__) |
| #define | vmfgt_vv_f32mf2_b64_m(...) __riscv_vmfgt_vv_f32mf2_b64_mu(__VA_ARGS__) |
| #define | vmfgt_vv_f64m1_b64(...) __riscv_vmfgt_vv_f64m1_b64(__VA_ARGS__) |
| #define | vmfgt_vv_f64m1_b64_m(...) __riscv_vmfgt_vv_f64m1_b64_mu(__VA_ARGS__) |
| #define | vmfgt_vv_f64m2_b32(...) __riscv_vmfgt_vv_f64m2_b32(__VA_ARGS__) |
| #define | vmfgt_vv_f64m2_b32_m(...) __riscv_vmfgt_vv_f64m2_b32_mu(__VA_ARGS__) |
| #define | vmfgt_vv_f64m4_b16(...) __riscv_vmfgt_vv_f64m4_b16(__VA_ARGS__) |
| #define | vmfgt_vv_f64m4_b16_m(...) __riscv_vmfgt_vv_f64m4_b16_mu(__VA_ARGS__) |
| #define | vmfgt_vv_f64m8_b8(...) __riscv_vmfgt_vv_f64m8_b8(__VA_ARGS__) |
| #define | vmfgt_vv_f64m8_b8_m(...) __riscv_vmfgt_vv_f64m8_b8_mu(__VA_ARGS__) |
| #define | vmfle_vf_f16m1_b16(...) __riscv_vmfle_vf_f16m1_b16(__VA_ARGS__) |
| #define | vmfle_vf_f16m1_b16_m(...) __riscv_vmfle_vf_f16m1_b16_mu(__VA_ARGS__) |
| #define | vmfle_vf_f16m2_b8(...) __riscv_vmfle_vf_f16m2_b8(__VA_ARGS__) |
| #define | vmfle_vf_f16m2_b8_m(...) __riscv_vmfle_vf_f16m2_b8_mu(__VA_ARGS__) |
| #define | vmfle_vf_f16m4_b4(...) __riscv_vmfle_vf_f16m4_b4(__VA_ARGS__) |
| #define | vmfle_vf_f16m4_b4_m(...) __riscv_vmfle_vf_f16m4_b4_mu(__VA_ARGS__) |
| #define | vmfle_vf_f16m8_b2(...) __riscv_vmfle_vf_f16m8_b2(__VA_ARGS__) |
| #define | vmfle_vf_f16m8_b2_m(...) __riscv_vmfle_vf_f16m8_b2_mu(__VA_ARGS__) |
| #define | vmfle_vf_f16mf2_b32(...) __riscv_vmfle_vf_f16mf2_b32(__VA_ARGS__) |
| #define | vmfle_vf_f16mf2_b32_m(...) __riscv_vmfle_vf_f16mf2_b32_mu(__VA_ARGS__) |
| #define | vmfle_vf_f16mf4_b64(...) __riscv_vmfle_vf_f16mf4_b64(__VA_ARGS__) |
| #define | vmfle_vf_f16mf4_b64_m(...) __riscv_vmfle_vf_f16mf4_b64_mu(__VA_ARGS__) |
| #define | vmfle_vf_f32m1_b32(...) __riscv_vmfle_vf_f32m1_b32(__VA_ARGS__) |
| #define | vmfle_vf_f32m1_b32_m(...) __riscv_vmfle_vf_f32m1_b32_mu(__VA_ARGS__) |
| #define | vmfle_vf_f32m2_b16(...) __riscv_vmfle_vf_f32m2_b16(__VA_ARGS__) |
| #define | vmfle_vf_f32m2_b16_m(...) __riscv_vmfle_vf_f32m2_b16_mu(__VA_ARGS__) |
| #define | vmfle_vf_f32m4_b8(...) __riscv_vmfle_vf_f32m4_b8(__VA_ARGS__) |
| #define | vmfle_vf_f32m4_b8_m(...) __riscv_vmfle_vf_f32m4_b8_mu(__VA_ARGS__) |
| #define | vmfle_vf_f32m8_b4(...) __riscv_vmfle_vf_f32m8_b4(__VA_ARGS__) |
| #define | vmfle_vf_f32m8_b4_m(...) __riscv_vmfle_vf_f32m8_b4_mu(__VA_ARGS__) |
| #define | vmfle_vf_f32mf2_b64(...) __riscv_vmfle_vf_f32mf2_b64(__VA_ARGS__) |
| #define | vmfle_vf_f32mf2_b64_m(...) __riscv_vmfle_vf_f32mf2_b64_mu(__VA_ARGS__) |
| #define | vmfle_vf_f64m1_b64(...) __riscv_vmfle_vf_f64m1_b64(__VA_ARGS__) |
| #define | vmfle_vf_f64m1_b64_m(...) __riscv_vmfle_vf_f64m1_b64_mu(__VA_ARGS__) |
| #define | vmfle_vf_f64m2_b32(...) __riscv_vmfle_vf_f64m2_b32(__VA_ARGS__) |
| #define | vmfle_vf_f64m2_b32_m(...) __riscv_vmfle_vf_f64m2_b32_mu(__VA_ARGS__) |
| #define | vmfle_vf_f64m4_b16(...) __riscv_vmfle_vf_f64m4_b16(__VA_ARGS__) |
| #define | vmfle_vf_f64m4_b16_m(...) __riscv_vmfle_vf_f64m4_b16_mu(__VA_ARGS__) |
| #define | vmfle_vf_f64m8_b8(...) __riscv_vmfle_vf_f64m8_b8(__VA_ARGS__) |
| #define | vmfle_vf_f64m8_b8_m(...) __riscv_vmfle_vf_f64m8_b8_mu(__VA_ARGS__) |
| #define | vmfle_vv_f16m1_b16(...) __riscv_vmfle_vv_f16m1_b16(__VA_ARGS__) |
| #define | vmfle_vv_f16m1_b16_m(...) __riscv_vmfle_vv_f16m1_b16_mu(__VA_ARGS__) |
| #define | vmfle_vv_f16m2_b8(...) __riscv_vmfle_vv_f16m2_b8(__VA_ARGS__) |
| #define | vmfle_vv_f16m2_b8_m(...) __riscv_vmfle_vv_f16m2_b8_mu(__VA_ARGS__) |
| #define | vmfle_vv_f16m4_b4(...) __riscv_vmfle_vv_f16m4_b4(__VA_ARGS__) |
| #define | vmfle_vv_f16m4_b4_m(...) __riscv_vmfle_vv_f16m4_b4_mu(__VA_ARGS__) |
| #define | vmfle_vv_f16m8_b2(...) __riscv_vmfle_vv_f16m8_b2(__VA_ARGS__) |
| #define | vmfle_vv_f16m8_b2_m(...) __riscv_vmfle_vv_f16m8_b2_mu(__VA_ARGS__) |
| #define | vmfle_vv_f16mf2_b32(...) __riscv_vmfle_vv_f16mf2_b32(__VA_ARGS__) |
| #define | vmfle_vv_f16mf2_b32_m(...) __riscv_vmfle_vv_f16mf2_b32_mu(__VA_ARGS__) |
| #define | vmfle_vv_f16mf4_b64(...) __riscv_vmfle_vv_f16mf4_b64(__VA_ARGS__) |
| #define | vmfle_vv_f16mf4_b64_m(...) __riscv_vmfle_vv_f16mf4_b64_mu(__VA_ARGS__) |
| #define | vmfle_vv_f32m1_b32(...) __riscv_vmfle_vv_f32m1_b32(__VA_ARGS__) |
| #define | vmfle_vv_f32m1_b32_m(...) __riscv_vmfle_vv_f32m1_b32_mu(__VA_ARGS__) |
| #define | vmfle_vv_f32m2_b16(...) __riscv_vmfle_vv_f32m2_b16(__VA_ARGS__) |
| #define | vmfle_vv_f32m2_b16_m(...) __riscv_vmfle_vv_f32m2_b16_mu(__VA_ARGS__) |
| #define | vmfle_vv_f32m4_b8(...) __riscv_vmfle_vv_f32m4_b8(__VA_ARGS__) |
| #define | vmfle_vv_f32m4_b8_m(...) __riscv_vmfle_vv_f32m4_b8_mu(__VA_ARGS__) |
| #define | vmfle_vv_f32m8_b4(...) __riscv_vmfle_vv_f32m8_b4(__VA_ARGS__) |
| #define | vmfle_vv_f32m8_b4_m(...) __riscv_vmfle_vv_f32m8_b4_mu(__VA_ARGS__) |
| #define | vmfle_vv_f32mf2_b64(...) __riscv_vmfle_vv_f32mf2_b64(__VA_ARGS__) |
| #define | vmfle_vv_f32mf2_b64_m(...) __riscv_vmfle_vv_f32mf2_b64_mu(__VA_ARGS__) |
| #define | vmfle_vv_f64m1_b64(...) __riscv_vmfle_vv_f64m1_b64(__VA_ARGS__) |
| #define | vmfle_vv_f64m1_b64_m(...) __riscv_vmfle_vv_f64m1_b64_mu(__VA_ARGS__) |
| #define | vmfle_vv_f64m2_b32(...) __riscv_vmfle_vv_f64m2_b32(__VA_ARGS__) |
| #define | vmfle_vv_f64m2_b32_m(...) __riscv_vmfle_vv_f64m2_b32_mu(__VA_ARGS__) |
| #define | vmfle_vv_f64m4_b16(...) __riscv_vmfle_vv_f64m4_b16(__VA_ARGS__) |
| #define | vmfle_vv_f64m4_b16_m(...) __riscv_vmfle_vv_f64m4_b16_mu(__VA_ARGS__) |
| #define | vmfle_vv_f64m8_b8(...) __riscv_vmfle_vv_f64m8_b8(__VA_ARGS__) |
| #define | vmfle_vv_f64m8_b8_m(...) __riscv_vmfle_vv_f64m8_b8_mu(__VA_ARGS__) |
| #define | vmflt_vf_f16m1_b16(...) __riscv_vmflt_vf_f16m1_b16(__VA_ARGS__) |
| #define | vmflt_vf_f16m1_b16_m(...) __riscv_vmflt_vf_f16m1_b16_mu(__VA_ARGS__) |
| #define | vmflt_vf_f16m2_b8(...) __riscv_vmflt_vf_f16m2_b8(__VA_ARGS__) |
| #define | vmflt_vf_f16m2_b8_m(...) __riscv_vmflt_vf_f16m2_b8_mu(__VA_ARGS__) |
| #define | vmflt_vf_f16m4_b4(...) __riscv_vmflt_vf_f16m4_b4(__VA_ARGS__) |
| #define | vmflt_vf_f16m4_b4_m(...) __riscv_vmflt_vf_f16m4_b4_mu(__VA_ARGS__) |
| #define | vmflt_vf_f16m8_b2(...) __riscv_vmflt_vf_f16m8_b2(__VA_ARGS__) |
| #define | vmflt_vf_f16m8_b2_m(...) __riscv_vmflt_vf_f16m8_b2_mu(__VA_ARGS__) |
| #define | vmflt_vf_f16mf2_b32(...) __riscv_vmflt_vf_f16mf2_b32(__VA_ARGS__) |
| #define | vmflt_vf_f16mf2_b32_m(...) __riscv_vmflt_vf_f16mf2_b32_mu(__VA_ARGS__) |
| #define | vmflt_vf_f16mf4_b64(...) __riscv_vmflt_vf_f16mf4_b64(__VA_ARGS__) |
| #define | vmflt_vf_f16mf4_b64_m(...) __riscv_vmflt_vf_f16mf4_b64_mu(__VA_ARGS__) |
| #define | vmflt_vf_f32m1_b32(...) __riscv_vmflt_vf_f32m1_b32(__VA_ARGS__) |
| #define | vmflt_vf_f32m1_b32_m(...) __riscv_vmflt_vf_f32m1_b32_mu(__VA_ARGS__) |
| #define | vmflt_vf_f32m2_b16(...) __riscv_vmflt_vf_f32m2_b16(__VA_ARGS__) |
| #define | vmflt_vf_f32m2_b16_m(...) __riscv_vmflt_vf_f32m2_b16_mu(__VA_ARGS__) |
| #define | vmflt_vf_f32m4_b8(...) __riscv_vmflt_vf_f32m4_b8(__VA_ARGS__) |
| #define | vmflt_vf_f32m4_b8_m(...) __riscv_vmflt_vf_f32m4_b8_mu(__VA_ARGS__) |
| #define | vmflt_vf_f32m8_b4(...) __riscv_vmflt_vf_f32m8_b4(__VA_ARGS__) |
| #define | vmflt_vf_f32m8_b4_m(...) __riscv_vmflt_vf_f32m8_b4_mu(__VA_ARGS__) |
| #define | vmflt_vf_f32mf2_b64(...) __riscv_vmflt_vf_f32mf2_b64(__VA_ARGS__) |
| #define | vmflt_vf_f32mf2_b64_m(...) __riscv_vmflt_vf_f32mf2_b64_mu(__VA_ARGS__) |
| #define | vmflt_vf_f64m1_b64(...) __riscv_vmflt_vf_f64m1_b64(__VA_ARGS__) |
| #define | vmflt_vf_f64m1_b64_m(...) __riscv_vmflt_vf_f64m1_b64_mu(__VA_ARGS__) |
| #define | vmflt_vf_f64m2_b32(...) __riscv_vmflt_vf_f64m2_b32(__VA_ARGS__) |
| #define | vmflt_vf_f64m2_b32_m(...) __riscv_vmflt_vf_f64m2_b32_mu(__VA_ARGS__) |
| #define | vmflt_vf_f64m4_b16(...) __riscv_vmflt_vf_f64m4_b16(__VA_ARGS__) |
| #define | vmflt_vf_f64m4_b16_m(...) __riscv_vmflt_vf_f64m4_b16_mu(__VA_ARGS__) |
| #define | vmflt_vf_f64m8_b8(...) __riscv_vmflt_vf_f64m8_b8(__VA_ARGS__) |
| #define | vmflt_vf_f64m8_b8_m(...) __riscv_vmflt_vf_f64m8_b8_mu(__VA_ARGS__) |
| #define | vmflt_vv_f16m1_b16(...) __riscv_vmflt_vv_f16m1_b16(__VA_ARGS__) |
| #define | vmflt_vv_f16m1_b16_m(...) __riscv_vmflt_vv_f16m1_b16_mu(__VA_ARGS__) |
| #define | vmflt_vv_f16m2_b8(...) __riscv_vmflt_vv_f16m2_b8(__VA_ARGS__) |
| #define | vmflt_vv_f16m2_b8_m(...) __riscv_vmflt_vv_f16m2_b8_mu(__VA_ARGS__) |
| #define | vmflt_vv_f16m4_b4(...) __riscv_vmflt_vv_f16m4_b4(__VA_ARGS__) |
| #define | vmflt_vv_f16m4_b4_m(...) __riscv_vmflt_vv_f16m4_b4_mu(__VA_ARGS__) |
| #define | vmflt_vv_f16m8_b2(...) __riscv_vmflt_vv_f16m8_b2(__VA_ARGS__) |
| #define | vmflt_vv_f16m8_b2_m(...) __riscv_vmflt_vv_f16m8_b2_mu(__VA_ARGS__) |
| #define | vmflt_vv_f16mf2_b32(...) __riscv_vmflt_vv_f16mf2_b32(__VA_ARGS__) |
| #define | vmflt_vv_f16mf2_b32_m(...) __riscv_vmflt_vv_f16mf2_b32_mu(__VA_ARGS__) |
| #define | vmflt_vv_f16mf4_b64(...) __riscv_vmflt_vv_f16mf4_b64(__VA_ARGS__) |
| #define | vmflt_vv_f16mf4_b64_m(...) __riscv_vmflt_vv_f16mf4_b64_mu(__VA_ARGS__) |
| #define | vmflt_vv_f32m1_b32(...) __riscv_vmflt_vv_f32m1_b32(__VA_ARGS__) |
| #define | vmflt_vv_f32m1_b32_m(...) __riscv_vmflt_vv_f32m1_b32_mu(__VA_ARGS__) |
| #define | vmflt_vv_f32m2_b16(...) __riscv_vmflt_vv_f32m2_b16(__VA_ARGS__) |
| #define | vmflt_vv_f32m2_b16_m(...) __riscv_vmflt_vv_f32m2_b16_mu(__VA_ARGS__) |
| #define | vmflt_vv_f32m4_b8(...) __riscv_vmflt_vv_f32m4_b8(__VA_ARGS__) |
| #define | vmflt_vv_f32m4_b8_m(...) __riscv_vmflt_vv_f32m4_b8_mu(__VA_ARGS__) |
| #define | vmflt_vv_f32m8_b4(...) __riscv_vmflt_vv_f32m8_b4(__VA_ARGS__) |
| #define | vmflt_vv_f32m8_b4_m(...) __riscv_vmflt_vv_f32m8_b4_mu(__VA_ARGS__) |
| #define | vmflt_vv_f32mf2_b64(...) __riscv_vmflt_vv_f32mf2_b64(__VA_ARGS__) |
| #define | vmflt_vv_f32mf2_b64_m(...) __riscv_vmflt_vv_f32mf2_b64_mu(__VA_ARGS__) |
| #define | vmflt_vv_f64m1_b64(...) __riscv_vmflt_vv_f64m1_b64(__VA_ARGS__) |
| #define | vmflt_vv_f64m1_b64_m(...) __riscv_vmflt_vv_f64m1_b64_mu(__VA_ARGS__) |
| #define | vmflt_vv_f64m2_b32(...) __riscv_vmflt_vv_f64m2_b32(__VA_ARGS__) |
| #define | vmflt_vv_f64m2_b32_m(...) __riscv_vmflt_vv_f64m2_b32_mu(__VA_ARGS__) |
| #define | vmflt_vv_f64m4_b16(...) __riscv_vmflt_vv_f64m4_b16(__VA_ARGS__) |
| #define | vmflt_vv_f64m4_b16_m(...) __riscv_vmflt_vv_f64m4_b16_mu(__VA_ARGS__) |
| #define | vmflt_vv_f64m8_b8(...) __riscv_vmflt_vv_f64m8_b8(__VA_ARGS__) |
| #define | vmflt_vv_f64m8_b8_m(...) __riscv_vmflt_vv_f64m8_b8_mu(__VA_ARGS__) |
| #define | vmfne_vf_f16m1_b16(...) __riscv_vmfne_vf_f16m1_b16(__VA_ARGS__) |
| #define | vmfne_vf_f16m1_b16_m(...) __riscv_vmfne_vf_f16m1_b16_mu(__VA_ARGS__) |
| #define | vmfne_vf_f16m2_b8(...) __riscv_vmfne_vf_f16m2_b8(__VA_ARGS__) |
| #define | vmfne_vf_f16m2_b8_m(...) __riscv_vmfne_vf_f16m2_b8_mu(__VA_ARGS__) |
| #define | vmfne_vf_f16m4_b4(...) __riscv_vmfne_vf_f16m4_b4(__VA_ARGS__) |
| #define | vmfne_vf_f16m4_b4_m(...) __riscv_vmfne_vf_f16m4_b4_mu(__VA_ARGS__) |
| #define | vmfne_vf_f16m8_b2(...) __riscv_vmfne_vf_f16m8_b2(__VA_ARGS__) |
| #define | vmfne_vf_f16m8_b2_m(...) __riscv_vmfne_vf_f16m8_b2_mu(__VA_ARGS__) |
| #define | vmfne_vf_f16mf2_b32(...) __riscv_vmfne_vf_f16mf2_b32(__VA_ARGS__) |
| #define | vmfne_vf_f16mf2_b32_m(...) __riscv_vmfne_vf_f16mf2_b32_mu(__VA_ARGS__) |
| #define | vmfne_vf_f16mf4_b64(...) __riscv_vmfne_vf_f16mf4_b64(__VA_ARGS__) |
| #define | vmfne_vf_f16mf4_b64_m(...) __riscv_vmfne_vf_f16mf4_b64_mu(__VA_ARGS__) |
| #define | vmfne_vf_f32m1_b32(...) __riscv_vmfne_vf_f32m1_b32(__VA_ARGS__) |
| #define | vmfne_vf_f32m1_b32_m(...) __riscv_vmfne_vf_f32m1_b32_mu(__VA_ARGS__) |
| #define | vmfne_vf_f32m2_b16(...) __riscv_vmfne_vf_f32m2_b16(__VA_ARGS__) |
| #define | vmfne_vf_f32m2_b16_m(...) __riscv_vmfne_vf_f32m2_b16_mu(__VA_ARGS__) |
| #define | vmfne_vf_f32m4_b8(...) __riscv_vmfne_vf_f32m4_b8(__VA_ARGS__) |
| #define | vmfne_vf_f32m4_b8_m(...) __riscv_vmfne_vf_f32m4_b8_mu(__VA_ARGS__) |
| #define | vmfne_vf_f32m8_b4(...) __riscv_vmfne_vf_f32m8_b4(__VA_ARGS__) |
| #define | vmfne_vf_f32m8_b4_m(...) __riscv_vmfne_vf_f32m8_b4_mu(__VA_ARGS__) |
| #define | vmfne_vf_f32mf2_b64(...) __riscv_vmfne_vf_f32mf2_b64(__VA_ARGS__) |
| #define | vmfne_vf_f32mf2_b64_m(...) __riscv_vmfne_vf_f32mf2_b64_mu(__VA_ARGS__) |
| #define | vmfne_vf_f64m1_b64(...) __riscv_vmfne_vf_f64m1_b64(__VA_ARGS__) |
| #define | vmfne_vf_f64m1_b64_m(...) __riscv_vmfne_vf_f64m1_b64_mu(__VA_ARGS__) |
| #define | vmfne_vf_f64m2_b32(...) __riscv_vmfne_vf_f64m2_b32(__VA_ARGS__) |
| #define | vmfne_vf_f64m2_b32_m(...) __riscv_vmfne_vf_f64m2_b32_mu(__VA_ARGS__) |
| #define | vmfne_vf_f64m4_b16(...) __riscv_vmfne_vf_f64m4_b16(__VA_ARGS__) |
| #define | vmfne_vf_f64m4_b16_m(...) __riscv_vmfne_vf_f64m4_b16_mu(__VA_ARGS__) |
| #define | vmfne_vf_f64m8_b8(...) __riscv_vmfne_vf_f64m8_b8(__VA_ARGS__) |
| #define | vmfne_vf_f64m8_b8_m(...) __riscv_vmfne_vf_f64m8_b8_mu(__VA_ARGS__) |
| #define | vmfne_vv_f16m1_b16(...) __riscv_vmfne_vv_f16m1_b16(__VA_ARGS__) |
| #define | vmfne_vv_f16m1_b16_m(...) __riscv_vmfne_vv_f16m1_b16_mu(__VA_ARGS__) |
| #define | vmfne_vv_f16m2_b8(...) __riscv_vmfne_vv_f16m2_b8(__VA_ARGS__) |
| #define | vmfne_vv_f16m2_b8_m(...) __riscv_vmfne_vv_f16m2_b8_mu(__VA_ARGS__) |
| #define | vmfne_vv_f16m4_b4(...) __riscv_vmfne_vv_f16m4_b4(__VA_ARGS__) |
| #define | vmfne_vv_f16m4_b4_m(...) __riscv_vmfne_vv_f16m4_b4_mu(__VA_ARGS__) |
| #define | vmfne_vv_f16m8_b2(...) __riscv_vmfne_vv_f16m8_b2(__VA_ARGS__) |
| #define | vmfne_vv_f16m8_b2_m(...) __riscv_vmfne_vv_f16m8_b2_mu(__VA_ARGS__) |
| #define | vmfne_vv_f16mf2_b32(...) __riscv_vmfne_vv_f16mf2_b32(__VA_ARGS__) |
| #define | vmfne_vv_f16mf2_b32_m(...) __riscv_vmfne_vv_f16mf2_b32_mu(__VA_ARGS__) |
| #define | vmfne_vv_f16mf4_b64(...) __riscv_vmfne_vv_f16mf4_b64(__VA_ARGS__) |
| #define | vmfne_vv_f16mf4_b64_m(...) __riscv_vmfne_vv_f16mf4_b64_mu(__VA_ARGS__) |
| #define | vmfne_vv_f32m1_b32(...) __riscv_vmfne_vv_f32m1_b32(__VA_ARGS__) |
| #define | vmfne_vv_f32m1_b32_m(...) __riscv_vmfne_vv_f32m1_b32_mu(__VA_ARGS__) |
| #define | vmfne_vv_f32m2_b16(...) __riscv_vmfne_vv_f32m2_b16(__VA_ARGS__) |
| #define | vmfne_vv_f32m2_b16_m(...) __riscv_vmfne_vv_f32m2_b16_mu(__VA_ARGS__) |
| #define | vmfne_vv_f32m4_b8(...) __riscv_vmfne_vv_f32m4_b8(__VA_ARGS__) |
| #define | vmfne_vv_f32m4_b8_m(...) __riscv_vmfne_vv_f32m4_b8_mu(__VA_ARGS__) |
| #define | vmfne_vv_f32m8_b4(...) __riscv_vmfne_vv_f32m8_b4(__VA_ARGS__) |
| #define | vmfne_vv_f32m8_b4_m(...) __riscv_vmfne_vv_f32m8_b4_mu(__VA_ARGS__) |
| #define | vmfne_vv_f32mf2_b64(...) __riscv_vmfne_vv_f32mf2_b64(__VA_ARGS__) |
| #define | vmfne_vv_f32mf2_b64_m(...) __riscv_vmfne_vv_f32mf2_b64_mu(__VA_ARGS__) |
| #define | vmfne_vv_f64m1_b64(...) __riscv_vmfne_vv_f64m1_b64(__VA_ARGS__) |
| #define | vmfne_vv_f64m1_b64_m(...) __riscv_vmfne_vv_f64m1_b64_mu(__VA_ARGS__) |
| #define | vmfne_vv_f64m2_b32(...) __riscv_vmfne_vv_f64m2_b32(__VA_ARGS__) |
| #define | vmfne_vv_f64m2_b32_m(...) __riscv_vmfne_vv_f64m2_b32_mu(__VA_ARGS__) |
| #define | vmfne_vv_f64m4_b16(...) __riscv_vmfne_vv_f64m4_b16(__VA_ARGS__) |
| #define | vmfne_vv_f64m4_b16_m(...) __riscv_vmfne_vv_f64m4_b16_mu(__VA_ARGS__) |
| #define | vmfne_vv_f64m8_b8(...) __riscv_vmfne_vv_f64m8_b8(__VA_ARGS__) |
| #define | vmfne_vv_f64m8_b8_m(...) __riscv_vmfne_vv_f64m8_b8_mu(__VA_ARGS__) |
| #define | vmin_vv_i16m1(...) __riscv_vmin_vv_i16m1(__VA_ARGS__) |
| #define | vmin_vv_i16m1_m(...) __riscv_vmin_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vmin_vv_i16m2(...) __riscv_vmin_vv_i16m2(__VA_ARGS__) |
| #define | vmin_vv_i16m2_m(...) __riscv_vmin_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vmin_vv_i16m4(...) __riscv_vmin_vv_i16m4(__VA_ARGS__) |
| #define | vmin_vv_i16m4_m(...) __riscv_vmin_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vmin_vv_i16m8(...) __riscv_vmin_vv_i16m8(__VA_ARGS__) |
| #define | vmin_vv_i16m8_m(...) __riscv_vmin_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vmin_vv_i16mf2(...) __riscv_vmin_vv_i16mf2(__VA_ARGS__) |
| #define | vmin_vv_i16mf2_m(...) __riscv_vmin_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vmin_vv_i16mf4(...) __riscv_vmin_vv_i16mf4(__VA_ARGS__) |
| #define | vmin_vv_i16mf4_m(...) __riscv_vmin_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vmin_vv_i32m1(...) __riscv_vmin_vv_i32m1(__VA_ARGS__) |
| #define | vmin_vv_i32m1_m(...) __riscv_vmin_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vmin_vv_i32m2(...) __riscv_vmin_vv_i32m2(__VA_ARGS__) |
| #define | vmin_vv_i32m2_m(...) __riscv_vmin_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vmin_vv_i32m4(...) __riscv_vmin_vv_i32m4(__VA_ARGS__) |
| #define | vmin_vv_i32m4_m(...) __riscv_vmin_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vmin_vv_i32m8(...) __riscv_vmin_vv_i32m8(__VA_ARGS__) |
| #define | vmin_vv_i32m8_m(...) __riscv_vmin_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vmin_vv_i32mf2(...) __riscv_vmin_vv_i32mf2(__VA_ARGS__) |
| #define | vmin_vv_i32mf2_m(...) __riscv_vmin_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vmin_vv_i64m1(...) __riscv_vmin_vv_i64m1(__VA_ARGS__) |
| #define | vmin_vv_i64m1_m(...) __riscv_vmin_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vmin_vv_i64m2(...) __riscv_vmin_vv_i64m2(__VA_ARGS__) |
| #define | vmin_vv_i64m2_m(...) __riscv_vmin_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vmin_vv_i64m4(...) __riscv_vmin_vv_i64m4(__VA_ARGS__) |
| #define | vmin_vv_i64m4_m(...) __riscv_vmin_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vmin_vv_i64m8(...) __riscv_vmin_vv_i64m8(__VA_ARGS__) |
| #define | vmin_vv_i64m8_m(...) __riscv_vmin_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vmin_vv_i8m1(...) __riscv_vmin_vv_i8m1(__VA_ARGS__) |
| #define | vmin_vv_i8m1_m(...) __riscv_vmin_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vmin_vv_i8m2(...) __riscv_vmin_vv_i8m2(__VA_ARGS__) |
| #define | vmin_vv_i8m2_m(...) __riscv_vmin_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vmin_vv_i8m4(...) __riscv_vmin_vv_i8m4(__VA_ARGS__) |
| #define | vmin_vv_i8m4_m(...) __riscv_vmin_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vmin_vv_i8m8(...) __riscv_vmin_vv_i8m8(__VA_ARGS__) |
| #define | vmin_vv_i8m8_m(...) __riscv_vmin_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vmin_vv_i8mf2(...) __riscv_vmin_vv_i8mf2(__VA_ARGS__) |
| #define | vmin_vv_i8mf2_m(...) __riscv_vmin_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vmin_vv_i8mf4(...) __riscv_vmin_vv_i8mf4(__VA_ARGS__) |
| #define | vmin_vv_i8mf4_m(...) __riscv_vmin_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vmin_vv_i8mf8(...) __riscv_vmin_vv_i8mf8(__VA_ARGS__) |
| #define | vmin_vv_i8mf8_m(...) __riscv_vmin_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vmin_vx_i16m1(...) __riscv_vmin_vx_i16m1(__VA_ARGS__) |
| #define | vmin_vx_i16m1_m(...) __riscv_vmin_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vmin_vx_i16m2(...) __riscv_vmin_vx_i16m2(__VA_ARGS__) |
| #define | vmin_vx_i16m2_m(...) __riscv_vmin_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vmin_vx_i16m4(...) __riscv_vmin_vx_i16m4(__VA_ARGS__) |
| #define | vmin_vx_i16m4_m(...) __riscv_vmin_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vmin_vx_i16m8(...) __riscv_vmin_vx_i16m8(__VA_ARGS__) |
| #define | vmin_vx_i16m8_m(...) __riscv_vmin_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vmin_vx_i16mf2(...) __riscv_vmin_vx_i16mf2(__VA_ARGS__) |
| #define | vmin_vx_i16mf2_m(...) __riscv_vmin_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vmin_vx_i16mf4(...) __riscv_vmin_vx_i16mf4(__VA_ARGS__) |
| #define | vmin_vx_i16mf4_m(...) __riscv_vmin_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vmin_vx_i32m1(...) __riscv_vmin_vx_i32m1(__VA_ARGS__) |
| #define | vmin_vx_i32m1_m(...) __riscv_vmin_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vmin_vx_i32m2(...) __riscv_vmin_vx_i32m2(__VA_ARGS__) |
| #define | vmin_vx_i32m2_m(...) __riscv_vmin_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vmin_vx_i32m4(...) __riscv_vmin_vx_i32m4(__VA_ARGS__) |
| #define | vmin_vx_i32m4_m(...) __riscv_vmin_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vmin_vx_i32m8(...) __riscv_vmin_vx_i32m8(__VA_ARGS__) |
| #define | vmin_vx_i32m8_m(...) __riscv_vmin_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vmin_vx_i32mf2(...) __riscv_vmin_vx_i32mf2(__VA_ARGS__) |
| #define | vmin_vx_i32mf2_m(...) __riscv_vmin_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vmin_vx_i64m1(...) __riscv_vmin_vx_i64m1(__VA_ARGS__) |
| #define | vmin_vx_i64m1_m(...) __riscv_vmin_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vmin_vx_i64m2(...) __riscv_vmin_vx_i64m2(__VA_ARGS__) |
| #define | vmin_vx_i64m2_m(...) __riscv_vmin_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vmin_vx_i64m4(...) __riscv_vmin_vx_i64m4(__VA_ARGS__) |
| #define | vmin_vx_i64m4_m(...) __riscv_vmin_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vmin_vx_i64m8(...) __riscv_vmin_vx_i64m8(__VA_ARGS__) |
| #define | vmin_vx_i64m8_m(...) __riscv_vmin_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vmin_vx_i8m1(...) __riscv_vmin_vx_i8m1(__VA_ARGS__) |
| #define | vmin_vx_i8m1_m(...) __riscv_vmin_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vmin_vx_i8m2(...) __riscv_vmin_vx_i8m2(__VA_ARGS__) |
| #define | vmin_vx_i8m2_m(...) __riscv_vmin_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vmin_vx_i8m4(...) __riscv_vmin_vx_i8m4(__VA_ARGS__) |
| #define | vmin_vx_i8m4_m(...) __riscv_vmin_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vmin_vx_i8m8(...) __riscv_vmin_vx_i8m8(__VA_ARGS__) |
| #define | vmin_vx_i8m8_m(...) __riscv_vmin_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vmin_vx_i8mf2(...) __riscv_vmin_vx_i8mf2(__VA_ARGS__) |
| #define | vmin_vx_i8mf2_m(...) __riscv_vmin_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vmin_vx_i8mf4(...) __riscv_vmin_vx_i8mf4(__VA_ARGS__) |
| #define | vmin_vx_i8mf4_m(...) __riscv_vmin_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vmin_vx_i8mf8(...) __riscv_vmin_vx_i8mf8(__VA_ARGS__) |
| #define | vmin_vx_i8mf8_m(...) __riscv_vmin_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vminu_vv_u16m1(...) __riscv_vminu_vv_u16m1(__VA_ARGS__) |
| #define | vminu_vv_u16m1_m(...) __riscv_vminu_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vminu_vv_u16m2(...) __riscv_vminu_vv_u16m2(__VA_ARGS__) |
| #define | vminu_vv_u16m2_m(...) __riscv_vminu_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vminu_vv_u16m4(...) __riscv_vminu_vv_u16m4(__VA_ARGS__) |
| #define | vminu_vv_u16m4_m(...) __riscv_vminu_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vminu_vv_u16m8(...) __riscv_vminu_vv_u16m8(__VA_ARGS__) |
| #define | vminu_vv_u16m8_m(...) __riscv_vminu_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vminu_vv_u16mf2(...) __riscv_vminu_vv_u16mf2(__VA_ARGS__) |
| #define | vminu_vv_u16mf2_m(...) __riscv_vminu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vminu_vv_u16mf4(...) __riscv_vminu_vv_u16mf4(__VA_ARGS__) |
| #define | vminu_vv_u16mf4_m(...) __riscv_vminu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vminu_vv_u32m1(...) __riscv_vminu_vv_u32m1(__VA_ARGS__) |
| #define | vminu_vv_u32m1_m(...) __riscv_vminu_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vminu_vv_u32m2(...) __riscv_vminu_vv_u32m2(__VA_ARGS__) |
| #define | vminu_vv_u32m2_m(...) __riscv_vminu_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vminu_vv_u32m4(...) __riscv_vminu_vv_u32m4(__VA_ARGS__) |
| #define | vminu_vv_u32m4_m(...) __riscv_vminu_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vminu_vv_u32m8(...) __riscv_vminu_vv_u32m8(__VA_ARGS__) |
| #define | vminu_vv_u32m8_m(...) __riscv_vminu_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vminu_vv_u32mf2(...) __riscv_vminu_vv_u32mf2(__VA_ARGS__) |
| #define | vminu_vv_u32mf2_m(...) __riscv_vminu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vminu_vv_u64m1(...) __riscv_vminu_vv_u64m1(__VA_ARGS__) |
| #define | vminu_vv_u64m1_m(...) __riscv_vminu_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vminu_vv_u64m2(...) __riscv_vminu_vv_u64m2(__VA_ARGS__) |
| #define | vminu_vv_u64m2_m(...) __riscv_vminu_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vminu_vv_u64m4(...) __riscv_vminu_vv_u64m4(__VA_ARGS__) |
| #define | vminu_vv_u64m4_m(...) __riscv_vminu_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vminu_vv_u64m8(...) __riscv_vminu_vv_u64m8(__VA_ARGS__) |
| #define | vminu_vv_u64m8_m(...) __riscv_vminu_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vminu_vv_u8m1(...) __riscv_vminu_vv_u8m1(__VA_ARGS__) |
| #define | vminu_vv_u8m1_m(...) __riscv_vminu_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vminu_vv_u8m2(...) __riscv_vminu_vv_u8m2(__VA_ARGS__) |
| #define | vminu_vv_u8m2_m(...) __riscv_vminu_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vminu_vv_u8m4(...) __riscv_vminu_vv_u8m4(__VA_ARGS__) |
| #define | vminu_vv_u8m4_m(...) __riscv_vminu_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vminu_vv_u8m8(...) __riscv_vminu_vv_u8m8(__VA_ARGS__) |
| #define | vminu_vv_u8m8_m(...) __riscv_vminu_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vminu_vv_u8mf2(...) __riscv_vminu_vv_u8mf2(__VA_ARGS__) |
| #define | vminu_vv_u8mf2_m(...) __riscv_vminu_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vminu_vv_u8mf4(...) __riscv_vminu_vv_u8mf4(__VA_ARGS__) |
| #define | vminu_vv_u8mf4_m(...) __riscv_vminu_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vminu_vv_u8mf8(...) __riscv_vminu_vv_u8mf8(__VA_ARGS__) |
| #define | vminu_vv_u8mf8_m(...) __riscv_vminu_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vminu_vx_u16m1(...) __riscv_vminu_vx_u16m1(__VA_ARGS__) |
| #define | vminu_vx_u16m1_m(...) __riscv_vminu_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vminu_vx_u16m2(...) __riscv_vminu_vx_u16m2(__VA_ARGS__) |
| #define | vminu_vx_u16m2_m(...) __riscv_vminu_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vminu_vx_u16m4(...) __riscv_vminu_vx_u16m4(__VA_ARGS__) |
| #define | vminu_vx_u16m4_m(...) __riscv_vminu_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vminu_vx_u16m8(...) __riscv_vminu_vx_u16m8(__VA_ARGS__) |
| #define | vminu_vx_u16m8_m(...) __riscv_vminu_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vminu_vx_u16mf2(...) __riscv_vminu_vx_u16mf2(__VA_ARGS__) |
| #define | vminu_vx_u16mf2_m(...) __riscv_vminu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vminu_vx_u16mf4(...) __riscv_vminu_vx_u16mf4(__VA_ARGS__) |
| #define | vminu_vx_u16mf4_m(...) __riscv_vminu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vminu_vx_u32m1(...) __riscv_vminu_vx_u32m1(__VA_ARGS__) |
| #define | vminu_vx_u32m1_m(...) __riscv_vminu_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vminu_vx_u32m2(...) __riscv_vminu_vx_u32m2(__VA_ARGS__) |
| #define | vminu_vx_u32m2_m(...) __riscv_vminu_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vminu_vx_u32m4(...) __riscv_vminu_vx_u32m4(__VA_ARGS__) |
| #define | vminu_vx_u32m4_m(...) __riscv_vminu_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vminu_vx_u32m8(...) __riscv_vminu_vx_u32m8(__VA_ARGS__) |
| #define | vminu_vx_u32m8_m(...) __riscv_vminu_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vminu_vx_u32mf2(...) __riscv_vminu_vx_u32mf2(__VA_ARGS__) |
| #define | vminu_vx_u32mf2_m(...) __riscv_vminu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vminu_vx_u64m1(...) __riscv_vminu_vx_u64m1(__VA_ARGS__) |
| #define | vminu_vx_u64m1_m(...) __riscv_vminu_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vminu_vx_u64m2(...) __riscv_vminu_vx_u64m2(__VA_ARGS__) |
| #define | vminu_vx_u64m2_m(...) __riscv_vminu_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vminu_vx_u64m4(...) __riscv_vminu_vx_u64m4(__VA_ARGS__) |
| #define | vminu_vx_u64m4_m(...) __riscv_vminu_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vminu_vx_u64m8(...) __riscv_vminu_vx_u64m8(__VA_ARGS__) |
| #define | vminu_vx_u64m8_m(...) __riscv_vminu_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vminu_vx_u8m1(...) __riscv_vminu_vx_u8m1(__VA_ARGS__) |
| #define | vminu_vx_u8m1_m(...) __riscv_vminu_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vminu_vx_u8m2(...) __riscv_vminu_vx_u8m2(__VA_ARGS__) |
| #define | vminu_vx_u8m2_m(...) __riscv_vminu_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vminu_vx_u8m4(...) __riscv_vminu_vx_u8m4(__VA_ARGS__) |
| #define | vminu_vx_u8m4_m(...) __riscv_vminu_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vminu_vx_u8m8(...) __riscv_vminu_vx_u8m8(__VA_ARGS__) |
| #define | vminu_vx_u8m8_m(...) __riscv_vminu_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vminu_vx_u8mf2(...) __riscv_vminu_vx_u8mf2(__VA_ARGS__) |
| #define | vminu_vx_u8mf2_m(...) __riscv_vminu_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vminu_vx_u8mf4(...) __riscv_vminu_vx_u8mf4(__VA_ARGS__) |
| #define | vminu_vx_u8mf4_m(...) __riscv_vminu_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vminu_vx_u8mf8(...) __riscv_vminu_vx_u8mf8(__VA_ARGS__) |
| #define | vminu_vx_u8mf8_m(...) __riscv_vminu_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vmmv_m_b1(...) __riscv_vmmv_m_b1(__VA_ARGS__) |
| #define | vmmv_m_b16(...) __riscv_vmmv_m_b16(__VA_ARGS__) |
| #define | vmmv_m_b2(...) __riscv_vmmv_m_b2(__VA_ARGS__) |
| #define | vmmv_m_b32(...) __riscv_vmmv_m_b32(__VA_ARGS__) |
| #define | vmmv_m_b4(...) __riscv_vmmv_m_b4(__VA_ARGS__) |
| #define | vmmv_m_b64(...) __riscv_vmmv_m_b64(__VA_ARGS__) |
| #define | vmmv_m_b8(...) __riscv_vmmv_m_b8(__VA_ARGS__) |
| #define | vmnand_mm_b1(...) __riscv_vmnand_mm_b1(__VA_ARGS__) |
| #define | vmnand_mm_b16(...) __riscv_vmnand_mm_b16(__VA_ARGS__) |
| #define | vmnand_mm_b2(...) __riscv_vmnand_mm_b2(__VA_ARGS__) |
| #define | vmnand_mm_b32(...) __riscv_vmnand_mm_b32(__VA_ARGS__) |
| #define | vmnand_mm_b4(...) __riscv_vmnand_mm_b4(__VA_ARGS__) |
| #define | vmnand_mm_b64(...) __riscv_vmnand_mm_b64(__VA_ARGS__) |
| #define | vmnand_mm_b8(...) __riscv_vmnand_mm_b8(__VA_ARGS__) |
| #define | vmnor_mm_b1(...) __riscv_vmnor_mm_b1(__VA_ARGS__) |
| #define | vmnor_mm_b16(...) __riscv_vmnor_mm_b16(__VA_ARGS__) |
| #define | vmnor_mm_b2(...) __riscv_vmnor_mm_b2(__VA_ARGS__) |
| #define | vmnor_mm_b32(...) __riscv_vmnor_mm_b32(__VA_ARGS__) |
| #define | vmnor_mm_b4(...) __riscv_vmnor_mm_b4(__VA_ARGS__) |
| #define | vmnor_mm_b64(...) __riscv_vmnor_mm_b64(__VA_ARGS__) |
| #define | vmnor_mm_b8(...) __riscv_vmnor_mm_b8(__VA_ARGS__) |
| #define | vmnot_m_b1(...) __riscv_vmnot_m_b1(__VA_ARGS__) |
| #define | vmnot_m_b16(...) __riscv_vmnot_m_b16(__VA_ARGS__) |
| #define | vmnot_m_b2(...) __riscv_vmnot_m_b2(__VA_ARGS__) |
| #define | vmnot_m_b32(...) __riscv_vmnot_m_b32(__VA_ARGS__) |
| #define | vmnot_m_b4(...) __riscv_vmnot_m_b4(__VA_ARGS__) |
| #define | vmnot_m_b64(...) __riscv_vmnot_m_b64(__VA_ARGS__) |
| #define | vmnot_m_b8(...) __riscv_vmnot_m_b8(__VA_ARGS__) |
| #define | vmor_mm_b1(...) __riscv_vmor_mm_b1(__VA_ARGS__) |
| #define | vmor_mm_b16(...) __riscv_vmor_mm_b16(__VA_ARGS__) |
| #define | vmor_mm_b2(...) __riscv_vmor_mm_b2(__VA_ARGS__) |
| #define | vmor_mm_b32(...) __riscv_vmor_mm_b32(__VA_ARGS__) |
| #define | vmor_mm_b4(...) __riscv_vmor_mm_b4(__VA_ARGS__) |
| #define | vmor_mm_b64(...) __riscv_vmor_mm_b64(__VA_ARGS__) |
| #define | vmor_mm_b8(...) __riscv_vmor_mm_b8(__VA_ARGS__) |
| #define | vmorn_mm_b1(...) __riscv_vmorn_mm_b1(__VA_ARGS__) |
| #define | vmorn_mm_b16(...) __riscv_vmorn_mm_b16(__VA_ARGS__) |
| #define | vmorn_mm_b2(...) __riscv_vmorn_mm_b2(__VA_ARGS__) |
| #define | vmorn_mm_b32(...) __riscv_vmorn_mm_b32(__VA_ARGS__) |
| #define | vmorn_mm_b4(...) __riscv_vmorn_mm_b4(__VA_ARGS__) |
| #define | vmorn_mm_b64(...) __riscv_vmorn_mm_b64(__VA_ARGS__) |
| #define | vmorn_mm_b8(...) __riscv_vmorn_mm_b8(__VA_ARGS__) |
| #define | vmsbc_vv_i16m1_b16(...) __riscv_vmsbc_vv_i16m1_b16(__VA_ARGS__) |
| #define | vmsbc_vv_i16m2_b8(...) __riscv_vmsbc_vv_i16m2_b8(__VA_ARGS__) |
| #define | vmsbc_vv_i16m4_b4(...) __riscv_vmsbc_vv_i16m4_b4(__VA_ARGS__) |
| #define | vmsbc_vv_i16m8_b2(...) __riscv_vmsbc_vv_i16m8_b2(__VA_ARGS__) |
| #define | vmsbc_vv_i16mf2_b32(...) __riscv_vmsbc_vv_i16mf2_b32(__VA_ARGS__) |
| #define | vmsbc_vv_i16mf4_b64(...) __riscv_vmsbc_vv_i16mf4_b64(__VA_ARGS__) |
| #define | vmsbc_vv_i32m1_b32(...) __riscv_vmsbc_vv_i32m1_b32(__VA_ARGS__) |
| #define | vmsbc_vv_i32m2_b16(...) __riscv_vmsbc_vv_i32m2_b16(__VA_ARGS__) |
| #define | vmsbc_vv_i32m4_b8(...) __riscv_vmsbc_vv_i32m4_b8(__VA_ARGS__) |
| #define | vmsbc_vv_i32m8_b4(...) __riscv_vmsbc_vv_i32m8_b4(__VA_ARGS__) |
| #define | vmsbc_vv_i32mf2_b64(...) __riscv_vmsbc_vv_i32mf2_b64(__VA_ARGS__) |
| #define | vmsbc_vv_i64m1_b64(...) __riscv_vmsbc_vv_i64m1_b64(__VA_ARGS__) |
| #define | vmsbc_vv_i64m2_b32(...) __riscv_vmsbc_vv_i64m2_b32(__VA_ARGS__) |
| #define | vmsbc_vv_i64m4_b16(...) __riscv_vmsbc_vv_i64m4_b16(__VA_ARGS__) |
| #define | vmsbc_vv_i64m8_b8(...) __riscv_vmsbc_vv_i64m8_b8(__VA_ARGS__) |
| #define | vmsbc_vv_i8m1_b8(...) __riscv_vmsbc_vv_i8m1_b8(__VA_ARGS__) |
| #define | vmsbc_vv_i8m2_b4(...) __riscv_vmsbc_vv_i8m2_b4(__VA_ARGS__) |
| #define | vmsbc_vv_i8m4_b2(...) __riscv_vmsbc_vv_i8m4_b2(__VA_ARGS__) |
| #define | vmsbc_vv_i8m8_b1(...) __riscv_vmsbc_vv_i8m8_b1(__VA_ARGS__) |
| #define | vmsbc_vv_i8mf2_b16(...) __riscv_vmsbc_vv_i8mf2_b16(__VA_ARGS__) |
| #define | vmsbc_vv_i8mf4_b32(...) __riscv_vmsbc_vv_i8mf4_b32(__VA_ARGS__) |
| #define | vmsbc_vv_i8mf8_b64(...) __riscv_vmsbc_vv_i8mf8_b64(__VA_ARGS__) |
| #define | vmsbc_vv_u16m1_b16(...) __riscv_vmsbc_vv_u16m1_b16(__VA_ARGS__) |
| #define | vmsbc_vv_u16m2_b8(...) __riscv_vmsbc_vv_u16m2_b8(__VA_ARGS__) |
| #define | vmsbc_vv_u16m4_b4(...) __riscv_vmsbc_vv_u16m4_b4(__VA_ARGS__) |
| #define | vmsbc_vv_u16m8_b2(...) __riscv_vmsbc_vv_u16m8_b2(__VA_ARGS__) |
| #define | vmsbc_vv_u16mf2_b32(...) __riscv_vmsbc_vv_u16mf2_b32(__VA_ARGS__) |
| #define | vmsbc_vv_u16mf4_b64(...) __riscv_vmsbc_vv_u16mf4_b64(__VA_ARGS__) |
| #define | vmsbc_vv_u32m1_b32(...) __riscv_vmsbc_vv_u32m1_b32(__VA_ARGS__) |
| #define | vmsbc_vv_u32m2_b16(...) __riscv_vmsbc_vv_u32m2_b16(__VA_ARGS__) |
| #define | vmsbc_vv_u32m4_b8(...) __riscv_vmsbc_vv_u32m4_b8(__VA_ARGS__) |
| #define | vmsbc_vv_u32m8_b4(...) __riscv_vmsbc_vv_u32m8_b4(__VA_ARGS__) |
| #define | vmsbc_vv_u32mf2_b64(...) __riscv_vmsbc_vv_u32mf2_b64(__VA_ARGS__) |
| #define | vmsbc_vv_u64m1_b64(...) __riscv_vmsbc_vv_u64m1_b64(__VA_ARGS__) |
| #define | vmsbc_vv_u64m2_b32(...) __riscv_vmsbc_vv_u64m2_b32(__VA_ARGS__) |
| #define | vmsbc_vv_u64m4_b16(...) __riscv_vmsbc_vv_u64m4_b16(__VA_ARGS__) |
| #define | vmsbc_vv_u64m8_b8(...) __riscv_vmsbc_vv_u64m8_b8(__VA_ARGS__) |
| #define | vmsbc_vv_u8m1_b8(...) __riscv_vmsbc_vv_u8m1_b8(__VA_ARGS__) |
| #define | vmsbc_vv_u8m2_b4(...) __riscv_vmsbc_vv_u8m2_b4(__VA_ARGS__) |
| #define | vmsbc_vv_u8m4_b2(...) __riscv_vmsbc_vv_u8m4_b2(__VA_ARGS__) |
| #define | vmsbc_vv_u8m8_b1(...) __riscv_vmsbc_vv_u8m8_b1(__VA_ARGS__) |
| #define | vmsbc_vv_u8mf2_b16(...) __riscv_vmsbc_vv_u8mf2_b16(__VA_ARGS__) |
| #define | vmsbc_vv_u8mf4_b32(...) __riscv_vmsbc_vv_u8mf4_b32(__VA_ARGS__) |
| #define | vmsbc_vv_u8mf8_b64(...) __riscv_vmsbc_vv_u8mf8_b64(__VA_ARGS__) |
| #define | vmsbc_vvm_i16m1_b16(...) __riscv_vmsbc_vvm_i16m1_b16(__VA_ARGS__) |
| #define | vmsbc_vvm_i16m2_b8(...) __riscv_vmsbc_vvm_i16m2_b8(__VA_ARGS__) |
| #define | vmsbc_vvm_i16m4_b4(...) __riscv_vmsbc_vvm_i16m4_b4(__VA_ARGS__) |
| #define | vmsbc_vvm_i16m8_b2(...) __riscv_vmsbc_vvm_i16m8_b2(__VA_ARGS__) |
| #define | vmsbc_vvm_i16mf2_b32(...) __riscv_vmsbc_vvm_i16mf2_b32(__VA_ARGS__) |
| #define | vmsbc_vvm_i16mf4_b64(...) __riscv_vmsbc_vvm_i16mf4_b64(__VA_ARGS__) |
| #define | vmsbc_vvm_i32m1_b32(...) __riscv_vmsbc_vvm_i32m1_b32(__VA_ARGS__) |
| #define | vmsbc_vvm_i32m2_b16(...) __riscv_vmsbc_vvm_i32m2_b16(__VA_ARGS__) |
| #define | vmsbc_vvm_i32m4_b8(...) __riscv_vmsbc_vvm_i32m4_b8(__VA_ARGS__) |
| #define | vmsbc_vvm_i32m8_b4(...) __riscv_vmsbc_vvm_i32m8_b4(__VA_ARGS__) |
| #define | vmsbc_vvm_i32mf2_b64(...) __riscv_vmsbc_vvm_i32mf2_b64(__VA_ARGS__) |
| #define | vmsbc_vvm_i64m1_b64(...) __riscv_vmsbc_vvm_i64m1_b64(__VA_ARGS__) |
| #define | vmsbc_vvm_i64m2_b32(...) __riscv_vmsbc_vvm_i64m2_b32(__VA_ARGS__) |
| #define | vmsbc_vvm_i64m4_b16(...) __riscv_vmsbc_vvm_i64m4_b16(__VA_ARGS__) |
| #define | vmsbc_vvm_i64m8_b8(...) __riscv_vmsbc_vvm_i64m8_b8(__VA_ARGS__) |
| #define | vmsbc_vvm_i8m1_b8(...) __riscv_vmsbc_vvm_i8m1_b8(__VA_ARGS__) |
| #define | vmsbc_vvm_i8m2_b4(...) __riscv_vmsbc_vvm_i8m2_b4(__VA_ARGS__) |
| #define | vmsbc_vvm_i8m4_b2(...) __riscv_vmsbc_vvm_i8m4_b2(__VA_ARGS__) |
| #define | vmsbc_vvm_i8m8_b1(...) __riscv_vmsbc_vvm_i8m8_b1(__VA_ARGS__) |
| #define | vmsbc_vvm_i8mf2_b16(...) __riscv_vmsbc_vvm_i8mf2_b16(__VA_ARGS__) |
| #define | vmsbc_vvm_i8mf4_b32(...) __riscv_vmsbc_vvm_i8mf4_b32(__VA_ARGS__) |
| #define | vmsbc_vvm_i8mf8_b64(...) __riscv_vmsbc_vvm_i8mf8_b64(__VA_ARGS__) |
| #define | vmsbc_vvm_u16m1_b16(...) __riscv_vmsbc_vvm_u16m1_b16(__VA_ARGS__) |
| #define | vmsbc_vvm_u16m2_b8(...) __riscv_vmsbc_vvm_u16m2_b8(__VA_ARGS__) |
| #define | vmsbc_vvm_u16m4_b4(...) __riscv_vmsbc_vvm_u16m4_b4(__VA_ARGS__) |
| #define | vmsbc_vvm_u16m8_b2(...) __riscv_vmsbc_vvm_u16m8_b2(__VA_ARGS__) |
| #define | vmsbc_vvm_u16mf2_b32(...) __riscv_vmsbc_vvm_u16mf2_b32(__VA_ARGS__) |
| #define | vmsbc_vvm_u16mf4_b64(...) __riscv_vmsbc_vvm_u16mf4_b64(__VA_ARGS__) |
| #define | vmsbc_vvm_u32m1_b32(...) __riscv_vmsbc_vvm_u32m1_b32(__VA_ARGS__) |
| #define | vmsbc_vvm_u32m2_b16(...) __riscv_vmsbc_vvm_u32m2_b16(__VA_ARGS__) |
| #define | vmsbc_vvm_u32m4_b8(...) __riscv_vmsbc_vvm_u32m4_b8(__VA_ARGS__) |
| #define | vmsbc_vvm_u32m8_b4(...) __riscv_vmsbc_vvm_u32m8_b4(__VA_ARGS__) |
| #define | vmsbc_vvm_u32mf2_b64(...) __riscv_vmsbc_vvm_u32mf2_b64(__VA_ARGS__) |
| #define | vmsbc_vvm_u64m1_b64(...) __riscv_vmsbc_vvm_u64m1_b64(__VA_ARGS__) |
| #define | vmsbc_vvm_u64m2_b32(...) __riscv_vmsbc_vvm_u64m2_b32(__VA_ARGS__) |
| #define | vmsbc_vvm_u64m4_b16(...) __riscv_vmsbc_vvm_u64m4_b16(__VA_ARGS__) |
| #define | vmsbc_vvm_u64m8_b8(...) __riscv_vmsbc_vvm_u64m8_b8(__VA_ARGS__) |
| #define | vmsbc_vvm_u8m1_b8(...) __riscv_vmsbc_vvm_u8m1_b8(__VA_ARGS__) |
| #define | vmsbc_vvm_u8m2_b4(...) __riscv_vmsbc_vvm_u8m2_b4(__VA_ARGS__) |
| #define | vmsbc_vvm_u8m4_b2(...) __riscv_vmsbc_vvm_u8m4_b2(__VA_ARGS__) |
| #define | vmsbc_vvm_u8m8_b1(...) __riscv_vmsbc_vvm_u8m8_b1(__VA_ARGS__) |
| #define | vmsbc_vvm_u8mf2_b16(...) __riscv_vmsbc_vvm_u8mf2_b16(__VA_ARGS__) |
| #define | vmsbc_vvm_u8mf4_b32(...) __riscv_vmsbc_vvm_u8mf4_b32(__VA_ARGS__) |
| #define | vmsbc_vvm_u8mf8_b64(...) __riscv_vmsbc_vvm_u8mf8_b64(__VA_ARGS__) |
| #define | vmsbc_vx_i16m1_b16(...) __riscv_vmsbc_vx_i16m1_b16(__VA_ARGS__) |
| #define | vmsbc_vx_i16m2_b8(...) __riscv_vmsbc_vx_i16m2_b8(__VA_ARGS__) |
| #define | vmsbc_vx_i16m4_b4(...) __riscv_vmsbc_vx_i16m4_b4(__VA_ARGS__) |
| #define | vmsbc_vx_i16m8_b2(...) __riscv_vmsbc_vx_i16m8_b2(__VA_ARGS__) |
| #define | vmsbc_vx_i16mf2_b32(...) __riscv_vmsbc_vx_i16mf2_b32(__VA_ARGS__) |
| #define | vmsbc_vx_i16mf4_b64(...) __riscv_vmsbc_vx_i16mf4_b64(__VA_ARGS__) |
| #define | vmsbc_vx_i32m1_b32(...) __riscv_vmsbc_vx_i32m1_b32(__VA_ARGS__) |
| #define | vmsbc_vx_i32m2_b16(...) __riscv_vmsbc_vx_i32m2_b16(__VA_ARGS__) |
| #define | vmsbc_vx_i32m4_b8(...) __riscv_vmsbc_vx_i32m4_b8(__VA_ARGS__) |
| #define | vmsbc_vx_i32m8_b4(...) __riscv_vmsbc_vx_i32m8_b4(__VA_ARGS__) |
| #define | vmsbc_vx_i32mf2_b64(...) __riscv_vmsbc_vx_i32mf2_b64(__VA_ARGS__) |
| #define | vmsbc_vx_i64m1_b64(...) __riscv_vmsbc_vx_i64m1_b64(__VA_ARGS__) |
| #define | vmsbc_vx_i64m2_b32(...) __riscv_vmsbc_vx_i64m2_b32(__VA_ARGS__) |
| #define | vmsbc_vx_i64m4_b16(...) __riscv_vmsbc_vx_i64m4_b16(__VA_ARGS__) |
| #define | vmsbc_vx_i64m8_b8(...) __riscv_vmsbc_vx_i64m8_b8(__VA_ARGS__) |
| #define | vmsbc_vx_i8m1_b8(...) __riscv_vmsbc_vx_i8m1_b8(__VA_ARGS__) |
| #define | vmsbc_vx_i8m2_b4(...) __riscv_vmsbc_vx_i8m2_b4(__VA_ARGS__) |
| #define | vmsbc_vx_i8m4_b2(...) __riscv_vmsbc_vx_i8m4_b2(__VA_ARGS__) |
| #define | vmsbc_vx_i8m8_b1(...) __riscv_vmsbc_vx_i8m8_b1(__VA_ARGS__) |
| #define | vmsbc_vx_i8mf2_b16(...) __riscv_vmsbc_vx_i8mf2_b16(__VA_ARGS__) |
| #define | vmsbc_vx_i8mf4_b32(...) __riscv_vmsbc_vx_i8mf4_b32(__VA_ARGS__) |
| #define | vmsbc_vx_i8mf8_b64(...) __riscv_vmsbc_vx_i8mf8_b64(__VA_ARGS__) |
| #define | vmsbc_vx_u16m1_b16(...) __riscv_vmsbc_vx_u16m1_b16(__VA_ARGS__) |
| #define | vmsbc_vx_u16m2_b8(...) __riscv_vmsbc_vx_u16m2_b8(__VA_ARGS__) |
| #define | vmsbc_vx_u16m4_b4(...) __riscv_vmsbc_vx_u16m4_b4(__VA_ARGS__) |
| #define | vmsbc_vx_u16m8_b2(...) __riscv_vmsbc_vx_u16m8_b2(__VA_ARGS__) |
| #define | vmsbc_vx_u16mf2_b32(...) __riscv_vmsbc_vx_u16mf2_b32(__VA_ARGS__) |
| #define | vmsbc_vx_u16mf4_b64(...) __riscv_vmsbc_vx_u16mf4_b64(__VA_ARGS__) |
| #define | vmsbc_vx_u32m1_b32(...) __riscv_vmsbc_vx_u32m1_b32(__VA_ARGS__) |
| #define | vmsbc_vx_u32m2_b16(...) __riscv_vmsbc_vx_u32m2_b16(__VA_ARGS__) |
| #define | vmsbc_vx_u32m4_b8(...) __riscv_vmsbc_vx_u32m4_b8(__VA_ARGS__) |
| #define | vmsbc_vx_u32m8_b4(...) __riscv_vmsbc_vx_u32m8_b4(__VA_ARGS__) |
| #define | vmsbc_vx_u32mf2_b64(...) __riscv_vmsbc_vx_u32mf2_b64(__VA_ARGS__) |
| #define | vmsbc_vx_u64m1_b64(...) __riscv_vmsbc_vx_u64m1_b64(__VA_ARGS__) |
| #define | vmsbc_vx_u64m2_b32(...) __riscv_vmsbc_vx_u64m2_b32(__VA_ARGS__) |
| #define | vmsbc_vx_u64m4_b16(...) __riscv_vmsbc_vx_u64m4_b16(__VA_ARGS__) |
| #define | vmsbc_vx_u64m8_b8(...) __riscv_vmsbc_vx_u64m8_b8(__VA_ARGS__) |
| #define | vmsbc_vx_u8m1_b8(...) __riscv_vmsbc_vx_u8m1_b8(__VA_ARGS__) |
| #define | vmsbc_vx_u8m2_b4(...) __riscv_vmsbc_vx_u8m2_b4(__VA_ARGS__) |
| #define | vmsbc_vx_u8m4_b2(...) __riscv_vmsbc_vx_u8m4_b2(__VA_ARGS__) |
| #define | vmsbc_vx_u8m8_b1(...) __riscv_vmsbc_vx_u8m8_b1(__VA_ARGS__) |
| #define | vmsbc_vx_u8mf2_b16(...) __riscv_vmsbc_vx_u8mf2_b16(__VA_ARGS__) |
| #define | vmsbc_vx_u8mf4_b32(...) __riscv_vmsbc_vx_u8mf4_b32(__VA_ARGS__) |
| #define | vmsbc_vx_u8mf8_b64(...) __riscv_vmsbc_vx_u8mf8_b64(__VA_ARGS__) |
| #define | vmsbc_vxm_i16m1_b16(...) __riscv_vmsbc_vxm_i16m1_b16(__VA_ARGS__) |
| #define | vmsbc_vxm_i16m2_b8(...) __riscv_vmsbc_vxm_i16m2_b8(__VA_ARGS__) |
| #define | vmsbc_vxm_i16m4_b4(...) __riscv_vmsbc_vxm_i16m4_b4(__VA_ARGS__) |
| #define | vmsbc_vxm_i16m8_b2(...) __riscv_vmsbc_vxm_i16m8_b2(__VA_ARGS__) |
| #define | vmsbc_vxm_i16mf2_b32(...) __riscv_vmsbc_vxm_i16mf2_b32(__VA_ARGS__) |
| #define | vmsbc_vxm_i16mf4_b64(...) __riscv_vmsbc_vxm_i16mf4_b64(__VA_ARGS__) |
| #define | vmsbc_vxm_i32m1_b32(...) __riscv_vmsbc_vxm_i32m1_b32(__VA_ARGS__) |
| #define | vmsbc_vxm_i32m2_b16(...) __riscv_vmsbc_vxm_i32m2_b16(__VA_ARGS__) |
| #define | vmsbc_vxm_i32m4_b8(...) __riscv_vmsbc_vxm_i32m4_b8(__VA_ARGS__) |
| #define | vmsbc_vxm_i32m8_b4(...) __riscv_vmsbc_vxm_i32m8_b4(__VA_ARGS__) |
| #define | vmsbc_vxm_i32mf2_b64(...) __riscv_vmsbc_vxm_i32mf2_b64(__VA_ARGS__) |
| #define | vmsbc_vxm_i64m1_b64(...) __riscv_vmsbc_vxm_i64m1_b64(__VA_ARGS__) |
| #define | vmsbc_vxm_i64m2_b32(...) __riscv_vmsbc_vxm_i64m2_b32(__VA_ARGS__) |
| #define | vmsbc_vxm_i64m4_b16(...) __riscv_vmsbc_vxm_i64m4_b16(__VA_ARGS__) |
| #define | vmsbc_vxm_i64m8_b8(...) __riscv_vmsbc_vxm_i64m8_b8(__VA_ARGS__) |
| #define | vmsbc_vxm_i8m1_b8(...) __riscv_vmsbc_vxm_i8m1_b8(__VA_ARGS__) |
| #define | vmsbc_vxm_i8m2_b4(...) __riscv_vmsbc_vxm_i8m2_b4(__VA_ARGS__) |
| #define | vmsbc_vxm_i8m4_b2(...) __riscv_vmsbc_vxm_i8m4_b2(__VA_ARGS__) |
| #define | vmsbc_vxm_i8m8_b1(...) __riscv_vmsbc_vxm_i8m8_b1(__VA_ARGS__) |
| #define | vmsbc_vxm_i8mf2_b16(...) __riscv_vmsbc_vxm_i8mf2_b16(__VA_ARGS__) |
| #define | vmsbc_vxm_i8mf4_b32(...) __riscv_vmsbc_vxm_i8mf4_b32(__VA_ARGS__) |
| #define | vmsbc_vxm_i8mf8_b64(...) __riscv_vmsbc_vxm_i8mf8_b64(__VA_ARGS__) |
| #define | vmsbc_vxm_u16m1_b16(...) __riscv_vmsbc_vxm_u16m1_b16(__VA_ARGS__) |
| #define | vmsbc_vxm_u16m2_b8(...) __riscv_vmsbc_vxm_u16m2_b8(__VA_ARGS__) |
| #define | vmsbc_vxm_u16m4_b4(...) __riscv_vmsbc_vxm_u16m4_b4(__VA_ARGS__) |
| #define | vmsbc_vxm_u16m8_b2(...) __riscv_vmsbc_vxm_u16m8_b2(__VA_ARGS__) |
| #define | vmsbc_vxm_u16mf2_b32(...) __riscv_vmsbc_vxm_u16mf2_b32(__VA_ARGS__) |
| #define | vmsbc_vxm_u16mf4_b64(...) __riscv_vmsbc_vxm_u16mf4_b64(__VA_ARGS__) |
| #define | vmsbc_vxm_u32m1_b32(...) __riscv_vmsbc_vxm_u32m1_b32(__VA_ARGS__) |
| #define | vmsbc_vxm_u32m2_b16(...) __riscv_vmsbc_vxm_u32m2_b16(__VA_ARGS__) |
| #define | vmsbc_vxm_u32m4_b8(...) __riscv_vmsbc_vxm_u32m4_b8(__VA_ARGS__) |
| #define | vmsbc_vxm_u32m8_b4(...) __riscv_vmsbc_vxm_u32m8_b4(__VA_ARGS__) |
| #define | vmsbc_vxm_u32mf2_b64(...) __riscv_vmsbc_vxm_u32mf2_b64(__VA_ARGS__) |
| #define | vmsbc_vxm_u64m1_b64(...) __riscv_vmsbc_vxm_u64m1_b64(__VA_ARGS__) |
| #define | vmsbc_vxm_u64m2_b32(...) __riscv_vmsbc_vxm_u64m2_b32(__VA_ARGS__) |
| #define | vmsbc_vxm_u64m4_b16(...) __riscv_vmsbc_vxm_u64m4_b16(__VA_ARGS__) |
| #define | vmsbc_vxm_u64m8_b8(...) __riscv_vmsbc_vxm_u64m8_b8(__VA_ARGS__) |
| #define | vmsbc_vxm_u8m1_b8(...) __riscv_vmsbc_vxm_u8m1_b8(__VA_ARGS__) |
| #define | vmsbc_vxm_u8m2_b4(...) __riscv_vmsbc_vxm_u8m2_b4(__VA_ARGS__) |
| #define | vmsbc_vxm_u8m4_b2(...) __riscv_vmsbc_vxm_u8m4_b2(__VA_ARGS__) |
| #define | vmsbc_vxm_u8m8_b1(...) __riscv_vmsbc_vxm_u8m8_b1(__VA_ARGS__) |
| #define | vmsbc_vxm_u8mf2_b16(...) __riscv_vmsbc_vxm_u8mf2_b16(__VA_ARGS__) |
| #define | vmsbc_vxm_u8mf4_b32(...) __riscv_vmsbc_vxm_u8mf4_b32(__VA_ARGS__) |
| #define | vmsbc_vxm_u8mf8_b64(...) __riscv_vmsbc_vxm_u8mf8_b64(__VA_ARGS__) |
| #define | vmsbf_m_b1(...) __riscv_vmsbf_m_b1(__VA_ARGS__) |
| #define | vmsbf_m_b16(...) __riscv_vmsbf_m_b16(__VA_ARGS__) |
| #define | vmsbf_m_b16_m(...) __riscv_vmsbf_m_b16_mu(__VA_ARGS__) |
| #define | vmsbf_m_b1_m(...) __riscv_vmsbf_m_b1_mu(__VA_ARGS__) |
| #define | vmsbf_m_b2(...) __riscv_vmsbf_m_b2(__VA_ARGS__) |
| #define | vmsbf_m_b2_m(...) __riscv_vmsbf_m_b2_mu(__VA_ARGS__) |
| #define | vmsbf_m_b32(...) __riscv_vmsbf_m_b32(__VA_ARGS__) |
| #define | vmsbf_m_b32_m(...) __riscv_vmsbf_m_b32_mu(__VA_ARGS__) |
| #define | vmsbf_m_b4(...) __riscv_vmsbf_m_b4(__VA_ARGS__) |
| #define | vmsbf_m_b4_m(...) __riscv_vmsbf_m_b4_mu(__VA_ARGS__) |
| #define | vmsbf_m_b64(...) __riscv_vmsbf_m_b64(__VA_ARGS__) |
| #define | vmsbf_m_b64_m(...) __riscv_vmsbf_m_b64_mu(__VA_ARGS__) |
| #define | vmsbf_m_b8(...) __riscv_vmsbf_m_b8(__VA_ARGS__) |
| #define | vmsbf_m_b8_m(...) __riscv_vmsbf_m_b8_mu(__VA_ARGS__) |
| #define | vmseq_vv_i16m1_b16(...) __riscv_vmseq_vv_i16m1_b16(__VA_ARGS__) |
| #define | vmseq_vv_i16m1_b16_m(...) __riscv_vmseq_vv_i16m1_b16_mu(__VA_ARGS__) |
| #define | vmseq_vv_i16m2_b8(...) __riscv_vmseq_vv_i16m2_b8(__VA_ARGS__) |
| #define | vmseq_vv_i16m2_b8_m(...) __riscv_vmseq_vv_i16m2_b8_mu(__VA_ARGS__) |
| #define | vmseq_vv_i16m4_b4(...) __riscv_vmseq_vv_i16m4_b4(__VA_ARGS__) |
| #define | vmseq_vv_i16m4_b4_m(...) __riscv_vmseq_vv_i16m4_b4_mu(__VA_ARGS__) |
| #define | vmseq_vv_i16m8_b2(...) __riscv_vmseq_vv_i16m8_b2(__VA_ARGS__) |
| #define | vmseq_vv_i16m8_b2_m(...) __riscv_vmseq_vv_i16m8_b2_mu(__VA_ARGS__) |
| #define | vmseq_vv_i16mf2_b32(...) __riscv_vmseq_vv_i16mf2_b32(__VA_ARGS__) |
| #define | vmseq_vv_i16mf2_b32_m(...) __riscv_vmseq_vv_i16mf2_b32_mu(__VA_ARGS__) |
| #define | vmseq_vv_i16mf4_b64(...) __riscv_vmseq_vv_i16mf4_b64(__VA_ARGS__) |
| #define | vmseq_vv_i16mf4_b64_m(...) __riscv_vmseq_vv_i16mf4_b64_mu(__VA_ARGS__) |
| #define | vmseq_vv_i32m1_b32(...) __riscv_vmseq_vv_i32m1_b32(__VA_ARGS__) |
| #define | vmseq_vv_i32m1_b32_m(...) __riscv_vmseq_vv_i32m1_b32_mu(__VA_ARGS__) |
| #define | vmseq_vv_i32m2_b16(...) __riscv_vmseq_vv_i32m2_b16(__VA_ARGS__) |
| #define | vmseq_vv_i32m2_b16_m(...) __riscv_vmseq_vv_i32m2_b16_mu(__VA_ARGS__) |
| #define | vmseq_vv_i32m4_b8(...) __riscv_vmseq_vv_i32m4_b8(__VA_ARGS__) |
| #define | vmseq_vv_i32m4_b8_m(...) __riscv_vmseq_vv_i32m4_b8_mu(__VA_ARGS__) |
| #define | vmseq_vv_i32m8_b4(...) __riscv_vmseq_vv_i32m8_b4(__VA_ARGS__) |
| #define | vmseq_vv_i32m8_b4_m(...) __riscv_vmseq_vv_i32m8_b4_mu(__VA_ARGS__) |
| #define | vmseq_vv_i32mf2_b64(...) __riscv_vmseq_vv_i32mf2_b64(__VA_ARGS__) |
| #define | vmseq_vv_i32mf2_b64_m(...) __riscv_vmseq_vv_i32mf2_b64_mu(__VA_ARGS__) |
| #define | vmseq_vv_i64m1_b64(...) __riscv_vmseq_vv_i64m1_b64(__VA_ARGS__) |
| #define | vmseq_vv_i64m1_b64_m(...) __riscv_vmseq_vv_i64m1_b64_mu(__VA_ARGS__) |
| #define | vmseq_vv_i64m2_b32(...) __riscv_vmseq_vv_i64m2_b32(__VA_ARGS__) |
| #define | vmseq_vv_i64m2_b32_m(...) __riscv_vmseq_vv_i64m2_b32_mu(__VA_ARGS__) |
| #define | vmseq_vv_i64m4_b16(...) __riscv_vmseq_vv_i64m4_b16(__VA_ARGS__) |
| #define | vmseq_vv_i64m4_b16_m(...) __riscv_vmseq_vv_i64m4_b16_mu(__VA_ARGS__) |
| #define | vmseq_vv_i64m8_b8(...) __riscv_vmseq_vv_i64m8_b8(__VA_ARGS__) |
| #define | vmseq_vv_i64m8_b8_m(...) __riscv_vmseq_vv_i64m8_b8_mu(__VA_ARGS__) |
| #define | vmseq_vv_i8m1_b8(...) __riscv_vmseq_vv_i8m1_b8(__VA_ARGS__) |
| #define | vmseq_vv_i8m1_b8_m(...) __riscv_vmseq_vv_i8m1_b8_mu(__VA_ARGS__) |
| #define | vmseq_vv_i8m2_b4(...) __riscv_vmseq_vv_i8m2_b4(__VA_ARGS__) |
| #define | vmseq_vv_i8m2_b4_m(...) __riscv_vmseq_vv_i8m2_b4_mu(__VA_ARGS__) |
| #define | vmseq_vv_i8m4_b2(...) __riscv_vmseq_vv_i8m4_b2(__VA_ARGS__) |
| #define | vmseq_vv_i8m4_b2_m(...) __riscv_vmseq_vv_i8m4_b2_mu(__VA_ARGS__) |
| #define | vmseq_vv_i8m8_b1(...) __riscv_vmseq_vv_i8m8_b1(__VA_ARGS__) |
| #define | vmseq_vv_i8m8_b1_m(...) __riscv_vmseq_vv_i8m8_b1_mu(__VA_ARGS__) |
| #define | vmseq_vv_i8mf2_b16(...) __riscv_vmseq_vv_i8mf2_b16(__VA_ARGS__) |
| #define | vmseq_vv_i8mf2_b16_m(...) __riscv_vmseq_vv_i8mf2_b16_mu(__VA_ARGS__) |
| #define | vmseq_vv_i8mf4_b32(...) __riscv_vmseq_vv_i8mf4_b32(__VA_ARGS__) |
| #define | vmseq_vv_i8mf4_b32_m(...) __riscv_vmseq_vv_i8mf4_b32_mu(__VA_ARGS__) |
| #define | vmseq_vv_i8mf8_b64(...) __riscv_vmseq_vv_i8mf8_b64(__VA_ARGS__) |
| #define | vmseq_vv_i8mf8_b64_m(...) __riscv_vmseq_vv_i8mf8_b64_mu(__VA_ARGS__) |
| #define | vmseq_vv_u16m1_b16(...) __riscv_vmseq_vv_u16m1_b16(__VA_ARGS__) |
| #define | vmseq_vv_u16m1_b16_m(...) __riscv_vmseq_vv_u16m1_b16_mu(__VA_ARGS__) |
| #define | vmseq_vv_u16m2_b8(...) __riscv_vmseq_vv_u16m2_b8(__VA_ARGS__) |
| #define | vmseq_vv_u16m2_b8_m(...) __riscv_vmseq_vv_u16m2_b8_mu(__VA_ARGS__) |
| #define | vmseq_vv_u16m4_b4(...) __riscv_vmseq_vv_u16m4_b4(__VA_ARGS__) |
| #define | vmseq_vv_u16m4_b4_m(...) __riscv_vmseq_vv_u16m4_b4_mu(__VA_ARGS__) |
| #define | vmseq_vv_u16m8_b2(...) __riscv_vmseq_vv_u16m8_b2(__VA_ARGS__) |
| #define | vmseq_vv_u16m8_b2_m(...) __riscv_vmseq_vv_u16m8_b2_mu(__VA_ARGS__) |
| #define | vmseq_vv_u16mf2_b32(...) __riscv_vmseq_vv_u16mf2_b32(__VA_ARGS__) |
| #define | vmseq_vv_u16mf2_b32_m(...) __riscv_vmseq_vv_u16mf2_b32_mu(__VA_ARGS__) |
| #define | vmseq_vv_u16mf4_b64(...) __riscv_vmseq_vv_u16mf4_b64(__VA_ARGS__) |
| #define | vmseq_vv_u16mf4_b64_m(...) __riscv_vmseq_vv_u16mf4_b64_mu(__VA_ARGS__) |
| #define | vmseq_vv_u32m1_b32(...) __riscv_vmseq_vv_u32m1_b32(__VA_ARGS__) |
| #define | vmseq_vv_u32m1_b32_m(...) __riscv_vmseq_vv_u32m1_b32_mu(__VA_ARGS__) |
| #define | vmseq_vv_u32m2_b16(...) __riscv_vmseq_vv_u32m2_b16(__VA_ARGS__) |
| #define | vmseq_vv_u32m2_b16_m(...) __riscv_vmseq_vv_u32m2_b16_mu(__VA_ARGS__) |
| #define | vmseq_vv_u32m4_b8(...) __riscv_vmseq_vv_u32m4_b8(__VA_ARGS__) |
| #define | vmseq_vv_u32m4_b8_m(...) __riscv_vmseq_vv_u32m4_b8_mu(__VA_ARGS__) |
| #define | vmseq_vv_u32m8_b4(...) __riscv_vmseq_vv_u32m8_b4(__VA_ARGS__) |
| #define | vmseq_vv_u32m8_b4_m(...) __riscv_vmseq_vv_u32m8_b4_mu(__VA_ARGS__) |
| #define | vmseq_vv_u32mf2_b64(...) __riscv_vmseq_vv_u32mf2_b64(__VA_ARGS__) |
| #define | vmseq_vv_u32mf2_b64_m(...) __riscv_vmseq_vv_u32mf2_b64_mu(__VA_ARGS__) |
| #define | vmseq_vv_u64m1_b64(...) __riscv_vmseq_vv_u64m1_b64(__VA_ARGS__) |
| #define | vmseq_vv_u64m1_b64_m(...) __riscv_vmseq_vv_u64m1_b64_mu(__VA_ARGS__) |
| #define | vmseq_vv_u64m2_b32(...) __riscv_vmseq_vv_u64m2_b32(__VA_ARGS__) |
| #define | vmseq_vv_u64m2_b32_m(...) __riscv_vmseq_vv_u64m2_b32_mu(__VA_ARGS__) |
| #define | vmseq_vv_u64m4_b16(...) __riscv_vmseq_vv_u64m4_b16(__VA_ARGS__) |
| #define | vmseq_vv_u64m4_b16_m(...) __riscv_vmseq_vv_u64m4_b16_mu(__VA_ARGS__) |
| #define | vmseq_vv_u64m8_b8(...) __riscv_vmseq_vv_u64m8_b8(__VA_ARGS__) |
| #define | vmseq_vv_u64m8_b8_m(...) __riscv_vmseq_vv_u64m8_b8_mu(__VA_ARGS__) |
| #define | vmseq_vv_u8m1_b8(...) __riscv_vmseq_vv_u8m1_b8(__VA_ARGS__) |
| #define | vmseq_vv_u8m1_b8_m(...) __riscv_vmseq_vv_u8m1_b8_mu(__VA_ARGS__) |
| #define | vmseq_vv_u8m2_b4(...) __riscv_vmseq_vv_u8m2_b4(__VA_ARGS__) |
| #define | vmseq_vv_u8m2_b4_m(...) __riscv_vmseq_vv_u8m2_b4_mu(__VA_ARGS__) |
| #define | vmseq_vv_u8m4_b2(...) __riscv_vmseq_vv_u8m4_b2(__VA_ARGS__) |
| #define | vmseq_vv_u8m4_b2_m(...) __riscv_vmseq_vv_u8m4_b2_mu(__VA_ARGS__) |
| #define | vmseq_vv_u8m8_b1(...) __riscv_vmseq_vv_u8m8_b1(__VA_ARGS__) |
| #define | vmseq_vv_u8m8_b1_m(...) __riscv_vmseq_vv_u8m8_b1_mu(__VA_ARGS__) |
| #define | vmseq_vv_u8mf2_b16(...) __riscv_vmseq_vv_u8mf2_b16(__VA_ARGS__) |
| #define | vmseq_vv_u8mf2_b16_m(...) __riscv_vmseq_vv_u8mf2_b16_mu(__VA_ARGS__) |
| #define | vmseq_vv_u8mf4_b32(...) __riscv_vmseq_vv_u8mf4_b32(__VA_ARGS__) |
| #define | vmseq_vv_u8mf4_b32_m(...) __riscv_vmseq_vv_u8mf4_b32_mu(__VA_ARGS__) |
| #define | vmseq_vv_u8mf8_b64(...) __riscv_vmseq_vv_u8mf8_b64(__VA_ARGS__) |
| #define | vmseq_vv_u8mf8_b64_m(...) __riscv_vmseq_vv_u8mf8_b64_mu(__VA_ARGS__) |
| #define | vmseq_vx_i16m1_b16(...) __riscv_vmseq_vx_i16m1_b16(__VA_ARGS__) |
| #define | vmseq_vx_i16m1_b16_m(...) __riscv_vmseq_vx_i16m1_b16_mu(__VA_ARGS__) |
| #define | vmseq_vx_i16m2_b8(...) __riscv_vmseq_vx_i16m2_b8(__VA_ARGS__) |
| #define | vmseq_vx_i16m2_b8_m(...) __riscv_vmseq_vx_i16m2_b8_mu(__VA_ARGS__) |
| #define | vmseq_vx_i16m4_b4(...) __riscv_vmseq_vx_i16m4_b4(__VA_ARGS__) |
| #define | vmseq_vx_i16m4_b4_m(...) __riscv_vmseq_vx_i16m4_b4_mu(__VA_ARGS__) |
| #define | vmseq_vx_i16m8_b2(...) __riscv_vmseq_vx_i16m8_b2(__VA_ARGS__) |
| #define | vmseq_vx_i16m8_b2_m(...) __riscv_vmseq_vx_i16m8_b2_mu(__VA_ARGS__) |
| #define | vmseq_vx_i16mf2_b32(...) __riscv_vmseq_vx_i16mf2_b32(__VA_ARGS__) |
| #define | vmseq_vx_i16mf2_b32_m(...) __riscv_vmseq_vx_i16mf2_b32_mu(__VA_ARGS__) |
| #define | vmseq_vx_i16mf4_b64(...) __riscv_vmseq_vx_i16mf4_b64(__VA_ARGS__) |
| #define | vmseq_vx_i16mf4_b64_m(...) __riscv_vmseq_vx_i16mf4_b64_mu(__VA_ARGS__) |
| #define | vmseq_vx_i32m1_b32(...) __riscv_vmseq_vx_i32m1_b32(__VA_ARGS__) |
| #define | vmseq_vx_i32m1_b32_m(...) __riscv_vmseq_vx_i32m1_b32_mu(__VA_ARGS__) |
| #define | vmseq_vx_i32m2_b16(...) __riscv_vmseq_vx_i32m2_b16(__VA_ARGS__) |
| #define | vmseq_vx_i32m2_b16_m(...) __riscv_vmseq_vx_i32m2_b16_mu(__VA_ARGS__) |
| #define | vmseq_vx_i32m4_b8(...) __riscv_vmseq_vx_i32m4_b8(__VA_ARGS__) |
| #define | vmseq_vx_i32m4_b8_m(...) __riscv_vmseq_vx_i32m4_b8_mu(__VA_ARGS__) |
| #define | vmseq_vx_i32m8_b4(...) __riscv_vmseq_vx_i32m8_b4(__VA_ARGS__) |
| #define | vmseq_vx_i32m8_b4_m(...) __riscv_vmseq_vx_i32m8_b4_mu(__VA_ARGS__) |
| #define | vmseq_vx_i32mf2_b64(...) __riscv_vmseq_vx_i32mf2_b64(__VA_ARGS__) |
| #define | vmseq_vx_i32mf2_b64_m(...) __riscv_vmseq_vx_i32mf2_b64_mu(__VA_ARGS__) |
| #define | vmseq_vx_i64m1_b64(...) __riscv_vmseq_vx_i64m1_b64(__VA_ARGS__) |
| #define | vmseq_vx_i64m1_b64_m(...) __riscv_vmseq_vx_i64m1_b64_mu(__VA_ARGS__) |
| #define | vmseq_vx_i64m2_b32(...) __riscv_vmseq_vx_i64m2_b32(__VA_ARGS__) |
| #define | vmseq_vx_i64m2_b32_m(...) __riscv_vmseq_vx_i64m2_b32_mu(__VA_ARGS__) |
| #define | vmseq_vx_i64m4_b16(...) __riscv_vmseq_vx_i64m4_b16(__VA_ARGS__) |
| #define | vmseq_vx_i64m4_b16_m(...) __riscv_vmseq_vx_i64m4_b16_mu(__VA_ARGS__) |
| #define | vmseq_vx_i64m8_b8(...) __riscv_vmseq_vx_i64m8_b8(__VA_ARGS__) |
| #define | vmseq_vx_i64m8_b8_m(...) __riscv_vmseq_vx_i64m8_b8_mu(__VA_ARGS__) |
| #define | vmseq_vx_i8m1_b8(...) __riscv_vmseq_vx_i8m1_b8(__VA_ARGS__) |
| #define | vmseq_vx_i8m1_b8_m(...) __riscv_vmseq_vx_i8m1_b8_mu(__VA_ARGS__) |
| #define | vmseq_vx_i8m2_b4(...) __riscv_vmseq_vx_i8m2_b4(__VA_ARGS__) |
| #define | vmseq_vx_i8m2_b4_m(...) __riscv_vmseq_vx_i8m2_b4_mu(__VA_ARGS__) |
| #define | vmseq_vx_i8m4_b2(...) __riscv_vmseq_vx_i8m4_b2(__VA_ARGS__) |
| #define | vmseq_vx_i8m4_b2_m(...) __riscv_vmseq_vx_i8m4_b2_mu(__VA_ARGS__) |
| #define | vmseq_vx_i8m8_b1(...) __riscv_vmseq_vx_i8m8_b1(__VA_ARGS__) |
| #define | vmseq_vx_i8m8_b1_m(...) __riscv_vmseq_vx_i8m8_b1_mu(__VA_ARGS__) |
| #define | vmseq_vx_i8mf2_b16(...) __riscv_vmseq_vx_i8mf2_b16(__VA_ARGS__) |
| #define | vmseq_vx_i8mf2_b16_m(...) __riscv_vmseq_vx_i8mf2_b16_mu(__VA_ARGS__) |
| #define | vmseq_vx_i8mf4_b32(...) __riscv_vmseq_vx_i8mf4_b32(__VA_ARGS__) |
| #define | vmseq_vx_i8mf4_b32_m(...) __riscv_vmseq_vx_i8mf4_b32_mu(__VA_ARGS__) |
| #define | vmseq_vx_i8mf8_b64(...) __riscv_vmseq_vx_i8mf8_b64(__VA_ARGS__) |
| #define | vmseq_vx_i8mf8_b64_m(...) __riscv_vmseq_vx_i8mf8_b64_mu(__VA_ARGS__) |
| #define | vmseq_vx_u16m1_b16(...) __riscv_vmseq_vx_u16m1_b16(__VA_ARGS__) |
| #define | vmseq_vx_u16m1_b16_m(...) __riscv_vmseq_vx_u16m1_b16_mu(__VA_ARGS__) |
| #define | vmseq_vx_u16m2_b8(...) __riscv_vmseq_vx_u16m2_b8(__VA_ARGS__) |
| #define | vmseq_vx_u16m2_b8_m(...) __riscv_vmseq_vx_u16m2_b8_mu(__VA_ARGS__) |
| #define | vmseq_vx_u16m4_b4(...) __riscv_vmseq_vx_u16m4_b4(__VA_ARGS__) |
| #define | vmseq_vx_u16m4_b4_m(...) __riscv_vmseq_vx_u16m4_b4_mu(__VA_ARGS__) |
| #define | vmseq_vx_u16m8_b2(...) __riscv_vmseq_vx_u16m8_b2(__VA_ARGS__) |
| #define | vmseq_vx_u16m8_b2_m(...) __riscv_vmseq_vx_u16m8_b2_mu(__VA_ARGS__) |
| #define | vmseq_vx_u16mf2_b32(...) __riscv_vmseq_vx_u16mf2_b32(__VA_ARGS__) |
| #define | vmseq_vx_u16mf2_b32_m(...) __riscv_vmseq_vx_u16mf2_b32_mu(__VA_ARGS__) |
| #define | vmseq_vx_u16mf4_b64(...) __riscv_vmseq_vx_u16mf4_b64(__VA_ARGS__) |
| #define | vmseq_vx_u16mf4_b64_m(...) __riscv_vmseq_vx_u16mf4_b64_mu(__VA_ARGS__) |
| #define | vmseq_vx_u32m1_b32(...) __riscv_vmseq_vx_u32m1_b32(__VA_ARGS__) |
| #define | vmseq_vx_u32m1_b32_m(...) __riscv_vmseq_vx_u32m1_b32_mu(__VA_ARGS__) |
| #define | vmseq_vx_u32m2_b16(...) __riscv_vmseq_vx_u32m2_b16(__VA_ARGS__) |
| #define | vmseq_vx_u32m2_b16_m(...) __riscv_vmseq_vx_u32m2_b16_mu(__VA_ARGS__) |
| #define | vmseq_vx_u32m4_b8(...) __riscv_vmseq_vx_u32m4_b8(__VA_ARGS__) |
| #define | vmseq_vx_u32m4_b8_m(...) __riscv_vmseq_vx_u32m4_b8_mu(__VA_ARGS__) |
| #define | vmseq_vx_u32m8_b4(...) __riscv_vmseq_vx_u32m8_b4(__VA_ARGS__) |
| #define | vmseq_vx_u32m8_b4_m(...) __riscv_vmseq_vx_u32m8_b4_mu(__VA_ARGS__) |
| #define | vmseq_vx_u32mf2_b64(...) __riscv_vmseq_vx_u32mf2_b64(__VA_ARGS__) |
| #define | vmseq_vx_u32mf2_b64_m(...) __riscv_vmseq_vx_u32mf2_b64_mu(__VA_ARGS__) |
| #define | vmseq_vx_u64m1_b64(...) __riscv_vmseq_vx_u64m1_b64(__VA_ARGS__) |
| #define | vmseq_vx_u64m1_b64_m(...) __riscv_vmseq_vx_u64m1_b64_mu(__VA_ARGS__) |
| #define | vmseq_vx_u64m2_b32(...) __riscv_vmseq_vx_u64m2_b32(__VA_ARGS__) |
| #define | vmseq_vx_u64m2_b32_m(...) __riscv_vmseq_vx_u64m2_b32_mu(__VA_ARGS__) |
| #define | vmseq_vx_u64m4_b16(...) __riscv_vmseq_vx_u64m4_b16(__VA_ARGS__) |
| #define | vmseq_vx_u64m4_b16_m(...) __riscv_vmseq_vx_u64m4_b16_mu(__VA_ARGS__) |
| #define | vmseq_vx_u64m8_b8(...) __riscv_vmseq_vx_u64m8_b8(__VA_ARGS__) |
| #define | vmseq_vx_u64m8_b8_m(...) __riscv_vmseq_vx_u64m8_b8_mu(__VA_ARGS__) |
| #define | vmseq_vx_u8m1_b8(...) __riscv_vmseq_vx_u8m1_b8(__VA_ARGS__) |
| #define | vmseq_vx_u8m1_b8_m(...) __riscv_vmseq_vx_u8m1_b8_mu(__VA_ARGS__) |
| #define | vmseq_vx_u8m2_b4(...) __riscv_vmseq_vx_u8m2_b4(__VA_ARGS__) |
| #define | vmseq_vx_u8m2_b4_m(...) __riscv_vmseq_vx_u8m2_b4_mu(__VA_ARGS__) |
| #define | vmseq_vx_u8m4_b2(...) __riscv_vmseq_vx_u8m4_b2(__VA_ARGS__) |
| #define | vmseq_vx_u8m4_b2_m(...) __riscv_vmseq_vx_u8m4_b2_mu(__VA_ARGS__) |
| #define | vmseq_vx_u8m8_b1(...) __riscv_vmseq_vx_u8m8_b1(__VA_ARGS__) |
| #define | vmseq_vx_u8m8_b1_m(...) __riscv_vmseq_vx_u8m8_b1_mu(__VA_ARGS__) |
| #define | vmseq_vx_u8mf2_b16(...) __riscv_vmseq_vx_u8mf2_b16(__VA_ARGS__) |
| #define | vmseq_vx_u8mf2_b16_m(...) __riscv_vmseq_vx_u8mf2_b16_mu(__VA_ARGS__) |
| #define | vmseq_vx_u8mf4_b32(...) __riscv_vmseq_vx_u8mf4_b32(__VA_ARGS__) |
| #define | vmseq_vx_u8mf4_b32_m(...) __riscv_vmseq_vx_u8mf4_b32_mu(__VA_ARGS__) |
| #define | vmseq_vx_u8mf8_b64(...) __riscv_vmseq_vx_u8mf8_b64(__VA_ARGS__) |
| #define | vmseq_vx_u8mf8_b64_m(...) __riscv_vmseq_vx_u8mf8_b64_mu(__VA_ARGS__) |
| #define | vmset_m_b1(...) __riscv_vmset_m_b1(__VA_ARGS__) |
| #define | vmset_m_b16(...) __riscv_vmset_m_b16(__VA_ARGS__) |
| #define | vmset_m_b2(...) __riscv_vmset_m_b2(__VA_ARGS__) |
| #define | vmset_m_b32(...) __riscv_vmset_m_b32(__VA_ARGS__) |
| #define | vmset_m_b4(...) __riscv_vmset_m_b4(__VA_ARGS__) |
| #define | vmset_m_b64(...) __riscv_vmset_m_b64(__VA_ARGS__) |
| #define | vmset_m_b8(...) __riscv_vmset_m_b8(__VA_ARGS__) |
| #define | vmsge_vv_i16m1_b16(...) __riscv_vmsge_vv_i16m1_b16(__VA_ARGS__) |
| #define | vmsge_vv_i16m1_b16_m(...) __riscv_vmsge_vv_i16m1_b16_mu(__VA_ARGS__) |
| #define | vmsge_vv_i16m2_b8(...) __riscv_vmsge_vv_i16m2_b8(__VA_ARGS__) |
| #define | vmsge_vv_i16m2_b8_m(...) __riscv_vmsge_vv_i16m2_b8_mu(__VA_ARGS__) |
| #define | vmsge_vv_i16m4_b4(...) __riscv_vmsge_vv_i16m4_b4(__VA_ARGS__) |
| #define | vmsge_vv_i16m4_b4_m(...) __riscv_vmsge_vv_i16m4_b4_mu(__VA_ARGS__) |
| #define | vmsge_vv_i16m8_b2(...) __riscv_vmsge_vv_i16m8_b2(__VA_ARGS__) |
| #define | vmsge_vv_i16m8_b2_m(...) __riscv_vmsge_vv_i16m8_b2_mu(__VA_ARGS__) |
| #define | vmsge_vv_i16mf2_b32(...) __riscv_vmsge_vv_i16mf2_b32(__VA_ARGS__) |
| #define | vmsge_vv_i16mf2_b32_m(...) __riscv_vmsge_vv_i16mf2_b32_mu(__VA_ARGS__) |
| #define | vmsge_vv_i16mf4_b64(...) __riscv_vmsge_vv_i16mf4_b64(__VA_ARGS__) |
| #define | vmsge_vv_i16mf4_b64_m(...) __riscv_vmsge_vv_i16mf4_b64_mu(__VA_ARGS__) |
| #define | vmsge_vv_i32m1_b32(...) __riscv_vmsge_vv_i32m1_b32(__VA_ARGS__) |
| #define | vmsge_vv_i32m1_b32_m(...) __riscv_vmsge_vv_i32m1_b32_mu(__VA_ARGS__) |
| #define | vmsge_vv_i32m2_b16(...) __riscv_vmsge_vv_i32m2_b16(__VA_ARGS__) |
| #define | vmsge_vv_i32m2_b16_m(...) __riscv_vmsge_vv_i32m2_b16_mu(__VA_ARGS__) |
| #define | vmsge_vv_i32m4_b8(...) __riscv_vmsge_vv_i32m4_b8(__VA_ARGS__) |
| #define | vmsge_vv_i32m4_b8_m(...) __riscv_vmsge_vv_i32m4_b8_mu(__VA_ARGS__) |
| #define | vmsge_vv_i32m8_b4(...) __riscv_vmsge_vv_i32m8_b4(__VA_ARGS__) |
| #define | vmsge_vv_i32m8_b4_m(...) __riscv_vmsge_vv_i32m8_b4_mu(__VA_ARGS__) |
| #define | vmsge_vv_i32mf2_b64(...) __riscv_vmsge_vv_i32mf2_b64(__VA_ARGS__) |
| #define | vmsge_vv_i32mf2_b64_m(...) __riscv_vmsge_vv_i32mf2_b64_mu(__VA_ARGS__) |
| #define | vmsge_vv_i64m1_b64(...) __riscv_vmsge_vv_i64m1_b64(__VA_ARGS__) |
| #define | vmsge_vv_i64m1_b64_m(...) __riscv_vmsge_vv_i64m1_b64_mu(__VA_ARGS__) |
| #define | vmsge_vv_i64m2_b32(...) __riscv_vmsge_vv_i64m2_b32(__VA_ARGS__) |
| #define | vmsge_vv_i64m2_b32_m(...) __riscv_vmsge_vv_i64m2_b32_mu(__VA_ARGS__) |
| #define | vmsge_vv_i64m4_b16(...) __riscv_vmsge_vv_i64m4_b16(__VA_ARGS__) |
| #define | vmsge_vv_i64m4_b16_m(...) __riscv_vmsge_vv_i64m4_b16_mu(__VA_ARGS__) |
| #define | vmsge_vv_i64m8_b8(...) __riscv_vmsge_vv_i64m8_b8(__VA_ARGS__) |
| #define | vmsge_vv_i64m8_b8_m(...) __riscv_vmsge_vv_i64m8_b8_mu(__VA_ARGS__) |
| #define | vmsge_vv_i8m1_b8(...) __riscv_vmsge_vv_i8m1_b8(__VA_ARGS__) |
| #define | vmsge_vv_i8m1_b8_m(...) __riscv_vmsge_vv_i8m1_b8_mu(__VA_ARGS__) |
| #define | vmsge_vv_i8m2_b4(...) __riscv_vmsge_vv_i8m2_b4(__VA_ARGS__) |
| #define | vmsge_vv_i8m2_b4_m(...) __riscv_vmsge_vv_i8m2_b4_mu(__VA_ARGS__) |
| #define | vmsge_vv_i8m4_b2(...) __riscv_vmsge_vv_i8m4_b2(__VA_ARGS__) |
| #define | vmsge_vv_i8m4_b2_m(...) __riscv_vmsge_vv_i8m4_b2_mu(__VA_ARGS__) |
| #define | vmsge_vv_i8m8_b1(...) __riscv_vmsge_vv_i8m8_b1(__VA_ARGS__) |
| #define | vmsge_vv_i8m8_b1_m(...) __riscv_vmsge_vv_i8m8_b1_mu(__VA_ARGS__) |
| #define | vmsge_vv_i8mf2_b16(...) __riscv_vmsge_vv_i8mf2_b16(__VA_ARGS__) |
| #define | vmsge_vv_i8mf2_b16_m(...) __riscv_vmsge_vv_i8mf2_b16_mu(__VA_ARGS__) |
| #define | vmsge_vv_i8mf4_b32(...) __riscv_vmsge_vv_i8mf4_b32(__VA_ARGS__) |
| #define | vmsge_vv_i8mf4_b32_m(...) __riscv_vmsge_vv_i8mf4_b32_mu(__VA_ARGS__) |
| #define | vmsge_vv_i8mf8_b64(...) __riscv_vmsge_vv_i8mf8_b64(__VA_ARGS__) |
| #define | vmsge_vv_i8mf8_b64_m(...) __riscv_vmsge_vv_i8mf8_b64_mu(__VA_ARGS__) |
| #define | vmsge_vx_i16m1_b16(...) __riscv_vmsge_vx_i16m1_b16(__VA_ARGS__) |
| #define | vmsge_vx_i16m1_b16_m(...) __riscv_vmsge_vx_i16m1_b16_mu(__VA_ARGS__) |
| #define | vmsge_vx_i16m2_b8(...) __riscv_vmsge_vx_i16m2_b8(__VA_ARGS__) |
| #define | vmsge_vx_i16m2_b8_m(...) __riscv_vmsge_vx_i16m2_b8_mu(__VA_ARGS__) |
| #define | vmsge_vx_i16m4_b4(...) __riscv_vmsge_vx_i16m4_b4(__VA_ARGS__) |
| #define | vmsge_vx_i16m4_b4_m(...) __riscv_vmsge_vx_i16m4_b4_mu(__VA_ARGS__) |
| #define | vmsge_vx_i16m8_b2(...) __riscv_vmsge_vx_i16m8_b2(__VA_ARGS__) |
| #define | vmsge_vx_i16m8_b2_m(...) __riscv_vmsge_vx_i16m8_b2_mu(__VA_ARGS__) |
| #define | vmsge_vx_i16mf2_b32(...) __riscv_vmsge_vx_i16mf2_b32(__VA_ARGS__) |
| #define | vmsge_vx_i16mf2_b32_m(...) __riscv_vmsge_vx_i16mf2_b32_mu(__VA_ARGS__) |
| #define | vmsge_vx_i16mf4_b64(...) __riscv_vmsge_vx_i16mf4_b64(__VA_ARGS__) |
| #define | vmsge_vx_i16mf4_b64_m(...) __riscv_vmsge_vx_i16mf4_b64_mu(__VA_ARGS__) |
| #define | vmsge_vx_i32m1_b32(...) __riscv_vmsge_vx_i32m1_b32(__VA_ARGS__) |
| #define | vmsge_vx_i32m1_b32_m(...) __riscv_vmsge_vx_i32m1_b32_mu(__VA_ARGS__) |
| #define | vmsge_vx_i32m2_b16(...) __riscv_vmsge_vx_i32m2_b16(__VA_ARGS__) |
| #define | vmsge_vx_i32m2_b16_m(...) __riscv_vmsge_vx_i32m2_b16_mu(__VA_ARGS__) |
| #define | vmsge_vx_i32m4_b8(...) __riscv_vmsge_vx_i32m4_b8(__VA_ARGS__) |
| #define | vmsge_vx_i32m4_b8_m(...) __riscv_vmsge_vx_i32m4_b8_mu(__VA_ARGS__) |
| #define | vmsge_vx_i32m8_b4(...) __riscv_vmsge_vx_i32m8_b4(__VA_ARGS__) |
| #define | vmsge_vx_i32m8_b4_m(...) __riscv_vmsge_vx_i32m8_b4_mu(__VA_ARGS__) |
| #define | vmsge_vx_i32mf2_b64(...) __riscv_vmsge_vx_i32mf2_b64(__VA_ARGS__) |
| #define | vmsge_vx_i32mf2_b64_m(...) __riscv_vmsge_vx_i32mf2_b64_mu(__VA_ARGS__) |
| #define | vmsge_vx_i64m1_b64(...) __riscv_vmsge_vx_i64m1_b64(__VA_ARGS__) |
| #define | vmsge_vx_i64m1_b64_m(...) __riscv_vmsge_vx_i64m1_b64_mu(__VA_ARGS__) |
| #define | vmsge_vx_i64m2_b32(...) __riscv_vmsge_vx_i64m2_b32(__VA_ARGS__) |
| #define | vmsge_vx_i64m2_b32_m(...) __riscv_vmsge_vx_i64m2_b32_mu(__VA_ARGS__) |
| #define | vmsge_vx_i64m4_b16(...) __riscv_vmsge_vx_i64m4_b16(__VA_ARGS__) |
| #define | vmsge_vx_i64m4_b16_m(...) __riscv_vmsge_vx_i64m4_b16_mu(__VA_ARGS__) |
| #define | vmsge_vx_i64m8_b8(...) __riscv_vmsge_vx_i64m8_b8(__VA_ARGS__) |
| #define | vmsge_vx_i64m8_b8_m(...) __riscv_vmsge_vx_i64m8_b8_mu(__VA_ARGS__) |
| #define | vmsge_vx_i8m1_b8(...) __riscv_vmsge_vx_i8m1_b8(__VA_ARGS__) |
| #define | vmsge_vx_i8m1_b8_m(...) __riscv_vmsge_vx_i8m1_b8_mu(__VA_ARGS__) |
| #define | vmsge_vx_i8m2_b4(...) __riscv_vmsge_vx_i8m2_b4(__VA_ARGS__) |
| #define | vmsge_vx_i8m2_b4_m(...) __riscv_vmsge_vx_i8m2_b4_mu(__VA_ARGS__) |
| #define | vmsge_vx_i8m4_b2(...) __riscv_vmsge_vx_i8m4_b2(__VA_ARGS__) |
| #define | vmsge_vx_i8m4_b2_m(...) __riscv_vmsge_vx_i8m4_b2_mu(__VA_ARGS__) |
| #define | vmsge_vx_i8m8_b1(...) __riscv_vmsge_vx_i8m8_b1(__VA_ARGS__) |
| #define | vmsge_vx_i8m8_b1_m(...) __riscv_vmsge_vx_i8m8_b1_mu(__VA_ARGS__) |
| #define | vmsge_vx_i8mf2_b16(...) __riscv_vmsge_vx_i8mf2_b16(__VA_ARGS__) |
| #define | vmsge_vx_i8mf2_b16_m(...) __riscv_vmsge_vx_i8mf2_b16_mu(__VA_ARGS__) |
| #define | vmsge_vx_i8mf4_b32(...) __riscv_vmsge_vx_i8mf4_b32(__VA_ARGS__) |
| #define | vmsge_vx_i8mf4_b32_m(...) __riscv_vmsge_vx_i8mf4_b32_mu(__VA_ARGS__) |
| #define | vmsge_vx_i8mf8_b64(...) __riscv_vmsge_vx_i8mf8_b64(__VA_ARGS__) |
| #define | vmsge_vx_i8mf8_b64_m(...) __riscv_vmsge_vx_i8mf8_b64_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u16m1_b16(...) __riscv_vmsgeu_vv_u16m1_b16(__VA_ARGS__) |
| #define | vmsgeu_vv_u16m1_b16_m(...) __riscv_vmsgeu_vv_u16m1_b16_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u16m2_b8(...) __riscv_vmsgeu_vv_u16m2_b8(__VA_ARGS__) |
| #define | vmsgeu_vv_u16m2_b8_m(...) __riscv_vmsgeu_vv_u16m2_b8_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u16m4_b4(...) __riscv_vmsgeu_vv_u16m4_b4(__VA_ARGS__) |
| #define | vmsgeu_vv_u16m4_b4_m(...) __riscv_vmsgeu_vv_u16m4_b4_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u16m8_b2(...) __riscv_vmsgeu_vv_u16m8_b2(__VA_ARGS__) |
| #define | vmsgeu_vv_u16m8_b2_m(...) __riscv_vmsgeu_vv_u16m8_b2_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u16mf2_b32(...) __riscv_vmsgeu_vv_u16mf2_b32(__VA_ARGS__) |
| #define | vmsgeu_vv_u16mf2_b32_m(...) __riscv_vmsgeu_vv_u16mf2_b32_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u16mf4_b64(...) __riscv_vmsgeu_vv_u16mf4_b64(__VA_ARGS__) |
| #define | vmsgeu_vv_u16mf4_b64_m(...) __riscv_vmsgeu_vv_u16mf4_b64_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u32m1_b32(...) __riscv_vmsgeu_vv_u32m1_b32(__VA_ARGS__) |
| #define | vmsgeu_vv_u32m1_b32_m(...) __riscv_vmsgeu_vv_u32m1_b32_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u32m2_b16(...) __riscv_vmsgeu_vv_u32m2_b16(__VA_ARGS__) |
| #define | vmsgeu_vv_u32m2_b16_m(...) __riscv_vmsgeu_vv_u32m2_b16_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u32m4_b8(...) __riscv_vmsgeu_vv_u32m4_b8(__VA_ARGS__) |
| #define | vmsgeu_vv_u32m4_b8_m(...) __riscv_vmsgeu_vv_u32m4_b8_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u32m8_b4(...) __riscv_vmsgeu_vv_u32m8_b4(__VA_ARGS__) |
| #define | vmsgeu_vv_u32m8_b4_m(...) __riscv_vmsgeu_vv_u32m8_b4_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u32mf2_b64(...) __riscv_vmsgeu_vv_u32mf2_b64(__VA_ARGS__) |
| #define | vmsgeu_vv_u32mf2_b64_m(...) __riscv_vmsgeu_vv_u32mf2_b64_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u64m1_b64(...) __riscv_vmsgeu_vv_u64m1_b64(__VA_ARGS__) |
| #define | vmsgeu_vv_u64m1_b64_m(...) __riscv_vmsgeu_vv_u64m1_b64_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u64m2_b32(...) __riscv_vmsgeu_vv_u64m2_b32(__VA_ARGS__) |
| #define | vmsgeu_vv_u64m2_b32_m(...) __riscv_vmsgeu_vv_u64m2_b32_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u64m4_b16(...) __riscv_vmsgeu_vv_u64m4_b16(__VA_ARGS__) |
| #define | vmsgeu_vv_u64m4_b16_m(...) __riscv_vmsgeu_vv_u64m4_b16_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u64m8_b8(...) __riscv_vmsgeu_vv_u64m8_b8(__VA_ARGS__) |
| #define | vmsgeu_vv_u64m8_b8_m(...) __riscv_vmsgeu_vv_u64m8_b8_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u8m1_b8(...) __riscv_vmsgeu_vv_u8m1_b8(__VA_ARGS__) |
| #define | vmsgeu_vv_u8m1_b8_m(...) __riscv_vmsgeu_vv_u8m1_b8_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u8m2_b4(...) __riscv_vmsgeu_vv_u8m2_b4(__VA_ARGS__) |
| #define | vmsgeu_vv_u8m2_b4_m(...) __riscv_vmsgeu_vv_u8m2_b4_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u8m4_b2(...) __riscv_vmsgeu_vv_u8m4_b2(__VA_ARGS__) |
| #define | vmsgeu_vv_u8m4_b2_m(...) __riscv_vmsgeu_vv_u8m4_b2_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u8m8_b1(...) __riscv_vmsgeu_vv_u8m8_b1(__VA_ARGS__) |
| #define | vmsgeu_vv_u8m8_b1_m(...) __riscv_vmsgeu_vv_u8m8_b1_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u8mf2_b16(...) __riscv_vmsgeu_vv_u8mf2_b16(__VA_ARGS__) |
| #define | vmsgeu_vv_u8mf2_b16_m(...) __riscv_vmsgeu_vv_u8mf2_b16_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u8mf4_b32(...) __riscv_vmsgeu_vv_u8mf4_b32(__VA_ARGS__) |
| #define | vmsgeu_vv_u8mf4_b32_m(...) __riscv_vmsgeu_vv_u8mf4_b32_mu(__VA_ARGS__) |
| #define | vmsgeu_vv_u8mf8_b64(...) __riscv_vmsgeu_vv_u8mf8_b64(__VA_ARGS__) |
| #define | vmsgeu_vv_u8mf8_b64_m(...) __riscv_vmsgeu_vv_u8mf8_b64_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u16m1_b16(...) __riscv_vmsgeu_vx_u16m1_b16(__VA_ARGS__) |
| #define | vmsgeu_vx_u16m1_b16_m(...) __riscv_vmsgeu_vx_u16m1_b16_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u16m2_b8(...) __riscv_vmsgeu_vx_u16m2_b8(__VA_ARGS__) |
| #define | vmsgeu_vx_u16m2_b8_m(...) __riscv_vmsgeu_vx_u16m2_b8_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u16m4_b4(...) __riscv_vmsgeu_vx_u16m4_b4(__VA_ARGS__) |
| #define | vmsgeu_vx_u16m4_b4_m(...) __riscv_vmsgeu_vx_u16m4_b4_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u16m8_b2(...) __riscv_vmsgeu_vx_u16m8_b2(__VA_ARGS__) |
| #define | vmsgeu_vx_u16m8_b2_m(...) __riscv_vmsgeu_vx_u16m8_b2_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u16mf2_b32(...) __riscv_vmsgeu_vx_u16mf2_b32(__VA_ARGS__) |
| #define | vmsgeu_vx_u16mf2_b32_m(...) __riscv_vmsgeu_vx_u16mf2_b32_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u16mf4_b64(...) __riscv_vmsgeu_vx_u16mf4_b64(__VA_ARGS__) |
| #define | vmsgeu_vx_u16mf4_b64_m(...) __riscv_vmsgeu_vx_u16mf4_b64_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u32m1_b32(...) __riscv_vmsgeu_vx_u32m1_b32(__VA_ARGS__) |
| #define | vmsgeu_vx_u32m1_b32_m(...) __riscv_vmsgeu_vx_u32m1_b32_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u32m2_b16(...) __riscv_vmsgeu_vx_u32m2_b16(__VA_ARGS__) |
| #define | vmsgeu_vx_u32m2_b16_m(...) __riscv_vmsgeu_vx_u32m2_b16_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u32m4_b8(...) __riscv_vmsgeu_vx_u32m4_b8(__VA_ARGS__) |
| #define | vmsgeu_vx_u32m4_b8_m(...) __riscv_vmsgeu_vx_u32m4_b8_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u32m8_b4(...) __riscv_vmsgeu_vx_u32m8_b4(__VA_ARGS__) |
| #define | vmsgeu_vx_u32m8_b4_m(...) __riscv_vmsgeu_vx_u32m8_b4_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u32mf2_b64(...) __riscv_vmsgeu_vx_u32mf2_b64(__VA_ARGS__) |
| #define | vmsgeu_vx_u32mf2_b64_m(...) __riscv_vmsgeu_vx_u32mf2_b64_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u64m1_b64(...) __riscv_vmsgeu_vx_u64m1_b64(__VA_ARGS__) |
| #define | vmsgeu_vx_u64m1_b64_m(...) __riscv_vmsgeu_vx_u64m1_b64_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u64m2_b32(...) __riscv_vmsgeu_vx_u64m2_b32(__VA_ARGS__) |
| #define | vmsgeu_vx_u64m2_b32_m(...) __riscv_vmsgeu_vx_u64m2_b32_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u64m4_b16(...) __riscv_vmsgeu_vx_u64m4_b16(__VA_ARGS__) |
| #define | vmsgeu_vx_u64m4_b16_m(...) __riscv_vmsgeu_vx_u64m4_b16_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u64m8_b8(...) __riscv_vmsgeu_vx_u64m8_b8(__VA_ARGS__) |
| #define | vmsgeu_vx_u64m8_b8_m(...) __riscv_vmsgeu_vx_u64m8_b8_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u8m1_b8(...) __riscv_vmsgeu_vx_u8m1_b8(__VA_ARGS__) |
| #define | vmsgeu_vx_u8m1_b8_m(...) __riscv_vmsgeu_vx_u8m1_b8_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u8m2_b4(...) __riscv_vmsgeu_vx_u8m2_b4(__VA_ARGS__) |
| #define | vmsgeu_vx_u8m2_b4_m(...) __riscv_vmsgeu_vx_u8m2_b4_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u8m4_b2(...) __riscv_vmsgeu_vx_u8m4_b2(__VA_ARGS__) |
| #define | vmsgeu_vx_u8m4_b2_m(...) __riscv_vmsgeu_vx_u8m4_b2_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u8m8_b1(...) __riscv_vmsgeu_vx_u8m8_b1(__VA_ARGS__) |
| #define | vmsgeu_vx_u8m8_b1_m(...) __riscv_vmsgeu_vx_u8m8_b1_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u8mf2_b16(...) __riscv_vmsgeu_vx_u8mf2_b16(__VA_ARGS__) |
| #define | vmsgeu_vx_u8mf2_b16_m(...) __riscv_vmsgeu_vx_u8mf2_b16_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u8mf4_b32(...) __riscv_vmsgeu_vx_u8mf4_b32(__VA_ARGS__) |
| #define | vmsgeu_vx_u8mf4_b32_m(...) __riscv_vmsgeu_vx_u8mf4_b32_mu(__VA_ARGS__) |
| #define | vmsgeu_vx_u8mf8_b64(...) __riscv_vmsgeu_vx_u8mf8_b64(__VA_ARGS__) |
| #define | vmsgeu_vx_u8mf8_b64_m(...) __riscv_vmsgeu_vx_u8mf8_b64_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i16m1_b16(...) __riscv_vmsgt_vv_i16m1_b16(__VA_ARGS__) |
| #define | vmsgt_vv_i16m1_b16_m(...) __riscv_vmsgt_vv_i16m1_b16_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i16m2_b8(...) __riscv_vmsgt_vv_i16m2_b8(__VA_ARGS__) |
| #define | vmsgt_vv_i16m2_b8_m(...) __riscv_vmsgt_vv_i16m2_b8_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i16m4_b4(...) __riscv_vmsgt_vv_i16m4_b4(__VA_ARGS__) |
| #define | vmsgt_vv_i16m4_b4_m(...) __riscv_vmsgt_vv_i16m4_b4_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i16m8_b2(...) __riscv_vmsgt_vv_i16m8_b2(__VA_ARGS__) |
| #define | vmsgt_vv_i16m8_b2_m(...) __riscv_vmsgt_vv_i16m8_b2_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i16mf2_b32(...) __riscv_vmsgt_vv_i16mf2_b32(__VA_ARGS__) |
| #define | vmsgt_vv_i16mf2_b32_m(...) __riscv_vmsgt_vv_i16mf2_b32_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i16mf4_b64(...) __riscv_vmsgt_vv_i16mf4_b64(__VA_ARGS__) |
| #define | vmsgt_vv_i16mf4_b64_m(...) __riscv_vmsgt_vv_i16mf4_b64_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i32m1_b32(...) __riscv_vmsgt_vv_i32m1_b32(__VA_ARGS__) |
| #define | vmsgt_vv_i32m1_b32_m(...) __riscv_vmsgt_vv_i32m1_b32_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i32m2_b16(...) __riscv_vmsgt_vv_i32m2_b16(__VA_ARGS__) |
| #define | vmsgt_vv_i32m2_b16_m(...) __riscv_vmsgt_vv_i32m2_b16_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i32m4_b8(...) __riscv_vmsgt_vv_i32m4_b8(__VA_ARGS__) |
| #define | vmsgt_vv_i32m4_b8_m(...) __riscv_vmsgt_vv_i32m4_b8_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i32m8_b4(...) __riscv_vmsgt_vv_i32m8_b4(__VA_ARGS__) |
| #define | vmsgt_vv_i32m8_b4_m(...) __riscv_vmsgt_vv_i32m8_b4_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i32mf2_b64(...) __riscv_vmsgt_vv_i32mf2_b64(__VA_ARGS__) |
| #define | vmsgt_vv_i32mf2_b64_m(...) __riscv_vmsgt_vv_i32mf2_b64_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i64m1_b64(...) __riscv_vmsgt_vv_i64m1_b64(__VA_ARGS__) |
| #define | vmsgt_vv_i64m1_b64_m(...) __riscv_vmsgt_vv_i64m1_b64_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i64m2_b32(...) __riscv_vmsgt_vv_i64m2_b32(__VA_ARGS__) |
| #define | vmsgt_vv_i64m2_b32_m(...) __riscv_vmsgt_vv_i64m2_b32_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i64m4_b16(...) __riscv_vmsgt_vv_i64m4_b16(__VA_ARGS__) |
| #define | vmsgt_vv_i64m4_b16_m(...) __riscv_vmsgt_vv_i64m4_b16_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i64m8_b8(...) __riscv_vmsgt_vv_i64m8_b8(__VA_ARGS__) |
| #define | vmsgt_vv_i64m8_b8_m(...) __riscv_vmsgt_vv_i64m8_b8_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i8m1_b8(...) __riscv_vmsgt_vv_i8m1_b8(__VA_ARGS__) |
| #define | vmsgt_vv_i8m1_b8_m(...) __riscv_vmsgt_vv_i8m1_b8_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i8m2_b4(...) __riscv_vmsgt_vv_i8m2_b4(__VA_ARGS__) |
| #define | vmsgt_vv_i8m2_b4_m(...) __riscv_vmsgt_vv_i8m2_b4_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i8m4_b2(...) __riscv_vmsgt_vv_i8m4_b2(__VA_ARGS__) |
| #define | vmsgt_vv_i8m4_b2_m(...) __riscv_vmsgt_vv_i8m4_b2_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i8m8_b1(...) __riscv_vmsgt_vv_i8m8_b1(__VA_ARGS__) |
| #define | vmsgt_vv_i8m8_b1_m(...) __riscv_vmsgt_vv_i8m8_b1_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i8mf2_b16(...) __riscv_vmsgt_vv_i8mf2_b16(__VA_ARGS__) |
| #define | vmsgt_vv_i8mf2_b16_m(...) __riscv_vmsgt_vv_i8mf2_b16_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i8mf4_b32(...) __riscv_vmsgt_vv_i8mf4_b32(__VA_ARGS__) |
| #define | vmsgt_vv_i8mf4_b32_m(...) __riscv_vmsgt_vv_i8mf4_b32_mu(__VA_ARGS__) |
| #define | vmsgt_vv_i8mf8_b64(...) __riscv_vmsgt_vv_i8mf8_b64(__VA_ARGS__) |
| #define | vmsgt_vv_i8mf8_b64_m(...) __riscv_vmsgt_vv_i8mf8_b64_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i16m1_b16(...) __riscv_vmsgt_vx_i16m1_b16(__VA_ARGS__) |
| #define | vmsgt_vx_i16m1_b16_m(...) __riscv_vmsgt_vx_i16m1_b16_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i16m2_b8(...) __riscv_vmsgt_vx_i16m2_b8(__VA_ARGS__) |
| #define | vmsgt_vx_i16m2_b8_m(...) __riscv_vmsgt_vx_i16m2_b8_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i16m4_b4(...) __riscv_vmsgt_vx_i16m4_b4(__VA_ARGS__) |
| #define | vmsgt_vx_i16m4_b4_m(...) __riscv_vmsgt_vx_i16m4_b4_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i16m8_b2(...) __riscv_vmsgt_vx_i16m8_b2(__VA_ARGS__) |
| #define | vmsgt_vx_i16m8_b2_m(...) __riscv_vmsgt_vx_i16m8_b2_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i16mf2_b32(...) __riscv_vmsgt_vx_i16mf2_b32(__VA_ARGS__) |
| #define | vmsgt_vx_i16mf2_b32_m(...) __riscv_vmsgt_vx_i16mf2_b32_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i16mf4_b64(...) __riscv_vmsgt_vx_i16mf4_b64(__VA_ARGS__) |
| #define | vmsgt_vx_i16mf4_b64_m(...) __riscv_vmsgt_vx_i16mf4_b64_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i32m1_b32(...) __riscv_vmsgt_vx_i32m1_b32(__VA_ARGS__) |
| #define | vmsgt_vx_i32m1_b32_m(...) __riscv_vmsgt_vx_i32m1_b32_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i32m2_b16(...) __riscv_vmsgt_vx_i32m2_b16(__VA_ARGS__) |
| #define | vmsgt_vx_i32m2_b16_m(...) __riscv_vmsgt_vx_i32m2_b16_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i32m4_b8(...) __riscv_vmsgt_vx_i32m4_b8(__VA_ARGS__) |
| #define | vmsgt_vx_i32m4_b8_m(...) __riscv_vmsgt_vx_i32m4_b8_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i32m8_b4(...) __riscv_vmsgt_vx_i32m8_b4(__VA_ARGS__) |
| #define | vmsgt_vx_i32m8_b4_m(...) __riscv_vmsgt_vx_i32m8_b4_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i32mf2_b64(...) __riscv_vmsgt_vx_i32mf2_b64(__VA_ARGS__) |
| #define | vmsgt_vx_i32mf2_b64_m(...) __riscv_vmsgt_vx_i32mf2_b64_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i64m1_b64(...) __riscv_vmsgt_vx_i64m1_b64(__VA_ARGS__) |
| #define | vmsgt_vx_i64m1_b64_m(...) __riscv_vmsgt_vx_i64m1_b64_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i64m2_b32(...) __riscv_vmsgt_vx_i64m2_b32(__VA_ARGS__) |
| #define | vmsgt_vx_i64m2_b32_m(...) __riscv_vmsgt_vx_i64m2_b32_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i64m4_b16(...) __riscv_vmsgt_vx_i64m4_b16(__VA_ARGS__) |
| #define | vmsgt_vx_i64m4_b16_m(...) __riscv_vmsgt_vx_i64m4_b16_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i64m8_b8(...) __riscv_vmsgt_vx_i64m8_b8(__VA_ARGS__) |
| #define | vmsgt_vx_i64m8_b8_m(...) __riscv_vmsgt_vx_i64m8_b8_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i8m1_b8(...) __riscv_vmsgt_vx_i8m1_b8(__VA_ARGS__) |
| #define | vmsgt_vx_i8m1_b8_m(...) __riscv_vmsgt_vx_i8m1_b8_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i8m2_b4(...) __riscv_vmsgt_vx_i8m2_b4(__VA_ARGS__) |
| #define | vmsgt_vx_i8m2_b4_m(...) __riscv_vmsgt_vx_i8m2_b4_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i8m4_b2(...) __riscv_vmsgt_vx_i8m4_b2(__VA_ARGS__) |
| #define | vmsgt_vx_i8m4_b2_m(...) __riscv_vmsgt_vx_i8m4_b2_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i8m8_b1(...) __riscv_vmsgt_vx_i8m8_b1(__VA_ARGS__) |
| #define | vmsgt_vx_i8m8_b1_m(...) __riscv_vmsgt_vx_i8m8_b1_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i8mf2_b16(...) __riscv_vmsgt_vx_i8mf2_b16(__VA_ARGS__) |
| #define | vmsgt_vx_i8mf2_b16_m(...) __riscv_vmsgt_vx_i8mf2_b16_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i8mf4_b32(...) __riscv_vmsgt_vx_i8mf4_b32(__VA_ARGS__) |
| #define | vmsgt_vx_i8mf4_b32_m(...) __riscv_vmsgt_vx_i8mf4_b32_mu(__VA_ARGS__) |
| #define | vmsgt_vx_i8mf8_b64(...) __riscv_vmsgt_vx_i8mf8_b64(__VA_ARGS__) |
| #define | vmsgt_vx_i8mf8_b64_m(...) __riscv_vmsgt_vx_i8mf8_b64_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u16m1_b16(...) __riscv_vmsgtu_vv_u16m1_b16(__VA_ARGS__) |
| #define | vmsgtu_vv_u16m1_b16_m(...) __riscv_vmsgtu_vv_u16m1_b16_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u16m2_b8(...) __riscv_vmsgtu_vv_u16m2_b8(__VA_ARGS__) |
| #define | vmsgtu_vv_u16m2_b8_m(...) __riscv_vmsgtu_vv_u16m2_b8_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u16m4_b4(...) __riscv_vmsgtu_vv_u16m4_b4(__VA_ARGS__) |
| #define | vmsgtu_vv_u16m4_b4_m(...) __riscv_vmsgtu_vv_u16m4_b4_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u16m8_b2(...) __riscv_vmsgtu_vv_u16m8_b2(__VA_ARGS__) |
| #define | vmsgtu_vv_u16m8_b2_m(...) __riscv_vmsgtu_vv_u16m8_b2_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u16mf2_b32(...) __riscv_vmsgtu_vv_u16mf2_b32(__VA_ARGS__) |
| #define | vmsgtu_vv_u16mf2_b32_m(...) __riscv_vmsgtu_vv_u16mf2_b32_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u16mf4_b64(...) __riscv_vmsgtu_vv_u16mf4_b64(__VA_ARGS__) |
| #define | vmsgtu_vv_u16mf4_b64_m(...) __riscv_vmsgtu_vv_u16mf4_b64_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u32m1_b32(...) __riscv_vmsgtu_vv_u32m1_b32(__VA_ARGS__) |
| #define | vmsgtu_vv_u32m1_b32_m(...) __riscv_vmsgtu_vv_u32m1_b32_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u32m2_b16(...) __riscv_vmsgtu_vv_u32m2_b16(__VA_ARGS__) |
| #define | vmsgtu_vv_u32m2_b16_m(...) __riscv_vmsgtu_vv_u32m2_b16_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u32m4_b8(...) __riscv_vmsgtu_vv_u32m4_b8(__VA_ARGS__) |
| #define | vmsgtu_vv_u32m4_b8_m(...) __riscv_vmsgtu_vv_u32m4_b8_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u32m8_b4(...) __riscv_vmsgtu_vv_u32m8_b4(__VA_ARGS__) |
| #define | vmsgtu_vv_u32m8_b4_m(...) __riscv_vmsgtu_vv_u32m8_b4_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u32mf2_b64(...) __riscv_vmsgtu_vv_u32mf2_b64(__VA_ARGS__) |
| #define | vmsgtu_vv_u32mf2_b64_m(...) __riscv_vmsgtu_vv_u32mf2_b64_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u64m1_b64(...) __riscv_vmsgtu_vv_u64m1_b64(__VA_ARGS__) |
| #define | vmsgtu_vv_u64m1_b64_m(...) __riscv_vmsgtu_vv_u64m1_b64_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u64m2_b32(...) __riscv_vmsgtu_vv_u64m2_b32(__VA_ARGS__) |
| #define | vmsgtu_vv_u64m2_b32_m(...) __riscv_vmsgtu_vv_u64m2_b32_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u64m4_b16(...) __riscv_vmsgtu_vv_u64m4_b16(__VA_ARGS__) |
| #define | vmsgtu_vv_u64m4_b16_m(...) __riscv_vmsgtu_vv_u64m4_b16_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u64m8_b8(...) __riscv_vmsgtu_vv_u64m8_b8(__VA_ARGS__) |
| #define | vmsgtu_vv_u64m8_b8_m(...) __riscv_vmsgtu_vv_u64m8_b8_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u8m1_b8(...) __riscv_vmsgtu_vv_u8m1_b8(__VA_ARGS__) |
| #define | vmsgtu_vv_u8m1_b8_m(...) __riscv_vmsgtu_vv_u8m1_b8_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u8m2_b4(...) __riscv_vmsgtu_vv_u8m2_b4(__VA_ARGS__) |
| #define | vmsgtu_vv_u8m2_b4_m(...) __riscv_vmsgtu_vv_u8m2_b4_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u8m4_b2(...) __riscv_vmsgtu_vv_u8m4_b2(__VA_ARGS__) |
| #define | vmsgtu_vv_u8m4_b2_m(...) __riscv_vmsgtu_vv_u8m4_b2_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u8m8_b1(...) __riscv_vmsgtu_vv_u8m8_b1(__VA_ARGS__) |
| #define | vmsgtu_vv_u8m8_b1_m(...) __riscv_vmsgtu_vv_u8m8_b1_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u8mf2_b16(...) __riscv_vmsgtu_vv_u8mf2_b16(__VA_ARGS__) |
| #define | vmsgtu_vv_u8mf2_b16_m(...) __riscv_vmsgtu_vv_u8mf2_b16_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u8mf4_b32(...) __riscv_vmsgtu_vv_u8mf4_b32(__VA_ARGS__) |
| #define | vmsgtu_vv_u8mf4_b32_m(...) __riscv_vmsgtu_vv_u8mf4_b32_mu(__VA_ARGS__) |
| #define | vmsgtu_vv_u8mf8_b64(...) __riscv_vmsgtu_vv_u8mf8_b64(__VA_ARGS__) |
| #define | vmsgtu_vv_u8mf8_b64_m(...) __riscv_vmsgtu_vv_u8mf8_b64_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u16m1_b16(...) __riscv_vmsgtu_vx_u16m1_b16(__VA_ARGS__) |
| #define | vmsgtu_vx_u16m1_b16_m(...) __riscv_vmsgtu_vx_u16m1_b16_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u16m2_b8(...) __riscv_vmsgtu_vx_u16m2_b8(__VA_ARGS__) |
| #define | vmsgtu_vx_u16m2_b8_m(...) __riscv_vmsgtu_vx_u16m2_b8_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u16m4_b4(...) __riscv_vmsgtu_vx_u16m4_b4(__VA_ARGS__) |
| #define | vmsgtu_vx_u16m4_b4_m(...) __riscv_vmsgtu_vx_u16m4_b4_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u16m8_b2(...) __riscv_vmsgtu_vx_u16m8_b2(__VA_ARGS__) |
| #define | vmsgtu_vx_u16m8_b2_m(...) __riscv_vmsgtu_vx_u16m8_b2_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u16mf2_b32(...) __riscv_vmsgtu_vx_u16mf2_b32(__VA_ARGS__) |
| #define | vmsgtu_vx_u16mf2_b32_m(...) __riscv_vmsgtu_vx_u16mf2_b32_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u16mf4_b64(...) __riscv_vmsgtu_vx_u16mf4_b64(__VA_ARGS__) |
| #define | vmsgtu_vx_u16mf4_b64_m(...) __riscv_vmsgtu_vx_u16mf4_b64_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u32m1_b32(...) __riscv_vmsgtu_vx_u32m1_b32(__VA_ARGS__) |
| #define | vmsgtu_vx_u32m1_b32_m(...) __riscv_vmsgtu_vx_u32m1_b32_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u32m2_b16(...) __riscv_vmsgtu_vx_u32m2_b16(__VA_ARGS__) |
| #define | vmsgtu_vx_u32m2_b16_m(...) __riscv_vmsgtu_vx_u32m2_b16_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u32m4_b8(...) __riscv_vmsgtu_vx_u32m4_b8(__VA_ARGS__) |
| #define | vmsgtu_vx_u32m4_b8_m(...) __riscv_vmsgtu_vx_u32m4_b8_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u32m8_b4(...) __riscv_vmsgtu_vx_u32m8_b4(__VA_ARGS__) |
| #define | vmsgtu_vx_u32m8_b4_m(...) __riscv_vmsgtu_vx_u32m8_b4_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u32mf2_b64(...) __riscv_vmsgtu_vx_u32mf2_b64(__VA_ARGS__) |
| #define | vmsgtu_vx_u32mf2_b64_m(...) __riscv_vmsgtu_vx_u32mf2_b64_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u64m1_b64(...) __riscv_vmsgtu_vx_u64m1_b64(__VA_ARGS__) |
| #define | vmsgtu_vx_u64m1_b64_m(...) __riscv_vmsgtu_vx_u64m1_b64_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u64m2_b32(...) __riscv_vmsgtu_vx_u64m2_b32(__VA_ARGS__) |
| #define | vmsgtu_vx_u64m2_b32_m(...) __riscv_vmsgtu_vx_u64m2_b32_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u64m4_b16(...) __riscv_vmsgtu_vx_u64m4_b16(__VA_ARGS__) |
| #define | vmsgtu_vx_u64m4_b16_m(...) __riscv_vmsgtu_vx_u64m4_b16_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u64m8_b8(...) __riscv_vmsgtu_vx_u64m8_b8(__VA_ARGS__) |
| #define | vmsgtu_vx_u64m8_b8_m(...) __riscv_vmsgtu_vx_u64m8_b8_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u8m1_b8(...) __riscv_vmsgtu_vx_u8m1_b8(__VA_ARGS__) |
| #define | vmsgtu_vx_u8m1_b8_m(...) __riscv_vmsgtu_vx_u8m1_b8_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u8m2_b4(...) __riscv_vmsgtu_vx_u8m2_b4(__VA_ARGS__) |
| #define | vmsgtu_vx_u8m2_b4_m(...) __riscv_vmsgtu_vx_u8m2_b4_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u8m4_b2(...) __riscv_vmsgtu_vx_u8m4_b2(__VA_ARGS__) |
| #define | vmsgtu_vx_u8m4_b2_m(...) __riscv_vmsgtu_vx_u8m4_b2_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u8m8_b1(...) __riscv_vmsgtu_vx_u8m8_b1(__VA_ARGS__) |
| #define | vmsgtu_vx_u8m8_b1_m(...) __riscv_vmsgtu_vx_u8m8_b1_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u8mf2_b16(...) __riscv_vmsgtu_vx_u8mf2_b16(__VA_ARGS__) |
| #define | vmsgtu_vx_u8mf2_b16_m(...) __riscv_vmsgtu_vx_u8mf2_b16_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u8mf4_b32(...) __riscv_vmsgtu_vx_u8mf4_b32(__VA_ARGS__) |
| #define | vmsgtu_vx_u8mf4_b32_m(...) __riscv_vmsgtu_vx_u8mf4_b32_mu(__VA_ARGS__) |
| #define | vmsgtu_vx_u8mf8_b64(...) __riscv_vmsgtu_vx_u8mf8_b64(__VA_ARGS__) |
| #define | vmsgtu_vx_u8mf8_b64_m(...) __riscv_vmsgtu_vx_u8mf8_b64_mu(__VA_ARGS__) |
| #define | vmsif_m_b1(...) __riscv_vmsif_m_b1(__VA_ARGS__) |
| #define | vmsif_m_b16(...) __riscv_vmsif_m_b16(__VA_ARGS__) |
| #define | vmsif_m_b16_m(...) __riscv_vmsif_m_b16_mu(__VA_ARGS__) |
| #define | vmsif_m_b1_m(...) __riscv_vmsif_m_b1_mu(__VA_ARGS__) |
| #define | vmsif_m_b2(...) __riscv_vmsif_m_b2(__VA_ARGS__) |
| #define | vmsif_m_b2_m(...) __riscv_vmsif_m_b2_mu(__VA_ARGS__) |
| #define | vmsif_m_b32(...) __riscv_vmsif_m_b32(__VA_ARGS__) |
| #define | vmsif_m_b32_m(...) __riscv_vmsif_m_b32_mu(__VA_ARGS__) |
| #define | vmsif_m_b4(...) __riscv_vmsif_m_b4(__VA_ARGS__) |
| #define | vmsif_m_b4_m(...) __riscv_vmsif_m_b4_mu(__VA_ARGS__) |
| #define | vmsif_m_b64(...) __riscv_vmsif_m_b64(__VA_ARGS__) |
| #define | vmsif_m_b64_m(...) __riscv_vmsif_m_b64_mu(__VA_ARGS__) |
| #define | vmsif_m_b8(...) __riscv_vmsif_m_b8(__VA_ARGS__) |
| #define | vmsif_m_b8_m(...) __riscv_vmsif_m_b8_mu(__VA_ARGS__) |
| #define | vmsle_vv_i16m1_b16(...) __riscv_vmsle_vv_i16m1_b16(__VA_ARGS__) |
| #define | vmsle_vv_i16m1_b16_m(...) __riscv_vmsle_vv_i16m1_b16_mu(__VA_ARGS__) |
| #define | vmsle_vv_i16m2_b8(...) __riscv_vmsle_vv_i16m2_b8(__VA_ARGS__) |
| #define | vmsle_vv_i16m2_b8_m(...) __riscv_vmsle_vv_i16m2_b8_mu(__VA_ARGS__) |
| #define | vmsle_vv_i16m4_b4(...) __riscv_vmsle_vv_i16m4_b4(__VA_ARGS__) |
| #define | vmsle_vv_i16m4_b4_m(...) __riscv_vmsle_vv_i16m4_b4_mu(__VA_ARGS__) |
| #define | vmsle_vv_i16m8_b2(...) __riscv_vmsle_vv_i16m8_b2(__VA_ARGS__) |
| #define | vmsle_vv_i16m8_b2_m(...) __riscv_vmsle_vv_i16m8_b2_mu(__VA_ARGS__) |
| #define | vmsle_vv_i16mf2_b32(...) __riscv_vmsle_vv_i16mf2_b32(__VA_ARGS__) |
| #define | vmsle_vv_i16mf2_b32_m(...) __riscv_vmsle_vv_i16mf2_b32_mu(__VA_ARGS__) |
| #define | vmsle_vv_i16mf4_b64(...) __riscv_vmsle_vv_i16mf4_b64(__VA_ARGS__) |
| #define | vmsle_vv_i16mf4_b64_m(...) __riscv_vmsle_vv_i16mf4_b64_mu(__VA_ARGS__) |
| #define | vmsle_vv_i32m1_b32(...) __riscv_vmsle_vv_i32m1_b32(__VA_ARGS__) |
| #define | vmsle_vv_i32m1_b32_m(...) __riscv_vmsle_vv_i32m1_b32_mu(__VA_ARGS__) |
| #define | vmsle_vv_i32m2_b16(...) __riscv_vmsle_vv_i32m2_b16(__VA_ARGS__) |
| #define | vmsle_vv_i32m2_b16_m(...) __riscv_vmsle_vv_i32m2_b16_mu(__VA_ARGS__) |
| #define | vmsle_vv_i32m4_b8(...) __riscv_vmsle_vv_i32m4_b8(__VA_ARGS__) |
| #define | vmsle_vv_i32m4_b8_m(...) __riscv_vmsle_vv_i32m4_b8_mu(__VA_ARGS__) |
| #define | vmsle_vv_i32m8_b4(...) __riscv_vmsle_vv_i32m8_b4(__VA_ARGS__) |
| #define | vmsle_vv_i32m8_b4_m(...) __riscv_vmsle_vv_i32m8_b4_mu(__VA_ARGS__) |
| #define | vmsle_vv_i32mf2_b64(...) __riscv_vmsle_vv_i32mf2_b64(__VA_ARGS__) |
| #define | vmsle_vv_i32mf2_b64_m(...) __riscv_vmsle_vv_i32mf2_b64_mu(__VA_ARGS__) |
| #define | vmsle_vv_i64m1_b64(...) __riscv_vmsle_vv_i64m1_b64(__VA_ARGS__) |
| #define | vmsle_vv_i64m1_b64_m(...) __riscv_vmsle_vv_i64m1_b64_mu(__VA_ARGS__) |
| #define | vmsle_vv_i64m2_b32(...) __riscv_vmsle_vv_i64m2_b32(__VA_ARGS__) |
| #define | vmsle_vv_i64m2_b32_m(...) __riscv_vmsle_vv_i64m2_b32_mu(__VA_ARGS__) |
| #define | vmsle_vv_i64m4_b16(...) __riscv_vmsle_vv_i64m4_b16(__VA_ARGS__) |
| #define | vmsle_vv_i64m4_b16_m(...) __riscv_vmsle_vv_i64m4_b16_mu(__VA_ARGS__) |
| #define | vmsle_vv_i64m8_b8(...) __riscv_vmsle_vv_i64m8_b8(__VA_ARGS__) |
| #define | vmsle_vv_i64m8_b8_m(...) __riscv_vmsle_vv_i64m8_b8_mu(__VA_ARGS__) |
| #define | vmsle_vv_i8m1_b8(...) __riscv_vmsle_vv_i8m1_b8(__VA_ARGS__) |
| #define | vmsle_vv_i8m1_b8_m(...) __riscv_vmsle_vv_i8m1_b8_mu(__VA_ARGS__) |
| #define | vmsle_vv_i8m2_b4(...) __riscv_vmsle_vv_i8m2_b4(__VA_ARGS__) |
| #define | vmsle_vv_i8m2_b4_m(...) __riscv_vmsle_vv_i8m2_b4_mu(__VA_ARGS__) |
| #define | vmsle_vv_i8m4_b2(...) __riscv_vmsle_vv_i8m4_b2(__VA_ARGS__) |
| #define | vmsle_vv_i8m4_b2_m(...) __riscv_vmsle_vv_i8m4_b2_mu(__VA_ARGS__) |
| #define | vmsle_vv_i8m8_b1(...) __riscv_vmsle_vv_i8m8_b1(__VA_ARGS__) |
| #define | vmsle_vv_i8m8_b1_m(...) __riscv_vmsle_vv_i8m8_b1_mu(__VA_ARGS__) |
| #define | vmsle_vv_i8mf2_b16(...) __riscv_vmsle_vv_i8mf2_b16(__VA_ARGS__) |
| #define | vmsle_vv_i8mf2_b16_m(...) __riscv_vmsle_vv_i8mf2_b16_mu(__VA_ARGS__) |
| #define | vmsle_vv_i8mf4_b32(...) __riscv_vmsle_vv_i8mf4_b32(__VA_ARGS__) |
| #define | vmsle_vv_i8mf4_b32_m(...) __riscv_vmsle_vv_i8mf4_b32_mu(__VA_ARGS__) |
| #define | vmsle_vv_i8mf8_b64(...) __riscv_vmsle_vv_i8mf8_b64(__VA_ARGS__) |
| #define | vmsle_vv_i8mf8_b64_m(...) __riscv_vmsle_vv_i8mf8_b64_mu(__VA_ARGS__) |
| #define | vmsle_vx_i16m1_b16(...) __riscv_vmsle_vx_i16m1_b16(__VA_ARGS__) |
| #define | vmsle_vx_i16m1_b16_m(...) __riscv_vmsle_vx_i16m1_b16_mu(__VA_ARGS__) |
| #define | vmsle_vx_i16m2_b8(...) __riscv_vmsle_vx_i16m2_b8(__VA_ARGS__) |
| #define | vmsle_vx_i16m2_b8_m(...) __riscv_vmsle_vx_i16m2_b8_mu(__VA_ARGS__) |
| #define | vmsle_vx_i16m4_b4(...) __riscv_vmsle_vx_i16m4_b4(__VA_ARGS__) |
| #define | vmsle_vx_i16m4_b4_m(...) __riscv_vmsle_vx_i16m4_b4_mu(__VA_ARGS__) |
| #define | vmsle_vx_i16m8_b2(...) __riscv_vmsle_vx_i16m8_b2(__VA_ARGS__) |
| #define | vmsle_vx_i16m8_b2_m(...) __riscv_vmsle_vx_i16m8_b2_mu(__VA_ARGS__) |
| #define | vmsle_vx_i16mf2_b32(...) __riscv_vmsle_vx_i16mf2_b32(__VA_ARGS__) |
| #define | vmsle_vx_i16mf2_b32_m(...) __riscv_vmsle_vx_i16mf2_b32_mu(__VA_ARGS__) |
| #define | vmsle_vx_i16mf4_b64(...) __riscv_vmsle_vx_i16mf4_b64(__VA_ARGS__) |
| #define | vmsle_vx_i16mf4_b64_m(...) __riscv_vmsle_vx_i16mf4_b64_mu(__VA_ARGS__) |
| #define | vmsle_vx_i32m1_b32(...) __riscv_vmsle_vx_i32m1_b32(__VA_ARGS__) |
| #define | vmsle_vx_i32m1_b32_m(...) __riscv_vmsle_vx_i32m1_b32_mu(__VA_ARGS__) |
| #define | vmsle_vx_i32m2_b16(...) __riscv_vmsle_vx_i32m2_b16(__VA_ARGS__) |
| #define | vmsle_vx_i32m2_b16_m(...) __riscv_vmsle_vx_i32m2_b16_mu(__VA_ARGS__) |
| #define | vmsle_vx_i32m4_b8(...) __riscv_vmsle_vx_i32m4_b8(__VA_ARGS__) |
| #define | vmsle_vx_i32m4_b8_m(...) __riscv_vmsle_vx_i32m4_b8_mu(__VA_ARGS__) |
| #define | vmsle_vx_i32m8_b4(...) __riscv_vmsle_vx_i32m8_b4(__VA_ARGS__) |
| #define | vmsle_vx_i32m8_b4_m(...) __riscv_vmsle_vx_i32m8_b4_mu(__VA_ARGS__) |
| #define | vmsle_vx_i32mf2_b64(...) __riscv_vmsle_vx_i32mf2_b64(__VA_ARGS__) |
| #define | vmsle_vx_i32mf2_b64_m(...) __riscv_vmsle_vx_i32mf2_b64_mu(__VA_ARGS__) |
| #define | vmsle_vx_i64m1_b64(...) __riscv_vmsle_vx_i64m1_b64(__VA_ARGS__) |
| #define | vmsle_vx_i64m1_b64_m(...) __riscv_vmsle_vx_i64m1_b64_mu(__VA_ARGS__) |
| #define | vmsle_vx_i64m2_b32(...) __riscv_vmsle_vx_i64m2_b32(__VA_ARGS__) |
| #define | vmsle_vx_i64m2_b32_m(...) __riscv_vmsle_vx_i64m2_b32_mu(__VA_ARGS__) |
| #define | vmsle_vx_i64m4_b16(...) __riscv_vmsle_vx_i64m4_b16(__VA_ARGS__) |
| #define | vmsle_vx_i64m4_b16_m(...) __riscv_vmsle_vx_i64m4_b16_mu(__VA_ARGS__) |
| #define | vmsle_vx_i64m8_b8(...) __riscv_vmsle_vx_i64m8_b8(__VA_ARGS__) |
| #define | vmsle_vx_i64m8_b8_m(...) __riscv_vmsle_vx_i64m8_b8_mu(__VA_ARGS__) |
| #define | vmsle_vx_i8m1_b8(...) __riscv_vmsle_vx_i8m1_b8(__VA_ARGS__) |
| #define | vmsle_vx_i8m1_b8_m(...) __riscv_vmsle_vx_i8m1_b8_mu(__VA_ARGS__) |
| #define | vmsle_vx_i8m2_b4(...) __riscv_vmsle_vx_i8m2_b4(__VA_ARGS__) |
| #define | vmsle_vx_i8m2_b4_m(...) __riscv_vmsle_vx_i8m2_b4_mu(__VA_ARGS__) |
| #define | vmsle_vx_i8m4_b2(...) __riscv_vmsle_vx_i8m4_b2(__VA_ARGS__) |
| #define | vmsle_vx_i8m4_b2_m(...) __riscv_vmsle_vx_i8m4_b2_mu(__VA_ARGS__) |
| #define | vmsle_vx_i8m8_b1(...) __riscv_vmsle_vx_i8m8_b1(__VA_ARGS__) |
| #define | vmsle_vx_i8m8_b1_m(...) __riscv_vmsle_vx_i8m8_b1_mu(__VA_ARGS__) |
| #define | vmsle_vx_i8mf2_b16(...) __riscv_vmsle_vx_i8mf2_b16(__VA_ARGS__) |
| #define | vmsle_vx_i8mf2_b16_m(...) __riscv_vmsle_vx_i8mf2_b16_mu(__VA_ARGS__) |
| #define | vmsle_vx_i8mf4_b32(...) __riscv_vmsle_vx_i8mf4_b32(__VA_ARGS__) |
| #define | vmsle_vx_i8mf4_b32_m(...) __riscv_vmsle_vx_i8mf4_b32_mu(__VA_ARGS__) |
| #define | vmsle_vx_i8mf8_b64(...) __riscv_vmsle_vx_i8mf8_b64(__VA_ARGS__) |
| #define | vmsle_vx_i8mf8_b64_m(...) __riscv_vmsle_vx_i8mf8_b64_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u16m1_b16(...) __riscv_vmsleu_vv_u16m1_b16(__VA_ARGS__) |
| #define | vmsleu_vv_u16m1_b16_m(...) __riscv_vmsleu_vv_u16m1_b16_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u16m2_b8(...) __riscv_vmsleu_vv_u16m2_b8(__VA_ARGS__) |
| #define | vmsleu_vv_u16m2_b8_m(...) __riscv_vmsleu_vv_u16m2_b8_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u16m4_b4(...) __riscv_vmsleu_vv_u16m4_b4(__VA_ARGS__) |
| #define | vmsleu_vv_u16m4_b4_m(...) __riscv_vmsleu_vv_u16m4_b4_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u16m8_b2(...) __riscv_vmsleu_vv_u16m8_b2(__VA_ARGS__) |
| #define | vmsleu_vv_u16m8_b2_m(...) __riscv_vmsleu_vv_u16m8_b2_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u16mf2_b32(...) __riscv_vmsleu_vv_u16mf2_b32(__VA_ARGS__) |
| #define | vmsleu_vv_u16mf2_b32_m(...) __riscv_vmsleu_vv_u16mf2_b32_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u16mf4_b64(...) __riscv_vmsleu_vv_u16mf4_b64(__VA_ARGS__) |
| #define | vmsleu_vv_u16mf4_b64_m(...) __riscv_vmsleu_vv_u16mf4_b64_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u32m1_b32(...) __riscv_vmsleu_vv_u32m1_b32(__VA_ARGS__) |
| #define | vmsleu_vv_u32m1_b32_m(...) __riscv_vmsleu_vv_u32m1_b32_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u32m2_b16(...) __riscv_vmsleu_vv_u32m2_b16(__VA_ARGS__) |
| #define | vmsleu_vv_u32m2_b16_m(...) __riscv_vmsleu_vv_u32m2_b16_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u32m4_b8(...) __riscv_vmsleu_vv_u32m4_b8(__VA_ARGS__) |
| #define | vmsleu_vv_u32m4_b8_m(...) __riscv_vmsleu_vv_u32m4_b8_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u32m8_b4(...) __riscv_vmsleu_vv_u32m8_b4(__VA_ARGS__) |
| #define | vmsleu_vv_u32m8_b4_m(...) __riscv_vmsleu_vv_u32m8_b4_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u32mf2_b64(...) __riscv_vmsleu_vv_u32mf2_b64(__VA_ARGS__) |
| #define | vmsleu_vv_u32mf2_b64_m(...) __riscv_vmsleu_vv_u32mf2_b64_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u64m1_b64(...) __riscv_vmsleu_vv_u64m1_b64(__VA_ARGS__) |
| #define | vmsleu_vv_u64m1_b64_m(...) __riscv_vmsleu_vv_u64m1_b64_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u64m2_b32(...) __riscv_vmsleu_vv_u64m2_b32(__VA_ARGS__) |
| #define | vmsleu_vv_u64m2_b32_m(...) __riscv_vmsleu_vv_u64m2_b32_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u64m4_b16(...) __riscv_vmsleu_vv_u64m4_b16(__VA_ARGS__) |
| #define | vmsleu_vv_u64m4_b16_m(...) __riscv_vmsleu_vv_u64m4_b16_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u64m8_b8(...) __riscv_vmsleu_vv_u64m8_b8(__VA_ARGS__) |
| #define | vmsleu_vv_u64m8_b8_m(...) __riscv_vmsleu_vv_u64m8_b8_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u8m1_b8(...) __riscv_vmsleu_vv_u8m1_b8(__VA_ARGS__) |
| #define | vmsleu_vv_u8m1_b8_m(...) __riscv_vmsleu_vv_u8m1_b8_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u8m2_b4(...) __riscv_vmsleu_vv_u8m2_b4(__VA_ARGS__) |
| #define | vmsleu_vv_u8m2_b4_m(...) __riscv_vmsleu_vv_u8m2_b4_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u8m4_b2(...) __riscv_vmsleu_vv_u8m4_b2(__VA_ARGS__) |
| #define | vmsleu_vv_u8m4_b2_m(...) __riscv_vmsleu_vv_u8m4_b2_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u8m8_b1(...) __riscv_vmsleu_vv_u8m8_b1(__VA_ARGS__) |
| #define | vmsleu_vv_u8m8_b1_m(...) __riscv_vmsleu_vv_u8m8_b1_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u8mf2_b16(...) __riscv_vmsleu_vv_u8mf2_b16(__VA_ARGS__) |
| #define | vmsleu_vv_u8mf2_b16_m(...) __riscv_vmsleu_vv_u8mf2_b16_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u8mf4_b32(...) __riscv_vmsleu_vv_u8mf4_b32(__VA_ARGS__) |
| #define | vmsleu_vv_u8mf4_b32_m(...) __riscv_vmsleu_vv_u8mf4_b32_mu(__VA_ARGS__) |
| #define | vmsleu_vv_u8mf8_b64(...) __riscv_vmsleu_vv_u8mf8_b64(__VA_ARGS__) |
| #define | vmsleu_vv_u8mf8_b64_m(...) __riscv_vmsleu_vv_u8mf8_b64_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u16m1_b16(...) __riscv_vmsleu_vx_u16m1_b16(__VA_ARGS__) |
| #define | vmsleu_vx_u16m1_b16_m(...) __riscv_vmsleu_vx_u16m1_b16_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u16m2_b8(...) __riscv_vmsleu_vx_u16m2_b8(__VA_ARGS__) |
| #define | vmsleu_vx_u16m2_b8_m(...) __riscv_vmsleu_vx_u16m2_b8_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u16m4_b4(...) __riscv_vmsleu_vx_u16m4_b4(__VA_ARGS__) |
| #define | vmsleu_vx_u16m4_b4_m(...) __riscv_vmsleu_vx_u16m4_b4_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u16m8_b2(...) __riscv_vmsleu_vx_u16m8_b2(__VA_ARGS__) |
| #define | vmsleu_vx_u16m8_b2_m(...) __riscv_vmsleu_vx_u16m8_b2_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u16mf2_b32(...) __riscv_vmsleu_vx_u16mf2_b32(__VA_ARGS__) |
| #define | vmsleu_vx_u16mf2_b32_m(...) __riscv_vmsleu_vx_u16mf2_b32_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u16mf4_b64(...) __riscv_vmsleu_vx_u16mf4_b64(__VA_ARGS__) |
| #define | vmsleu_vx_u16mf4_b64_m(...) __riscv_vmsleu_vx_u16mf4_b64_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u32m1_b32(...) __riscv_vmsleu_vx_u32m1_b32(__VA_ARGS__) |
| #define | vmsleu_vx_u32m1_b32_m(...) __riscv_vmsleu_vx_u32m1_b32_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u32m2_b16(...) __riscv_vmsleu_vx_u32m2_b16(__VA_ARGS__) |
| #define | vmsleu_vx_u32m2_b16_m(...) __riscv_vmsleu_vx_u32m2_b16_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u32m4_b8(...) __riscv_vmsleu_vx_u32m4_b8(__VA_ARGS__) |
| #define | vmsleu_vx_u32m4_b8_m(...) __riscv_vmsleu_vx_u32m4_b8_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u32m8_b4(...) __riscv_vmsleu_vx_u32m8_b4(__VA_ARGS__) |
| #define | vmsleu_vx_u32m8_b4_m(...) __riscv_vmsleu_vx_u32m8_b4_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u32mf2_b64(...) __riscv_vmsleu_vx_u32mf2_b64(__VA_ARGS__) |
| #define | vmsleu_vx_u32mf2_b64_m(...) __riscv_vmsleu_vx_u32mf2_b64_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u64m1_b64(...) __riscv_vmsleu_vx_u64m1_b64(__VA_ARGS__) |
| #define | vmsleu_vx_u64m1_b64_m(...) __riscv_vmsleu_vx_u64m1_b64_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u64m2_b32(...) __riscv_vmsleu_vx_u64m2_b32(__VA_ARGS__) |
| #define | vmsleu_vx_u64m2_b32_m(...) __riscv_vmsleu_vx_u64m2_b32_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u64m4_b16(...) __riscv_vmsleu_vx_u64m4_b16(__VA_ARGS__) |
| #define | vmsleu_vx_u64m4_b16_m(...) __riscv_vmsleu_vx_u64m4_b16_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u64m8_b8(...) __riscv_vmsleu_vx_u64m8_b8(__VA_ARGS__) |
| #define | vmsleu_vx_u64m8_b8_m(...) __riscv_vmsleu_vx_u64m8_b8_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u8m1_b8(...) __riscv_vmsleu_vx_u8m1_b8(__VA_ARGS__) |
| #define | vmsleu_vx_u8m1_b8_m(...) __riscv_vmsleu_vx_u8m1_b8_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u8m2_b4(...) __riscv_vmsleu_vx_u8m2_b4(__VA_ARGS__) |
| #define | vmsleu_vx_u8m2_b4_m(...) __riscv_vmsleu_vx_u8m2_b4_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u8m4_b2(...) __riscv_vmsleu_vx_u8m4_b2(__VA_ARGS__) |
| #define | vmsleu_vx_u8m4_b2_m(...) __riscv_vmsleu_vx_u8m4_b2_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u8m8_b1(...) __riscv_vmsleu_vx_u8m8_b1(__VA_ARGS__) |
| #define | vmsleu_vx_u8m8_b1_m(...) __riscv_vmsleu_vx_u8m8_b1_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u8mf2_b16(...) __riscv_vmsleu_vx_u8mf2_b16(__VA_ARGS__) |
| #define | vmsleu_vx_u8mf2_b16_m(...) __riscv_vmsleu_vx_u8mf2_b16_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u8mf4_b32(...) __riscv_vmsleu_vx_u8mf4_b32(__VA_ARGS__) |
| #define | vmsleu_vx_u8mf4_b32_m(...) __riscv_vmsleu_vx_u8mf4_b32_mu(__VA_ARGS__) |
| #define | vmsleu_vx_u8mf8_b64(...) __riscv_vmsleu_vx_u8mf8_b64(__VA_ARGS__) |
| #define | vmsleu_vx_u8mf8_b64_m(...) __riscv_vmsleu_vx_u8mf8_b64_mu(__VA_ARGS__) |
| #define | vmslt_vv_i16m1_b16(...) __riscv_vmslt_vv_i16m1_b16(__VA_ARGS__) |
| #define | vmslt_vv_i16m1_b16_m(...) __riscv_vmslt_vv_i16m1_b16_mu(__VA_ARGS__) |
| #define | vmslt_vv_i16m2_b8(...) __riscv_vmslt_vv_i16m2_b8(__VA_ARGS__) |
| #define | vmslt_vv_i16m2_b8_m(...) __riscv_vmslt_vv_i16m2_b8_mu(__VA_ARGS__) |
| #define | vmslt_vv_i16m4_b4(...) __riscv_vmslt_vv_i16m4_b4(__VA_ARGS__) |
| #define | vmslt_vv_i16m4_b4_m(...) __riscv_vmslt_vv_i16m4_b4_mu(__VA_ARGS__) |
| #define | vmslt_vv_i16m8_b2(...) __riscv_vmslt_vv_i16m8_b2(__VA_ARGS__) |
| #define | vmslt_vv_i16m8_b2_m(...) __riscv_vmslt_vv_i16m8_b2_mu(__VA_ARGS__) |
| #define | vmslt_vv_i16mf2_b32(...) __riscv_vmslt_vv_i16mf2_b32(__VA_ARGS__) |
| #define | vmslt_vv_i16mf2_b32_m(...) __riscv_vmslt_vv_i16mf2_b32_mu(__VA_ARGS__) |
| #define | vmslt_vv_i16mf4_b64(...) __riscv_vmslt_vv_i16mf4_b64(__VA_ARGS__) |
| #define | vmslt_vv_i16mf4_b64_m(...) __riscv_vmslt_vv_i16mf4_b64_mu(__VA_ARGS__) |
| #define | vmslt_vv_i32m1_b32(...) __riscv_vmslt_vv_i32m1_b32(__VA_ARGS__) |
| #define | vmslt_vv_i32m1_b32_m(...) __riscv_vmslt_vv_i32m1_b32_mu(__VA_ARGS__) |
| #define | vmslt_vv_i32m2_b16(...) __riscv_vmslt_vv_i32m2_b16(__VA_ARGS__) |
| #define | vmslt_vv_i32m2_b16_m(...) __riscv_vmslt_vv_i32m2_b16_mu(__VA_ARGS__) |
| #define | vmslt_vv_i32m4_b8(...) __riscv_vmslt_vv_i32m4_b8(__VA_ARGS__) |
| #define | vmslt_vv_i32m4_b8_m(...) __riscv_vmslt_vv_i32m4_b8_mu(__VA_ARGS__) |
| #define | vmslt_vv_i32m8_b4(...) __riscv_vmslt_vv_i32m8_b4(__VA_ARGS__) |
| #define | vmslt_vv_i32m8_b4_m(...) __riscv_vmslt_vv_i32m8_b4_mu(__VA_ARGS__) |
| #define | vmslt_vv_i32mf2_b64(...) __riscv_vmslt_vv_i32mf2_b64(__VA_ARGS__) |
| #define | vmslt_vv_i32mf2_b64_m(...) __riscv_vmslt_vv_i32mf2_b64_mu(__VA_ARGS__) |
| #define | vmslt_vv_i64m1_b64(...) __riscv_vmslt_vv_i64m1_b64(__VA_ARGS__) |
| #define | vmslt_vv_i64m1_b64_m(...) __riscv_vmslt_vv_i64m1_b64_mu(__VA_ARGS__) |
| #define | vmslt_vv_i64m2_b32(...) __riscv_vmslt_vv_i64m2_b32(__VA_ARGS__) |
| #define | vmslt_vv_i64m2_b32_m(...) __riscv_vmslt_vv_i64m2_b32_mu(__VA_ARGS__) |
| #define | vmslt_vv_i64m4_b16(...) __riscv_vmslt_vv_i64m4_b16(__VA_ARGS__) |
| #define | vmslt_vv_i64m4_b16_m(...) __riscv_vmslt_vv_i64m4_b16_mu(__VA_ARGS__) |
| #define | vmslt_vv_i64m8_b8(...) __riscv_vmslt_vv_i64m8_b8(__VA_ARGS__) |
| #define | vmslt_vv_i64m8_b8_m(...) __riscv_vmslt_vv_i64m8_b8_mu(__VA_ARGS__) |
| #define | vmslt_vv_i8m1_b8(...) __riscv_vmslt_vv_i8m1_b8(__VA_ARGS__) |
| #define | vmslt_vv_i8m1_b8_m(...) __riscv_vmslt_vv_i8m1_b8_mu(__VA_ARGS__) |
| #define | vmslt_vv_i8m2_b4(...) __riscv_vmslt_vv_i8m2_b4(__VA_ARGS__) |
| #define | vmslt_vv_i8m2_b4_m(...) __riscv_vmslt_vv_i8m2_b4_mu(__VA_ARGS__) |
| #define | vmslt_vv_i8m4_b2(...) __riscv_vmslt_vv_i8m4_b2(__VA_ARGS__) |
| #define | vmslt_vv_i8m4_b2_m(...) __riscv_vmslt_vv_i8m4_b2_mu(__VA_ARGS__) |
| #define | vmslt_vv_i8m8_b1(...) __riscv_vmslt_vv_i8m8_b1(__VA_ARGS__) |
| #define | vmslt_vv_i8m8_b1_m(...) __riscv_vmslt_vv_i8m8_b1_mu(__VA_ARGS__) |
| #define | vmslt_vv_i8mf2_b16(...) __riscv_vmslt_vv_i8mf2_b16(__VA_ARGS__) |
| #define | vmslt_vv_i8mf2_b16_m(...) __riscv_vmslt_vv_i8mf2_b16_mu(__VA_ARGS__) |
| #define | vmslt_vv_i8mf4_b32(...) __riscv_vmslt_vv_i8mf4_b32(__VA_ARGS__) |
| #define | vmslt_vv_i8mf4_b32_m(...) __riscv_vmslt_vv_i8mf4_b32_mu(__VA_ARGS__) |
| #define | vmslt_vv_i8mf8_b64(...) __riscv_vmslt_vv_i8mf8_b64(__VA_ARGS__) |
| #define | vmslt_vv_i8mf8_b64_m(...) __riscv_vmslt_vv_i8mf8_b64_mu(__VA_ARGS__) |
| #define | vmslt_vx_i16m1_b16(...) __riscv_vmslt_vx_i16m1_b16(__VA_ARGS__) |
| #define | vmslt_vx_i16m1_b16_m(...) __riscv_vmslt_vx_i16m1_b16_mu(__VA_ARGS__) |
| #define | vmslt_vx_i16m2_b8(...) __riscv_vmslt_vx_i16m2_b8(__VA_ARGS__) |
| #define | vmslt_vx_i16m2_b8_m(...) __riscv_vmslt_vx_i16m2_b8_mu(__VA_ARGS__) |
| #define | vmslt_vx_i16m4_b4(...) __riscv_vmslt_vx_i16m4_b4(__VA_ARGS__) |
| #define | vmslt_vx_i16m4_b4_m(...) __riscv_vmslt_vx_i16m4_b4_mu(__VA_ARGS__) |
| #define | vmslt_vx_i16m8_b2(...) __riscv_vmslt_vx_i16m8_b2(__VA_ARGS__) |
| #define | vmslt_vx_i16m8_b2_m(...) __riscv_vmslt_vx_i16m8_b2_mu(__VA_ARGS__) |
| #define | vmslt_vx_i16mf2_b32(...) __riscv_vmslt_vx_i16mf2_b32(__VA_ARGS__) |
| #define | vmslt_vx_i16mf2_b32_m(...) __riscv_vmslt_vx_i16mf2_b32_mu(__VA_ARGS__) |
| #define | vmslt_vx_i16mf4_b64(...) __riscv_vmslt_vx_i16mf4_b64(__VA_ARGS__) |
| #define | vmslt_vx_i16mf4_b64_m(...) __riscv_vmslt_vx_i16mf4_b64_mu(__VA_ARGS__) |
| #define | vmslt_vx_i32m1_b32(...) __riscv_vmslt_vx_i32m1_b32(__VA_ARGS__) |
| #define | vmslt_vx_i32m1_b32_m(...) __riscv_vmslt_vx_i32m1_b32_mu(__VA_ARGS__) |
| #define | vmslt_vx_i32m2_b16(...) __riscv_vmslt_vx_i32m2_b16(__VA_ARGS__) |
| #define | vmslt_vx_i32m2_b16_m(...) __riscv_vmslt_vx_i32m2_b16_mu(__VA_ARGS__) |
| #define | vmslt_vx_i32m4_b8(...) __riscv_vmslt_vx_i32m4_b8(__VA_ARGS__) |
| #define | vmslt_vx_i32m4_b8_m(...) __riscv_vmslt_vx_i32m4_b8_mu(__VA_ARGS__) |
| #define | vmslt_vx_i32m8_b4(...) __riscv_vmslt_vx_i32m8_b4(__VA_ARGS__) |
| #define | vmslt_vx_i32m8_b4_m(...) __riscv_vmslt_vx_i32m8_b4_mu(__VA_ARGS__) |
| #define | vmslt_vx_i32mf2_b64(...) __riscv_vmslt_vx_i32mf2_b64(__VA_ARGS__) |
| #define | vmslt_vx_i32mf2_b64_m(...) __riscv_vmslt_vx_i32mf2_b64_mu(__VA_ARGS__) |
| #define | vmslt_vx_i64m1_b64(...) __riscv_vmslt_vx_i64m1_b64(__VA_ARGS__) |
| #define | vmslt_vx_i64m1_b64_m(...) __riscv_vmslt_vx_i64m1_b64_mu(__VA_ARGS__) |
| #define | vmslt_vx_i64m2_b32(...) __riscv_vmslt_vx_i64m2_b32(__VA_ARGS__) |
| #define | vmslt_vx_i64m2_b32_m(...) __riscv_vmslt_vx_i64m2_b32_mu(__VA_ARGS__) |
| #define | vmslt_vx_i64m4_b16(...) __riscv_vmslt_vx_i64m4_b16(__VA_ARGS__) |
| #define | vmslt_vx_i64m4_b16_m(...) __riscv_vmslt_vx_i64m4_b16_mu(__VA_ARGS__) |
| #define | vmslt_vx_i64m8_b8(...) __riscv_vmslt_vx_i64m8_b8(__VA_ARGS__) |
| #define | vmslt_vx_i64m8_b8_m(...) __riscv_vmslt_vx_i64m8_b8_mu(__VA_ARGS__) |
| #define | vmslt_vx_i8m1_b8(...) __riscv_vmslt_vx_i8m1_b8(__VA_ARGS__) |
| #define | vmslt_vx_i8m1_b8_m(...) __riscv_vmslt_vx_i8m1_b8_mu(__VA_ARGS__) |
| #define | vmslt_vx_i8m2_b4(...) __riscv_vmslt_vx_i8m2_b4(__VA_ARGS__) |
| #define | vmslt_vx_i8m2_b4_m(...) __riscv_vmslt_vx_i8m2_b4_mu(__VA_ARGS__) |
| #define | vmslt_vx_i8m4_b2(...) __riscv_vmslt_vx_i8m4_b2(__VA_ARGS__) |
| #define | vmslt_vx_i8m4_b2_m(...) __riscv_vmslt_vx_i8m4_b2_mu(__VA_ARGS__) |
| #define | vmslt_vx_i8m8_b1(...) __riscv_vmslt_vx_i8m8_b1(__VA_ARGS__) |
| #define | vmslt_vx_i8m8_b1_m(...) __riscv_vmslt_vx_i8m8_b1_mu(__VA_ARGS__) |
| #define | vmslt_vx_i8mf2_b16(...) __riscv_vmslt_vx_i8mf2_b16(__VA_ARGS__) |
| #define | vmslt_vx_i8mf2_b16_m(...) __riscv_vmslt_vx_i8mf2_b16_mu(__VA_ARGS__) |
| #define | vmslt_vx_i8mf4_b32(...) __riscv_vmslt_vx_i8mf4_b32(__VA_ARGS__) |
| #define | vmslt_vx_i8mf4_b32_m(...) __riscv_vmslt_vx_i8mf4_b32_mu(__VA_ARGS__) |
| #define | vmslt_vx_i8mf8_b64(...) __riscv_vmslt_vx_i8mf8_b64(__VA_ARGS__) |
| #define | vmslt_vx_i8mf8_b64_m(...) __riscv_vmslt_vx_i8mf8_b64_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u16m1_b16(...) __riscv_vmsltu_vv_u16m1_b16(__VA_ARGS__) |
| #define | vmsltu_vv_u16m1_b16_m(...) __riscv_vmsltu_vv_u16m1_b16_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u16m2_b8(...) __riscv_vmsltu_vv_u16m2_b8(__VA_ARGS__) |
| #define | vmsltu_vv_u16m2_b8_m(...) __riscv_vmsltu_vv_u16m2_b8_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u16m4_b4(...) __riscv_vmsltu_vv_u16m4_b4(__VA_ARGS__) |
| #define | vmsltu_vv_u16m4_b4_m(...) __riscv_vmsltu_vv_u16m4_b4_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u16m8_b2(...) __riscv_vmsltu_vv_u16m8_b2(__VA_ARGS__) |
| #define | vmsltu_vv_u16m8_b2_m(...) __riscv_vmsltu_vv_u16m8_b2_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u16mf2_b32(...) __riscv_vmsltu_vv_u16mf2_b32(__VA_ARGS__) |
| #define | vmsltu_vv_u16mf2_b32_m(...) __riscv_vmsltu_vv_u16mf2_b32_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u16mf4_b64(...) __riscv_vmsltu_vv_u16mf4_b64(__VA_ARGS__) |
| #define | vmsltu_vv_u16mf4_b64_m(...) __riscv_vmsltu_vv_u16mf4_b64_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u32m1_b32(...) __riscv_vmsltu_vv_u32m1_b32(__VA_ARGS__) |
| #define | vmsltu_vv_u32m1_b32_m(...) __riscv_vmsltu_vv_u32m1_b32_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u32m2_b16(...) __riscv_vmsltu_vv_u32m2_b16(__VA_ARGS__) |
| #define | vmsltu_vv_u32m2_b16_m(...) __riscv_vmsltu_vv_u32m2_b16_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u32m4_b8(...) __riscv_vmsltu_vv_u32m4_b8(__VA_ARGS__) |
| #define | vmsltu_vv_u32m4_b8_m(...) __riscv_vmsltu_vv_u32m4_b8_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u32m8_b4(...) __riscv_vmsltu_vv_u32m8_b4(__VA_ARGS__) |
| #define | vmsltu_vv_u32m8_b4_m(...) __riscv_vmsltu_vv_u32m8_b4_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u32mf2_b64(...) __riscv_vmsltu_vv_u32mf2_b64(__VA_ARGS__) |
| #define | vmsltu_vv_u32mf2_b64_m(...) __riscv_vmsltu_vv_u32mf2_b64_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u64m1_b64(...) __riscv_vmsltu_vv_u64m1_b64(__VA_ARGS__) |
| #define | vmsltu_vv_u64m1_b64_m(...) __riscv_vmsltu_vv_u64m1_b64_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u64m2_b32(...) __riscv_vmsltu_vv_u64m2_b32(__VA_ARGS__) |
| #define | vmsltu_vv_u64m2_b32_m(...) __riscv_vmsltu_vv_u64m2_b32_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u64m4_b16(...) __riscv_vmsltu_vv_u64m4_b16(__VA_ARGS__) |
| #define | vmsltu_vv_u64m4_b16_m(...) __riscv_vmsltu_vv_u64m4_b16_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u64m8_b8(...) __riscv_vmsltu_vv_u64m8_b8(__VA_ARGS__) |
| #define | vmsltu_vv_u64m8_b8_m(...) __riscv_vmsltu_vv_u64m8_b8_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u8m1_b8(...) __riscv_vmsltu_vv_u8m1_b8(__VA_ARGS__) |
| #define | vmsltu_vv_u8m1_b8_m(...) __riscv_vmsltu_vv_u8m1_b8_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u8m2_b4(...) __riscv_vmsltu_vv_u8m2_b4(__VA_ARGS__) |
| #define | vmsltu_vv_u8m2_b4_m(...) __riscv_vmsltu_vv_u8m2_b4_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u8m4_b2(...) __riscv_vmsltu_vv_u8m4_b2(__VA_ARGS__) |
| #define | vmsltu_vv_u8m4_b2_m(...) __riscv_vmsltu_vv_u8m4_b2_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u8m8_b1(...) __riscv_vmsltu_vv_u8m8_b1(__VA_ARGS__) |
| #define | vmsltu_vv_u8m8_b1_m(...) __riscv_vmsltu_vv_u8m8_b1_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u8mf2_b16(...) __riscv_vmsltu_vv_u8mf2_b16(__VA_ARGS__) |
| #define | vmsltu_vv_u8mf2_b16_m(...) __riscv_vmsltu_vv_u8mf2_b16_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u8mf4_b32(...) __riscv_vmsltu_vv_u8mf4_b32(__VA_ARGS__) |
| #define | vmsltu_vv_u8mf4_b32_m(...) __riscv_vmsltu_vv_u8mf4_b32_mu(__VA_ARGS__) |
| #define | vmsltu_vv_u8mf8_b64(...) __riscv_vmsltu_vv_u8mf8_b64(__VA_ARGS__) |
| #define | vmsltu_vv_u8mf8_b64_m(...) __riscv_vmsltu_vv_u8mf8_b64_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u16m1_b16(...) __riscv_vmsltu_vx_u16m1_b16(__VA_ARGS__) |
| #define | vmsltu_vx_u16m1_b16_m(...) __riscv_vmsltu_vx_u16m1_b16_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u16m2_b8(...) __riscv_vmsltu_vx_u16m2_b8(__VA_ARGS__) |
| #define | vmsltu_vx_u16m2_b8_m(...) __riscv_vmsltu_vx_u16m2_b8_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u16m4_b4(...) __riscv_vmsltu_vx_u16m4_b4(__VA_ARGS__) |
| #define | vmsltu_vx_u16m4_b4_m(...) __riscv_vmsltu_vx_u16m4_b4_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u16m8_b2(...) __riscv_vmsltu_vx_u16m8_b2(__VA_ARGS__) |
| #define | vmsltu_vx_u16m8_b2_m(...) __riscv_vmsltu_vx_u16m8_b2_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u16mf2_b32(...) __riscv_vmsltu_vx_u16mf2_b32(__VA_ARGS__) |
| #define | vmsltu_vx_u16mf2_b32_m(...) __riscv_vmsltu_vx_u16mf2_b32_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u16mf4_b64(...) __riscv_vmsltu_vx_u16mf4_b64(__VA_ARGS__) |
| #define | vmsltu_vx_u16mf4_b64_m(...) __riscv_vmsltu_vx_u16mf4_b64_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u32m1_b32(...) __riscv_vmsltu_vx_u32m1_b32(__VA_ARGS__) |
| #define | vmsltu_vx_u32m1_b32_m(...) __riscv_vmsltu_vx_u32m1_b32_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u32m2_b16(...) __riscv_vmsltu_vx_u32m2_b16(__VA_ARGS__) |
| #define | vmsltu_vx_u32m2_b16_m(...) __riscv_vmsltu_vx_u32m2_b16_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u32m4_b8(...) __riscv_vmsltu_vx_u32m4_b8(__VA_ARGS__) |
| #define | vmsltu_vx_u32m4_b8_m(...) __riscv_vmsltu_vx_u32m4_b8_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u32m8_b4(...) __riscv_vmsltu_vx_u32m8_b4(__VA_ARGS__) |
| #define | vmsltu_vx_u32m8_b4_m(...) __riscv_vmsltu_vx_u32m8_b4_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u32mf2_b64(...) __riscv_vmsltu_vx_u32mf2_b64(__VA_ARGS__) |
| #define | vmsltu_vx_u32mf2_b64_m(...) __riscv_vmsltu_vx_u32mf2_b64_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u64m1_b64(...) __riscv_vmsltu_vx_u64m1_b64(__VA_ARGS__) |
| #define | vmsltu_vx_u64m1_b64_m(...) __riscv_vmsltu_vx_u64m1_b64_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u64m2_b32(...) __riscv_vmsltu_vx_u64m2_b32(__VA_ARGS__) |
| #define | vmsltu_vx_u64m2_b32_m(...) __riscv_vmsltu_vx_u64m2_b32_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u64m4_b16(...) __riscv_vmsltu_vx_u64m4_b16(__VA_ARGS__) |
| #define | vmsltu_vx_u64m4_b16_m(...) __riscv_vmsltu_vx_u64m4_b16_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u64m8_b8(...) __riscv_vmsltu_vx_u64m8_b8(__VA_ARGS__) |
| #define | vmsltu_vx_u64m8_b8_m(...) __riscv_vmsltu_vx_u64m8_b8_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u8m1_b8(...) __riscv_vmsltu_vx_u8m1_b8(__VA_ARGS__) |
| #define | vmsltu_vx_u8m1_b8_m(...) __riscv_vmsltu_vx_u8m1_b8_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u8m2_b4(...) __riscv_vmsltu_vx_u8m2_b4(__VA_ARGS__) |
| #define | vmsltu_vx_u8m2_b4_m(...) __riscv_vmsltu_vx_u8m2_b4_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u8m4_b2(...) __riscv_vmsltu_vx_u8m4_b2(__VA_ARGS__) |
| #define | vmsltu_vx_u8m4_b2_m(...) __riscv_vmsltu_vx_u8m4_b2_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u8m8_b1(...) __riscv_vmsltu_vx_u8m8_b1(__VA_ARGS__) |
| #define | vmsltu_vx_u8m8_b1_m(...) __riscv_vmsltu_vx_u8m8_b1_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u8mf2_b16(...) __riscv_vmsltu_vx_u8mf2_b16(__VA_ARGS__) |
| #define | vmsltu_vx_u8mf2_b16_m(...) __riscv_vmsltu_vx_u8mf2_b16_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u8mf4_b32(...) __riscv_vmsltu_vx_u8mf4_b32(__VA_ARGS__) |
| #define | vmsltu_vx_u8mf4_b32_m(...) __riscv_vmsltu_vx_u8mf4_b32_mu(__VA_ARGS__) |
| #define | vmsltu_vx_u8mf8_b64(...) __riscv_vmsltu_vx_u8mf8_b64(__VA_ARGS__) |
| #define | vmsltu_vx_u8mf8_b64_m(...) __riscv_vmsltu_vx_u8mf8_b64_mu(__VA_ARGS__) |
| #define | vmsne_vv_i16m1_b16(...) __riscv_vmsne_vv_i16m1_b16(__VA_ARGS__) |
| #define | vmsne_vv_i16m1_b16_m(...) __riscv_vmsne_vv_i16m1_b16_mu(__VA_ARGS__) |
| #define | vmsne_vv_i16m2_b8(...) __riscv_vmsne_vv_i16m2_b8(__VA_ARGS__) |
| #define | vmsne_vv_i16m2_b8_m(...) __riscv_vmsne_vv_i16m2_b8_mu(__VA_ARGS__) |
| #define | vmsne_vv_i16m4_b4(...) __riscv_vmsne_vv_i16m4_b4(__VA_ARGS__) |
| #define | vmsne_vv_i16m4_b4_m(...) __riscv_vmsne_vv_i16m4_b4_mu(__VA_ARGS__) |
| #define | vmsne_vv_i16m8_b2(...) __riscv_vmsne_vv_i16m8_b2(__VA_ARGS__) |
| #define | vmsne_vv_i16m8_b2_m(...) __riscv_vmsne_vv_i16m8_b2_mu(__VA_ARGS__) |
| #define | vmsne_vv_i16mf2_b32(...) __riscv_vmsne_vv_i16mf2_b32(__VA_ARGS__) |
| #define | vmsne_vv_i16mf2_b32_m(...) __riscv_vmsne_vv_i16mf2_b32_mu(__VA_ARGS__) |
| #define | vmsne_vv_i16mf4_b64(...) __riscv_vmsne_vv_i16mf4_b64(__VA_ARGS__) |
| #define | vmsne_vv_i16mf4_b64_m(...) __riscv_vmsne_vv_i16mf4_b64_mu(__VA_ARGS__) |
| #define | vmsne_vv_i32m1_b32(...) __riscv_vmsne_vv_i32m1_b32(__VA_ARGS__) |
| #define | vmsne_vv_i32m1_b32_m(...) __riscv_vmsne_vv_i32m1_b32_mu(__VA_ARGS__) |
| #define | vmsne_vv_i32m2_b16(...) __riscv_vmsne_vv_i32m2_b16(__VA_ARGS__) |
| #define | vmsne_vv_i32m2_b16_m(...) __riscv_vmsne_vv_i32m2_b16_mu(__VA_ARGS__) |
| #define | vmsne_vv_i32m4_b8(...) __riscv_vmsne_vv_i32m4_b8(__VA_ARGS__) |
| #define | vmsne_vv_i32m4_b8_m(...) __riscv_vmsne_vv_i32m4_b8_mu(__VA_ARGS__) |
| #define | vmsne_vv_i32m8_b4(...) __riscv_vmsne_vv_i32m8_b4(__VA_ARGS__) |
| #define | vmsne_vv_i32m8_b4_m(...) __riscv_vmsne_vv_i32m8_b4_mu(__VA_ARGS__) |
| #define | vmsne_vv_i32mf2_b64(...) __riscv_vmsne_vv_i32mf2_b64(__VA_ARGS__) |
| #define | vmsne_vv_i32mf2_b64_m(...) __riscv_vmsne_vv_i32mf2_b64_mu(__VA_ARGS__) |
| #define | vmsne_vv_i64m1_b64(...) __riscv_vmsne_vv_i64m1_b64(__VA_ARGS__) |
| #define | vmsne_vv_i64m1_b64_m(...) __riscv_vmsne_vv_i64m1_b64_mu(__VA_ARGS__) |
| #define | vmsne_vv_i64m2_b32(...) __riscv_vmsne_vv_i64m2_b32(__VA_ARGS__) |
| #define | vmsne_vv_i64m2_b32_m(...) __riscv_vmsne_vv_i64m2_b32_mu(__VA_ARGS__) |
| #define | vmsne_vv_i64m4_b16(...) __riscv_vmsne_vv_i64m4_b16(__VA_ARGS__) |
| #define | vmsne_vv_i64m4_b16_m(...) __riscv_vmsne_vv_i64m4_b16_mu(__VA_ARGS__) |
| #define | vmsne_vv_i64m8_b8(...) __riscv_vmsne_vv_i64m8_b8(__VA_ARGS__) |
| #define | vmsne_vv_i64m8_b8_m(...) __riscv_vmsne_vv_i64m8_b8_mu(__VA_ARGS__) |
| #define | vmsne_vv_i8m1_b8(...) __riscv_vmsne_vv_i8m1_b8(__VA_ARGS__) |
| #define | vmsne_vv_i8m1_b8_m(...) __riscv_vmsne_vv_i8m1_b8_mu(__VA_ARGS__) |
| #define | vmsne_vv_i8m2_b4(...) __riscv_vmsne_vv_i8m2_b4(__VA_ARGS__) |
| #define | vmsne_vv_i8m2_b4_m(...) __riscv_vmsne_vv_i8m2_b4_mu(__VA_ARGS__) |
| #define | vmsne_vv_i8m4_b2(...) __riscv_vmsne_vv_i8m4_b2(__VA_ARGS__) |
| #define | vmsne_vv_i8m4_b2_m(...) __riscv_vmsne_vv_i8m4_b2_mu(__VA_ARGS__) |
| #define | vmsne_vv_i8m8_b1(...) __riscv_vmsne_vv_i8m8_b1(__VA_ARGS__) |
| #define | vmsne_vv_i8m8_b1_m(...) __riscv_vmsne_vv_i8m8_b1_mu(__VA_ARGS__) |
| #define | vmsne_vv_i8mf2_b16(...) __riscv_vmsne_vv_i8mf2_b16(__VA_ARGS__) |
| #define | vmsne_vv_i8mf2_b16_m(...) __riscv_vmsne_vv_i8mf2_b16_mu(__VA_ARGS__) |
| #define | vmsne_vv_i8mf4_b32(...) __riscv_vmsne_vv_i8mf4_b32(__VA_ARGS__) |
| #define | vmsne_vv_i8mf4_b32_m(...) __riscv_vmsne_vv_i8mf4_b32_mu(__VA_ARGS__) |
| #define | vmsne_vv_i8mf8_b64(...) __riscv_vmsne_vv_i8mf8_b64(__VA_ARGS__) |
| #define | vmsne_vv_i8mf8_b64_m(...) __riscv_vmsne_vv_i8mf8_b64_mu(__VA_ARGS__) |
| #define | vmsne_vv_u16m1_b16(...) __riscv_vmsne_vv_u16m1_b16(__VA_ARGS__) |
| #define | vmsne_vv_u16m1_b16_m(...) __riscv_vmsne_vv_u16m1_b16_mu(__VA_ARGS__) |
| #define | vmsne_vv_u16m2_b8(...) __riscv_vmsne_vv_u16m2_b8(__VA_ARGS__) |
| #define | vmsne_vv_u16m2_b8_m(...) __riscv_vmsne_vv_u16m2_b8_mu(__VA_ARGS__) |
| #define | vmsne_vv_u16m4_b4(...) __riscv_vmsne_vv_u16m4_b4(__VA_ARGS__) |
| #define | vmsne_vv_u16m4_b4_m(...) __riscv_vmsne_vv_u16m4_b4_mu(__VA_ARGS__) |
| #define | vmsne_vv_u16m8_b2(...) __riscv_vmsne_vv_u16m8_b2(__VA_ARGS__) |
| #define | vmsne_vv_u16m8_b2_m(...) __riscv_vmsne_vv_u16m8_b2_mu(__VA_ARGS__) |
| #define | vmsne_vv_u16mf2_b32(...) __riscv_vmsne_vv_u16mf2_b32(__VA_ARGS__) |
| #define | vmsne_vv_u16mf2_b32_m(...) __riscv_vmsne_vv_u16mf2_b32_mu(__VA_ARGS__) |
| #define | vmsne_vv_u16mf4_b64(...) __riscv_vmsne_vv_u16mf4_b64(__VA_ARGS__) |
| #define | vmsne_vv_u16mf4_b64_m(...) __riscv_vmsne_vv_u16mf4_b64_mu(__VA_ARGS__) |
| #define | vmsne_vv_u32m1_b32(...) __riscv_vmsne_vv_u32m1_b32(__VA_ARGS__) |
| #define | vmsne_vv_u32m1_b32_m(...) __riscv_vmsne_vv_u32m1_b32_mu(__VA_ARGS__) |
| #define | vmsne_vv_u32m2_b16(...) __riscv_vmsne_vv_u32m2_b16(__VA_ARGS__) |
| #define | vmsne_vv_u32m2_b16_m(...) __riscv_vmsne_vv_u32m2_b16_mu(__VA_ARGS__) |
| #define | vmsne_vv_u32m4_b8(...) __riscv_vmsne_vv_u32m4_b8(__VA_ARGS__) |
| #define | vmsne_vv_u32m4_b8_m(...) __riscv_vmsne_vv_u32m4_b8_mu(__VA_ARGS__) |
| #define | vmsne_vv_u32m8_b4(...) __riscv_vmsne_vv_u32m8_b4(__VA_ARGS__) |
| #define | vmsne_vv_u32m8_b4_m(...) __riscv_vmsne_vv_u32m8_b4_mu(__VA_ARGS__) |
| #define | vmsne_vv_u32mf2_b64(...) __riscv_vmsne_vv_u32mf2_b64(__VA_ARGS__) |
| #define | vmsne_vv_u32mf2_b64_m(...) __riscv_vmsne_vv_u32mf2_b64_mu(__VA_ARGS__) |
| #define | vmsne_vv_u64m1_b64(...) __riscv_vmsne_vv_u64m1_b64(__VA_ARGS__) |
| #define | vmsne_vv_u64m1_b64_m(...) __riscv_vmsne_vv_u64m1_b64_mu(__VA_ARGS__) |
| #define | vmsne_vv_u64m2_b32(...) __riscv_vmsne_vv_u64m2_b32(__VA_ARGS__) |
| #define | vmsne_vv_u64m2_b32_m(...) __riscv_vmsne_vv_u64m2_b32_mu(__VA_ARGS__) |
| #define | vmsne_vv_u64m4_b16(...) __riscv_vmsne_vv_u64m4_b16(__VA_ARGS__) |
| #define | vmsne_vv_u64m4_b16_m(...) __riscv_vmsne_vv_u64m4_b16_mu(__VA_ARGS__) |
| #define | vmsne_vv_u64m8_b8(...) __riscv_vmsne_vv_u64m8_b8(__VA_ARGS__) |
| #define | vmsne_vv_u64m8_b8_m(...) __riscv_vmsne_vv_u64m8_b8_mu(__VA_ARGS__) |
| #define | vmsne_vv_u8m1_b8(...) __riscv_vmsne_vv_u8m1_b8(__VA_ARGS__) |
| #define | vmsne_vv_u8m1_b8_m(...) __riscv_vmsne_vv_u8m1_b8_mu(__VA_ARGS__) |
| #define | vmsne_vv_u8m2_b4(...) __riscv_vmsne_vv_u8m2_b4(__VA_ARGS__) |
| #define | vmsne_vv_u8m2_b4_m(...) __riscv_vmsne_vv_u8m2_b4_mu(__VA_ARGS__) |
| #define | vmsne_vv_u8m4_b2(...) __riscv_vmsne_vv_u8m4_b2(__VA_ARGS__) |
| #define | vmsne_vv_u8m4_b2_m(...) __riscv_vmsne_vv_u8m4_b2_mu(__VA_ARGS__) |
| #define | vmsne_vv_u8m8_b1(...) __riscv_vmsne_vv_u8m8_b1(__VA_ARGS__) |
| #define | vmsne_vv_u8m8_b1_m(...) __riscv_vmsne_vv_u8m8_b1_mu(__VA_ARGS__) |
| #define | vmsne_vv_u8mf2_b16(...) __riscv_vmsne_vv_u8mf2_b16(__VA_ARGS__) |
| #define | vmsne_vv_u8mf2_b16_m(...) __riscv_vmsne_vv_u8mf2_b16_mu(__VA_ARGS__) |
| #define | vmsne_vv_u8mf4_b32(...) __riscv_vmsne_vv_u8mf4_b32(__VA_ARGS__) |
| #define | vmsne_vv_u8mf4_b32_m(...) __riscv_vmsne_vv_u8mf4_b32_mu(__VA_ARGS__) |
| #define | vmsne_vv_u8mf8_b64(...) __riscv_vmsne_vv_u8mf8_b64(__VA_ARGS__) |
| #define | vmsne_vv_u8mf8_b64_m(...) __riscv_vmsne_vv_u8mf8_b64_mu(__VA_ARGS__) |
| #define | vmsne_vx_i16m1_b16(...) __riscv_vmsne_vx_i16m1_b16(__VA_ARGS__) |
| #define | vmsne_vx_i16m1_b16_m(...) __riscv_vmsne_vx_i16m1_b16_mu(__VA_ARGS__) |
| #define | vmsne_vx_i16m2_b8(...) __riscv_vmsne_vx_i16m2_b8(__VA_ARGS__) |
| #define | vmsne_vx_i16m2_b8_m(...) __riscv_vmsne_vx_i16m2_b8_mu(__VA_ARGS__) |
| #define | vmsne_vx_i16m4_b4(...) __riscv_vmsne_vx_i16m4_b4(__VA_ARGS__) |
| #define | vmsne_vx_i16m4_b4_m(...) __riscv_vmsne_vx_i16m4_b4_mu(__VA_ARGS__) |
| #define | vmsne_vx_i16m8_b2(...) __riscv_vmsne_vx_i16m8_b2(__VA_ARGS__) |
| #define | vmsne_vx_i16m8_b2_m(...) __riscv_vmsne_vx_i16m8_b2_mu(__VA_ARGS__) |
| #define | vmsne_vx_i16mf2_b32(...) __riscv_vmsne_vx_i16mf2_b32(__VA_ARGS__) |
| #define | vmsne_vx_i16mf2_b32_m(...) __riscv_vmsne_vx_i16mf2_b32_mu(__VA_ARGS__) |
| #define | vmsne_vx_i16mf4_b64(...) __riscv_vmsne_vx_i16mf4_b64(__VA_ARGS__) |
| #define | vmsne_vx_i16mf4_b64_m(...) __riscv_vmsne_vx_i16mf4_b64_mu(__VA_ARGS__) |
| #define | vmsne_vx_i32m1_b32(...) __riscv_vmsne_vx_i32m1_b32(__VA_ARGS__) |
| #define | vmsne_vx_i32m1_b32_m(...) __riscv_vmsne_vx_i32m1_b32_mu(__VA_ARGS__) |
| #define | vmsne_vx_i32m2_b16(...) __riscv_vmsne_vx_i32m2_b16(__VA_ARGS__) |
| #define | vmsne_vx_i32m2_b16_m(...) __riscv_vmsne_vx_i32m2_b16_mu(__VA_ARGS__) |
| #define | vmsne_vx_i32m4_b8(...) __riscv_vmsne_vx_i32m4_b8(__VA_ARGS__) |
| #define | vmsne_vx_i32m4_b8_m(...) __riscv_vmsne_vx_i32m4_b8_mu(__VA_ARGS__) |
| #define | vmsne_vx_i32m8_b4(...) __riscv_vmsne_vx_i32m8_b4(__VA_ARGS__) |
| #define | vmsne_vx_i32m8_b4_m(...) __riscv_vmsne_vx_i32m8_b4_mu(__VA_ARGS__) |
| #define | vmsne_vx_i32mf2_b64(...) __riscv_vmsne_vx_i32mf2_b64(__VA_ARGS__) |
| #define | vmsne_vx_i32mf2_b64_m(...) __riscv_vmsne_vx_i32mf2_b64_mu(__VA_ARGS__) |
| #define | vmsne_vx_i64m1_b64(...) __riscv_vmsne_vx_i64m1_b64(__VA_ARGS__) |
| #define | vmsne_vx_i64m1_b64_m(...) __riscv_vmsne_vx_i64m1_b64_mu(__VA_ARGS__) |
| #define | vmsne_vx_i64m2_b32(...) __riscv_vmsne_vx_i64m2_b32(__VA_ARGS__) |
| #define | vmsne_vx_i64m2_b32_m(...) __riscv_vmsne_vx_i64m2_b32_mu(__VA_ARGS__) |
| #define | vmsne_vx_i64m4_b16(...) __riscv_vmsne_vx_i64m4_b16(__VA_ARGS__) |
| #define | vmsne_vx_i64m4_b16_m(...) __riscv_vmsne_vx_i64m4_b16_mu(__VA_ARGS__) |
| #define | vmsne_vx_i64m8_b8(...) __riscv_vmsne_vx_i64m8_b8(__VA_ARGS__) |
| #define | vmsne_vx_i64m8_b8_m(...) __riscv_vmsne_vx_i64m8_b8_mu(__VA_ARGS__) |
| #define | vmsne_vx_i8m1_b8(...) __riscv_vmsne_vx_i8m1_b8(__VA_ARGS__) |
| #define | vmsne_vx_i8m1_b8_m(...) __riscv_vmsne_vx_i8m1_b8_mu(__VA_ARGS__) |
| #define | vmsne_vx_i8m2_b4(...) __riscv_vmsne_vx_i8m2_b4(__VA_ARGS__) |
| #define | vmsne_vx_i8m2_b4_m(...) __riscv_vmsne_vx_i8m2_b4_mu(__VA_ARGS__) |
| #define | vmsne_vx_i8m4_b2(...) __riscv_vmsne_vx_i8m4_b2(__VA_ARGS__) |
| #define | vmsne_vx_i8m4_b2_m(...) __riscv_vmsne_vx_i8m4_b2_mu(__VA_ARGS__) |
| #define | vmsne_vx_i8m8_b1(...) __riscv_vmsne_vx_i8m8_b1(__VA_ARGS__) |
| #define | vmsne_vx_i8m8_b1_m(...) __riscv_vmsne_vx_i8m8_b1_mu(__VA_ARGS__) |
| #define | vmsne_vx_i8mf2_b16(...) __riscv_vmsne_vx_i8mf2_b16(__VA_ARGS__) |
| #define | vmsne_vx_i8mf2_b16_m(...) __riscv_vmsne_vx_i8mf2_b16_mu(__VA_ARGS__) |
| #define | vmsne_vx_i8mf4_b32(...) __riscv_vmsne_vx_i8mf4_b32(__VA_ARGS__) |
| #define | vmsne_vx_i8mf4_b32_m(...) __riscv_vmsne_vx_i8mf4_b32_mu(__VA_ARGS__) |
| #define | vmsne_vx_i8mf8_b64(...) __riscv_vmsne_vx_i8mf8_b64(__VA_ARGS__) |
| #define | vmsne_vx_i8mf8_b64_m(...) __riscv_vmsne_vx_i8mf8_b64_mu(__VA_ARGS__) |
| #define | vmsne_vx_u16m1_b16(...) __riscv_vmsne_vx_u16m1_b16(__VA_ARGS__) |
| #define | vmsne_vx_u16m1_b16_m(...) __riscv_vmsne_vx_u16m1_b16_mu(__VA_ARGS__) |
| #define | vmsne_vx_u16m2_b8(...) __riscv_vmsne_vx_u16m2_b8(__VA_ARGS__) |
| #define | vmsne_vx_u16m2_b8_m(...) __riscv_vmsne_vx_u16m2_b8_mu(__VA_ARGS__) |
| #define | vmsne_vx_u16m4_b4(...) __riscv_vmsne_vx_u16m4_b4(__VA_ARGS__) |
| #define | vmsne_vx_u16m4_b4_m(...) __riscv_vmsne_vx_u16m4_b4_mu(__VA_ARGS__) |
| #define | vmsne_vx_u16m8_b2(...) __riscv_vmsne_vx_u16m8_b2(__VA_ARGS__) |
| #define | vmsne_vx_u16m8_b2_m(...) __riscv_vmsne_vx_u16m8_b2_mu(__VA_ARGS__) |
| #define | vmsne_vx_u16mf2_b32(...) __riscv_vmsne_vx_u16mf2_b32(__VA_ARGS__) |
| #define | vmsne_vx_u16mf2_b32_m(...) __riscv_vmsne_vx_u16mf2_b32_mu(__VA_ARGS__) |
| #define | vmsne_vx_u16mf4_b64(...) __riscv_vmsne_vx_u16mf4_b64(__VA_ARGS__) |
| #define | vmsne_vx_u16mf4_b64_m(...) __riscv_vmsne_vx_u16mf4_b64_mu(__VA_ARGS__) |
| #define | vmsne_vx_u32m1_b32(...) __riscv_vmsne_vx_u32m1_b32(__VA_ARGS__) |
| #define | vmsne_vx_u32m1_b32_m(...) __riscv_vmsne_vx_u32m1_b32_mu(__VA_ARGS__) |
| #define | vmsne_vx_u32m2_b16(...) __riscv_vmsne_vx_u32m2_b16(__VA_ARGS__) |
| #define | vmsne_vx_u32m2_b16_m(...) __riscv_vmsne_vx_u32m2_b16_mu(__VA_ARGS__) |
| #define | vmsne_vx_u32m4_b8(...) __riscv_vmsne_vx_u32m4_b8(__VA_ARGS__) |
| #define | vmsne_vx_u32m4_b8_m(...) __riscv_vmsne_vx_u32m4_b8_mu(__VA_ARGS__) |
| #define | vmsne_vx_u32m8_b4(...) __riscv_vmsne_vx_u32m8_b4(__VA_ARGS__) |
| #define | vmsne_vx_u32m8_b4_m(...) __riscv_vmsne_vx_u32m8_b4_mu(__VA_ARGS__) |
| #define | vmsne_vx_u32mf2_b64(...) __riscv_vmsne_vx_u32mf2_b64(__VA_ARGS__) |
| #define | vmsne_vx_u32mf2_b64_m(...) __riscv_vmsne_vx_u32mf2_b64_mu(__VA_ARGS__) |
| #define | vmsne_vx_u64m1_b64(...) __riscv_vmsne_vx_u64m1_b64(__VA_ARGS__) |
| #define | vmsne_vx_u64m1_b64_m(...) __riscv_vmsne_vx_u64m1_b64_mu(__VA_ARGS__) |
| #define | vmsne_vx_u64m2_b32(...) __riscv_vmsne_vx_u64m2_b32(__VA_ARGS__) |
| #define | vmsne_vx_u64m2_b32_m(...) __riscv_vmsne_vx_u64m2_b32_mu(__VA_ARGS__) |
| #define | vmsne_vx_u64m4_b16(...) __riscv_vmsne_vx_u64m4_b16(__VA_ARGS__) |
| #define | vmsne_vx_u64m4_b16_m(...) __riscv_vmsne_vx_u64m4_b16_mu(__VA_ARGS__) |
| #define | vmsne_vx_u64m8_b8(...) __riscv_vmsne_vx_u64m8_b8(__VA_ARGS__) |
| #define | vmsne_vx_u64m8_b8_m(...) __riscv_vmsne_vx_u64m8_b8_mu(__VA_ARGS__) |
| #define | vmsne_vx_u8m1_b8(...) __riscv_vmsne_vx_u8m1_b8(__VA_ARGS__) |
| #define | vmsne_vx_u8m1_b8_m(...) __riscv_vmsne_vx_u8m1_b8_mu(__VA_ARGS__) |
| #define | vmsne_vx_u8m2_b4(...) __riscv_vmsne_vx_u8m2_b4(__VA_ARGS__) |
| #define | vmsne_vx_u8m2_b4_m(...) __riscv_vmsne_vx_u8m2_b4_mu(__VA_ARGS__) |
| #define | vmsne_vx_u8m4_b2(...) __riscv_vmsne_vx_u8m4_b2(__VA_ARGS__) |
| #define | vmsne_vx_u8m4_b2_m(...) __riscv_vmsne_vx_u8m4_b2_mu(__VA_ARGS__) |
| #define | vmsne_vx_u8m8_b1(...) __riscv_vmsne_vx_u8m8_b1(__VA_ARGS__) |
| #define | vmsne_vx_u8m8_b1_m(...) __riscv_vmsne_vx_u8m8_b1_mu(__VA_ARGS__) |
| #define | vmsne_vx_u8mf2_b16(...) __riscv_vmsne_vx_u8mf2_b16(__VA_ARGS__) |
| #define | vmsne_vx_u8mf2_b16_m(...) __riscv_vmsne_vx_u8mf2_b16_mu(__VA_ARGS__) |
| #define | vmsne_vx_u8mf4_b32(...) __riscv_vmsne_vx_u8mf4_b32(__VA_ARGS__) |
| #define | vmsne_vx_u8mf4_b32_m(...) __riscv_vmsne_vx_u8mf4_b32_mu(__VA_ARGS__) |
| #define | vmsne_vx_u8mf8_b64(...) __riscv_vmsne_vx_u8mf8_b64(__VA_ARGS__) |
| #define | vmsne_vx_u8mf8_b64_m(...) __riscv_vmsne_vx_u8mf8_b64_mu(__VA_ARGS__) |
| #define | vmsof_m_b1(...) __riscv_vmsof_m_b1(__VA_ARGS__) |
| #define | vmsof_m_b16(...) __riscv_vmsof_m_b16(__VA_ARGS__) |
| #define | vmsof_m_b16_m(...) __riscv_vmsof_m_b16_mu(__VA_ARGS__) |
| #define | vmsof_m_b1_m(...) __riscv_vmsof_m_b1_mu(__VA_ARGS__) |
| #define | vmsof_m_b2(...) __riscv_vmsof_m_b2(__VA_ARGS__) |
| #define | vmsof_m_b2_m(...) __riscv_vmsof_m_b2_mu(__VA_ARGS__) |
| #define | vmsof_m_b32(...) __riscv_vmsof_m_b32(__VA_ARGS__) |
| #define | vmsof_m_b32_m(...) __riscv_vmsof_m_b32_mu(__VA_ARGS__) |
| #define | vmsof_m_b4(...) __riscv_vmsof_m_b4(__VA_ARGS__) |
| #define | vmsof_m_b4_m(...) __riscv_vmsof_m_b4_mu(__VA_ARGS__) |
| #define | vmsof_m_b64(...) __riscv_vmsof_m_b64(__VA_ARGS__) |
| #define | vmsof_m_b64_m(...) __riscv_vmsof_m_b64_mu(__VA_ARGS__) |
| #define | vmsof_m_b8(...) __riscv_vmsof_m_b8(__VA_ARGS__) |
| #define | vmsof_m_b8_m(...) __riscv_vmsof_m_b8_mu(__VA_ARGS__) |
| #define | vmul_vv_i16m1(...) __riscv_vmul_vv_i16m1(__VA_ARGS__) |
| #define | vmul_vv_i16m1_m(...) __riscv_vmul_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vmul_vv_i16m2(...) __riscv_vmul_vv_i16m2(__VA_ARGS__) |
| #define | vmul_vv_i16m2_m(...) __riscv_vmul_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vmul_vv_i16m4(...) __riscv_vmul_vv_i16m4(__VA_ARGS__) |
| #define | vmul_vv_i16m4_m(...) __riscv_vmul_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vmul_vv_i16m8(...) __riscv_vmul_vv_i16m8(__VA_ARGS__) |
| #define | vmul_vv_i16m8_m(...) __riscv_vmul_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vmul_vv_i16mf2(...) __riscv_vmul_vv_i16mf2(__VA_ARGS__) |
| #define | vmul_vv_i16mf2_m(...) __riscv_vmul_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vmul_vv_i16mf4(...) __riscv_vmul_vv_i16mf4(__VA_ARGS__) |
| #define | vmul_vv_i16mf4_m(...) __riscv_vmul_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vmul_vv_i32m1(...) __riscv_vmul_vv_i32m1(__VA_ARGS__) |
| #define | vmul_vv_i32m1_m(...) __riscv_vmul_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vmul_vv_i32m2(...) __riscv_vmul_vv_i32m2(__VA_ARGS__) |
| #define | vmul_vv_i32m2_m(...) __riscv_vmul_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vmul_vv_i32m4(...) __riscv_vmul_vv_i32m4(__VA_ARGS__) |
| #define | vmul_vv_i32m4_m(...) __riscv_vmul_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vmul_vv_i32m8(...) __riscv_vmul_vv_i32m8(__VA_ARGS__) |
| #define | vmul_vv_i32m8_m(...) __riscv_vmul_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vmul_vv_i32mf2(...) __riscv_vmul_vv_i32mf2(__VA_ARGS__) |
| #define | vmul_vv_i32mf2_m(...) __riscv_vmul_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vmul_vv_i64m1(...) __riscv_vmul_vv_i64m1(__VA_ARGS__) |
| #define | vmul_vv_i64m1_m(...) __riscv_vmul_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vmul_vv_i64m2(...) __riscv_vmul_vv_i64m2(__VA_ARGS__) |
| #define | vmul_vv_i64m2_m(...) __riscv_vmul_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vmul_vv_i64m4(...) __riscv_vmul_vv_i64m4(__VA_ARGS__) |
| #define | vmul_vv_i64m4_m(...) __riscv_vmul_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vmul_vv_i64m8(...) __riscv_vmul_vv_i64m8(__VA_ARGS__) |
| #define | vmul_vv_i64m8_m(...) __riscv_vmul_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vmul_vv_i8m1(...) __riscv_vmul_vv_i8m1(__VA_ARGS__) |
| #define | vmul_vv_i8m1_m(...) __riscv_vmul_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vmul_vv_i8m2(...) __riscv_vmul_vv_i8m2(__VA_ARGS__) |
| #define | vmul_vv_i8m2_m(...) __riscv_vmul_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vmul_vv_i8m4(...) __riscv_vmul_vv_i8m4(__VA_ARGS__) |
| #define | vmul_vv_i8m4_m(...) __riscv_vmul_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vmul_vv_i8m8(...) __riscv_vmul_vv_i8m8(__VA_ARGS__) |
| #define | vmul_vv_i8m8_m(...) __riscv_vmul_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vmul_vv_i8mf2(...) __riscv_vmul_vv_i8mf2(__VA_ARGS__) |
| #define | vmul_vv_i8mf2_m(...) __riscv_vmul_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vmul_vv_i8mf4(...) __riscv_vmul_vv_i8mf4(__VA_ARGS__) |
| #define | vmul_vv_i8mf4_m(...) __riscv_vmul_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vmul_vv_i8mf8(...) __riscv_vmul_vv_i8mf8(__VA_ARGS__) |
| #define | vmul_vv_i8mf8_m(...) __riscv_vmul_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vmul_vv_u16m1(...) __riscv_vmul_vv_u16m1(__VA_ARGS__) |
| #define | vmul_vv_u16m1_m(...) __riscv_vmul_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vmul_vv_u16m2(...) __riscv_vmul_vv_u16m2(__VA_ARGS__) |
| #define | vmul_vv_u16m2_m(...) __riscv_vmul_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vmul_vv_u16m4(...) __riscv_vmul_vv_u16m4(__VA_ARGS__) |
| #define | vmul_vv_u16m4_m(...) __riscv_vmul_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vmul_vv_u16m8(...) __riscv_vmul_vv_u16m8(__VA_ARGS__) |
| #define | vmul_vv_u16m8_m(...) __riscv_vmul_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vmul_vv_u16mf2(...) __riscv_vmul_vv_u16mf2(__VA_ARGS__) |
| #define | vmul_vv_u16mf2_m(...) __riscv_vmul_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vmul_vv_u16mf4(...) __riscv_vmul_vv_u16mf4(__VA_ARGS__) |
| #define | vmul_vv_u16mf4_m(...) __riscv_vmul_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vmul_vv_u32m1(...) __riscv_vmul_vv_u32m1(__VA_ARGS__) |
| #define | vmul_vv_u32m1_m(...) __riscv_vmul_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vmul_vv_u32m2(...) __riscv_vmul_vv_u32m2(__VA_ARGS__) |
| #define | vmul_vv_u32m2_m(...) __riscv_vmul_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vmul_vv_u32m4(...) __riscv_vmul_vv_u32m4(__VA_ARGS__) |
| #define | vmul_vv_u32m4_m(...) __riscv_vmul_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vmul_vv_u32m8(...) __riscv_vmul_vv_u32m8(__VA_ARGS__) |
| #define | vmul_vv_u32m8_m(...) __riscv_vmul_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vmul_vv_u32mf2(...) __riscv_vmul_vv_u32mf2(__VA_ARGS__) |
| #define | vmul_vv_u32mf2_m(...) __riscv_vmul_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vmul_vv_u64m1(...) __riscv_vmul_vv_u64m1(__VA_ARGS__) |
| #define | vmul_vv_u64m1_m(...) __riscv_vmul_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vmul_vv_u64m2(...) __riscv_vmul_vv_u64m2(__VA_ARGS__) |
| #define | vmul_vv_u64m2_m(...) __riscv_vmul_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vmul_vv_u64m4(...) __riscv_vmul_vv_u64m4(__VA_ARGS__) |
| #define | vmul_vv_u64m4_m(...) __riscv_vmul_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vmul_vv_u64m8(...) __riscv_vmul_vv_u64m8(__VA_ARGS__) |
| #define | vmul_vv_u64m8_m(...) __riscv_vmul_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vmul_vv_u8m1(...) __riscv_vmul_vv_u8m1(__VA_ARGS__) |
| #define | vmul_vv_u8m1_m(...) __riscv_vmul_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vmul_vv_u8m2(...) __riscv_vmul_vv_u8m2(__VA_ARGS__) |
| #define | vmul_vv_u8m2_m(...) __riscv_vmul_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vmul_vv_u8m4(...) __riscv_vmul_vv_u8m4(__VA_ARGS__) |
| #define | vmul_vv_u8m4_m(...) __riscv_vmul_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vmul_vv_u8m8(...) __riscv_vmul_vv_u8m8(__VA_ARGS__) |
| #define | vmul_vv_u8m8_m(...) __riscv_vmul_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vmul_vv_u8mf2(...) __riscv_vmul_vv_u8mf2(__VA_ARGS__) |
| #define | vmul_vv_u8mf2_m(...) __riscv_vmul_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vmul_vv_u8mf4(...) __riscv_vmul_vv_u8mf4(__VA_ARGS__) |
| #define | vmul_vv_u8mf4_m(...) __riscv_vmul_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vmul_vv_u8mf8(...) __riscv_vmul_vv_u8mf8(__VA_ARGS__) |
| #define | vmul_vv_u8mf8_m(...) __riscv_vmul_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vmul_vx_i16m1(...) __riscv_vmul_vx_i16m1(__VA_ARGS__) |
| #define | vmul_vx_i16m1_m(...) __riscv_vmul_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vmul_vx_i16m2(...) __riscv_vmul_vx_i16m2(__VA_ARGS__) |
| #define | vmul_vx_i16m2_m(...) __riscv_vmul_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vmul_vx_i16m4(...) __riscv_vmul_vx_i16m4(__VA_ARGS__) |
| #define | vmul_vx_i16m4_m(...) __riscv_vmul_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vmul_vx_i16m8(...) __riscv_vmul_vx_i16m8(__VA_ARGS__) |
| #define | vmul_vx_i16m8_m(...) __riscv_vmul_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vmul_vx_i16mf2(...) __riscv_vmul_vx_i16mf2(__VA_ARGS__) |
| #define | vmul_vx_i16mf2_m(...) __riscv_vmul_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vmul_vx_i16mf4(...) __riscv_vmul_vx_i16mf4(__VA_ARGS__) |
| #define | vmul_vx_i16mf4_m(...) __riscv_vmul_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vmul_vx_i32m1(...) __riscv_vmul_vx_i32m1(__VA_ARGS__) |
| #define | vmul_vx_i32m1_m(...) __riscv_vmul_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vmul_vx_i32m2(...) __riscv_vmul_vx_i32m2(__VA_ARGS__) |
| #define | vmul_vx_i32m2_m(...) __riscv_vmul_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vmul_vx_i32m4(...) __riscv_vmul_vx_i32m4(__VA_ARGS__) |
| #define | vmul_vx_i32m4_m(...) __riscv_vmul_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vmul_vx_i32m8(...) __riscv_vmul_vx_i32m8(__VA_ARGS__) |
| #define | vmul_vx_i32m8_m(...) __riscv_vmul_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vmul_vx_i32mf2(...) __riscv_vmul_vx_i32mf2(__VA_ARGS__) |
| #define | vmul_vx_i32mf2_m(...) __riscv_vmul_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vmul_vx_i64m1(...) __riscv_vmul_vx_i64m1(__VA_ARGS__) |
| #define | vmul_vx_i64m1_m(...) __riscv_vmul_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vmul_vx_i64m2(...) __riscv_vmul_vx_i64m2(__VA_ARGS__) |
| #define | vmul_vx_i64m2_m(...) __riscv_vmul_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vmul_vx_i64m4(...) __riscv_vmul_vx_i64m4(__VA_ARGS__) |
| #define | vmul_vx_i64m4_m(...) __riscv_vmul_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vmul_vx_i64m8(...) __riscv_vmul_vx_i64m8(__VA_ARGS__) |
| #define | vmul_vx_i64m8_m(...) __riscv_vmul_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vmul_vx_i8m1(...) __riscv_vmul_vx_i8m1(__VA_ARGS__) |
| #define | vmul_vx_i8m1_m(...) __riscv_vmul_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vmul_vx_i8m2(...) __riscv_vmul_vx_i8m2(__VA_ARGS__) |
| #define | vmul_vx_i8m2_m(...) __riscv_vmul_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vmul_vx_i8m4(...) __riscv_vmul_vx_i8m4(__VA_ARGS__) |
| #define | vmul_vx_i8m4_m(...) __riscv_vmul_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vmul_vx_i8m8(...) __riscv_vmul_vx_i8m8(__VA_ARGS__) |
| #define | vmul_vx_i8m8_m(...) __riscv_vmul_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vmul_vx_i8mf2(...) __riscv_vmul_vx_i8mf2(__VA_ARGS__) |
| #define | vmul_vx_i8mf2_m(...) __riscv_vmul_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vmul_vx_i8mf4(...) __riscv_vmul_vx_i8mf4(__VA_ARGS__) |
| #define | vmul_vx_i8mf4_m(...) __riscv_vmul_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vmul_vx_i8mf8(...) __riscv_vmul_vx_i8mf8(__VA_ARGS__) |
| #define | vmul_vx_i8mf8_m(...) __riscv_vmul_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vmul_vx_u16m1(...) __riscv_vmul_vx_u16m1(__VA_ARGS__) |
| #define | vmul_vx_u16m1_m(...) __riscv_vmul_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vmul_vx_u16m2(...) __riscv_vmul_vx_u16m2(__VA_ARGS__) |
| #define | vmul_vx_u16m2_m(...) __riscv_vmul_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vmul_vx_u16m4(...) __riscv_vmul_vx_u16m4(__VA_ARGS__) |
| #define | vmul_vx_u16m4_m(...) __riscv_vmul_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vmul_vx_u16m8(...) __riscv_vmul_vx_u16m8(__VA_ARGS__) |
| #define | vmul_vx_u16m8_m(...) __riscv_vmul_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vmul_vx_u16mf2(...) __riscv_vmul_vx_u16mf2(__VA_ARGS__) |
| #define | vmul_vx_u16mf2_m(...) __riscv_vmul_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vmul_vx_u16mf4(...) __riscv_vmul_vx_u16mf4(__VA_ARGS__) |
| #define | vmul_vx_u16mf4_m(...) __riscv_vmul_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vmul_vx_u32m1(...) __riscv_vmul_vx_u32m1(__VA_ARGS__) |
| #define | vmul_vx_u32m1_m(...) __riscv_vmul_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vmul_vx_u32m2(...) __riscv_vmul_vx_u32m2(__VA_ARGS__) |
| #define | vmul_vx_u32m2_m(...) __riscv_vmul_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vmul_vx_u32m4(...) __riscv_vmul_vx_u32m4(__VA_ARGS__) |
| #define | vmul_vx_u32m4_m(...) __riscv_vmul_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vmul_vx_u32m8(...) __riscv_vmul_vx_u32m8(__VA_ARGS__) |
| #define | vmul_vx_u32m8_m(...) __riscv_vmul_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vmul_vx_u32mf2(...) __riscv_vmul_vx_u32mf2(__VA_ARGS__) |
| #define | vmul_vx_u32mf2_m(...) __riscv_vmul_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vmul_vx_u64m1(...) __riscv_vmul_vx_u64m1(__VA_ARGS__) |
| #define | vmul_vx_u64m1_m(...) __riscv_vmul_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vmul_vx_u64m2(...) __riscv_vmul_vx_u64m2(__VA_ARGS__) |
| #define | vmul_vx_u64m2_m(...) __riscv_vmul_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vmul_vx_u64m4(...) __riscv_vmul_vx_u64m4(__VA_ARGS__) |
| #define | vmul_vx_u64m4_m(...) __riscv_vmul_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vmul_vx_u64m8(...) __riscv_vmul_vx_u64m8(__VA_ARGS__) |
| #define | vmul_vx_u64m8_m(...) __riscv_vmul_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vmul_vx_u8m1(...) __riscv_vmul_vx_u8m1(__VA_ARGS__) |
| #define | vmul_vx_u8m1_m(...) __riscv_vmul_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vmul_vx_u8m2(...) __riscv_vmul_vx_u8m2(__VA_ARGS__) |
| #define | vmul_vx_u8m2_m(...) __riscv_vmul_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vmul_vx_u8m4(...) __riscv_vmul_vx_u8m4(__VA_ARGS__) |
| #define | vmul_vx_u8m4_m(...) __riscv_vmul_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vmul_vx_u8m8(...) __riscv_vmul_vx_u8m8(__VA_ARGS__) |
| #define | vmul_vx_u8m8_m(...) __riscv_vmul_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vmul_vx_u8mf2(...) __riscv_vmul_vx_u8mf2(__VA_ARGS__) |
| #define | vmul_vx_u8mf2_m(...) __riscv_vmul_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vmul_vx_u8mf4(...) __riscv_vmul_vx_u8mf4(__VA_ARGS__) |
| #define | vmul_vx_u8mf4_m(...) __riscv_vmul_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vmul_vx_u8mf8(...) __riscv_vmul_vx_u8mf8(__VA_ARGS__) |
| #define | vmul_vx_u8mf8_m(...) __riscv_vmul_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i16m1(...) __riscv_vmulh_vv_i16m1(__VA_ARGS__) |
| #define | vmulh_vv_i16m1_m(...) __riscv_vmulh_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i16m2(...) __riscv_vmulh_vv_i16m2(__VA_ARGS__) |
| #define | vmulh_vv_i16m2_m(...) __riscv_vmulh_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i16m4(...) __riscv_vmulh_vv_i16m4(__VA_ARGS__) |
| #define | vmulh_vv_i16m4_m(...) __riscv_vmulh_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i16m8(...) __riscv_vmulh_vv_i16m8(__VA_ARGS__) |
| #define | vmulh_vv_i16m8_m(...) __riscv_vmulh_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i16mf2(...) __riscv_vmulh_vv_i16mf2(__VA_ARGS__) |
| #define | vmulh_vv_i16mf2_m(...) __riscv_vmulh_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i16mf4(...) __riscv_vmulh_vv_i16mf4(__VA_ARGS__) |
| #define | vmulh_vv_i16mf4_m(...) __riscv_vmulh_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i32m1(...) __riscv_vmulh_vv_i32m1(__VA_ARGS__) |
| #define | vmulh_vv_i32m1_m(...) __riscv_vmulh_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i32m2(...) __riscv_vmulh_vv_i32m2(__VA_ARGS__) |
| #define | vmulh_vv_i32m2_m(...) __riscv_vmulh_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i32m4(...) __riscv_vmulh_vv_i32m4(__VA_ARGS__) |
| #define | vmulh_vv_i32m4_m(...) __riscv_vmulh_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i32m8(...) __riscv_vmulh_vv_i32m8(__VA_ARGS__) |
| #define | vmulh_vv_i32m8_m(...) __riscv_vmulh_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i32mf2(...) __riscv_vmulh_vv_i32mf2(__VA_ARGS__) |
| #define | vmulh_vv_i32mf2_m(...) __riscv_vmulh_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i64m1(...) __riscv_vmulh_vv_i64m1(__VA_ARGS__) |
| #define | vmulh_vv_i64m1_m(...) __riscv_vmulh_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i64m2(...) __riscv_vmulh_vv_i64m2(__VA_ARGS__) |
| #define | vmulh_vv_i64m2_m(...) __riscv_vmulh_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i64m4(...) __riscv_vmulh_vv_i64m4(__VA_ARGS__) |
| #define | vmulh_vv_i64m4_m(...) __riscv_vmulh_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i64m8(...) __riscv_vmulh_vv_i64m8(__VA_ARGS__) |
| #define | vmulh_vv_i64m8_m(...) __riscv_vmulh_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i8m1(...) __riscv_vmulh_vv_i8m1(__VA_ARGS__) |
| #define | vmulh_vv_i8m1_m(...) __riscv_vmulh_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i8m2(...) __riscv_vmulh_vv_i8m2(__VA_ARGS__) |
| #define | vmulh_vv_i8m2_m(...) __riscv_vmulh_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i8m4(...) __riscv_vmulh_vv_i8m4(__VA_ARGS__) |
| #define | vmulh_vv_i8m4_m(...) __riscv_vmulh_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i8m8(...) __riscv_vmulh_vv_i8m8(__VA_ARGS__) |
| #define | vmulh_vv_i8m8_m(...) __riscv_vmulh_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i8mf2(...) __riscv_vmulh_vv_i8mf2(__VA_ARGS__) |
| #define | vmulh_vv_i8mf2_m(...) __riscv_vmulh_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i8mf4(...) __riscv_vmulh_vv_i8mf4(__VA_ARGS__) |
| #define | vmulh_vv_i8mf4_m(...) __riscv_vmulh_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vmulh_vv_i8mf8(...) __riscv_vmulh_vv_i8mf8(__VA_ARGS__) |
| #define | vmulh_vv_i8mf8_m(...) __riscv_vmulh_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i16m1(...) __riscv_vmulh_vx_i16m1(__VA_ARGS__) |
| #define | vmulh_vx_i16m1_m(...) __riscv_vmulh_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i16m2(...) __riscv_vmulh_vx_i16m2(__VA_ARGS__) |
| #define | vmulh_vx_i16m2_m(...) __riscv_vmulh_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i16m4(...) __riscv_vmulh_vx_i16m4(__VA_ARGS__) |
| #define | vmulh_vx_i16m4_m(...) __riscv_vmulh_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i16m8(...) __riscv_vmulh_vx_i16m8(__VA_ARGS__) |
| #define | vmulh_vx_i16m8_m(...) __riscv_vmulh_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i16mf2(...) __riscv_vmulh_vx_i16mf2(__VA_ARGS__) |
| #define | vmulh_vx_i16mf2_m(...) __riscv_vmulh_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i16mf4(...) __riscv_vmulh_vx_i16mf4(__VA_ARGS__) |
| #define | vmulh_vx_i16mf4_m(...) __riscv_vmulh_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i32m1(...) __riscv_vmulh_vx_i32m1(__VA_ARGS__) |
| #define | vmulh_vx_i32m1_m(...) __riscv_vmulh_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i32m2(...) __riscv_vmulh_vx_i32m2(__VA_ARGS__) |
| #define | vmulh_vx_i32m2_m(...) __riscv_vmulh_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i32m4(...) __riscv_vmulh_vx_i32m4(__VA_ARGS__) |
| #define | vmulh_vx_i32m4_m(...) __riscv_vmulh_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i32m8(...) __riscv_vmulh_vx_i32m8(__VA_ARGS__) |
| #define | vmulh_vx_i32m8_m(...) __riscv_vmulh_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i32mf2(...) __riscv_vmulh_vx_i32mf2(__VA_ARGS__) |
| #define | vmulh_vx_i32mf2_m(...) __riscv_vmulh_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i64m1(...) __riscv_vmulh_vx_i64m1(__VA_ARGS__) |
| #define | vmulh_vx_i64m1_m(...) __riscv_vmulh_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i64m2(...) __riscv_vmulh_vx_i64m2(__VA_ARGS__) |
| #define | vmulh_vx_i64m2_m(...) __riscv_vmulh_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i64m4(...) __riscv_vmulh_vx_i64m4(__VA_ARGS__) |
| #define | vmulh_vx_i64m4_m(...) __riscv_vmulh_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i64m8(...) __riscv_vmulh_vx_i64m8(__VA_ARGS__) |
| #define | vmulh_vx_i64m8_m(...) __riscv_vmulh_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i8m1(...) __riscv_vmulh_vx_i8m1(__VA_ARGS__) |
| #define | vmulh_vx_i8m1_m(...) __riscv_vmulh_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i8m2(...) __riscv_vmulh_vx_i8m2(__VA_ARGS__) |
| #define | vmulh_vx_i8m2_m(...) __riscv_vmulh_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i8m4(...) __riscv_vmulh_vx_i8m4(__VA_ARGS__) |
| #define | vmulh_vx_i8m4_m(...) __riscv_vmulh_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i8m8(...) __riscv_vmulh_vx_i8m8(__VA_ARGS__) |
| #define | vmulh_vx_i8m8_m(...) __riscv_vmulh_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i8mf2(...) __riscv_vmulh_vx_i8mf2(__VA_ARGS__) |
| #define | vmulh_vx_i8mf2_m(...) __riscv_vmulh_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i8mf4(...) __riscv_vmulh_vx_i8mf4(__VA_ARGS__) |
| #define | vmulh_vx_i8mf4_m(...) __riscv_vmulh_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vmulh_vx_i8mf8(...) __riscv_vmulh_vx_i8mf8(__VA_ARGS__) |
| #define | vmulh_vx_i8mf8_m(...) __riscv_vmulh_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i16m1(...) __riscv_vmulhsu_vv_i16m1(__VA_ARGS__) |
| #define | vmulhsu_vv_i16m1_m(...) __riscv_vmulhsu_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i16m2(...) __riscv_vmulhsu_vv_i16m2(__VA_ARGS__) |
| #define | vmulhsu_vv_i16m2_m(...) __riscv_vmulhsu_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i16m4(...) __riscv_vmulhsu_vv_i16m4(__VA_ARGS__) |
| #define | vmulhsu_vv_i16m4_m(...) __riscv_vmulhsu_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i16m8(...) __riscv_vmulhsu_vv_i16m8(__VA_ARGS__) |
| #define | vmulhsu_vv_i16m8_m(...) __riscv_vmulhsu_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i16mf2(...) __riscv_vmulhsu_vv_i16mf2(__VA_ARGS__) |
| #define | vmulhsu_vv_i16mf2_m(...) __riscv_vmulhsu_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i16mf4(...) __riscv_vmulhsu_vv_i16mf4(__VA_ARGS__) |
| #define | vmulhsu_vv_i16mf4_m(...) __riscv_vmulhsu_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i32m1(...) __riscv_vmulhsu_vv_i32m1(__VA_ARGS__) |
| #define | vmulhsu_vv_i32m1_m(...) __riscv_vmulhsu_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i32m2(...) __riscv_vmulhsu_vv_i32m2(__VA_ARGS__) |
| #define | vmulhsu_vv_i32m2_m(...) __riscv_vmulhsu_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i32m4(...) __riscv_vmulhsu_vv_i32m4(__VA_ARGS__) |
| #define | vmulhsu_vv_i32m4_m(...) __riscv_vmulhsu_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i32m8(...) __riscv_vmulhsu_vv_i32m8(__VA_ARGS__) |
| #define | vmulhsu_vv_i32m8_m(...) __riscv_vmulhsu_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i32mf2(...) __riscv_vmulhsu_vv_i32mf2(__VA_ARGS__) |
| #define | vmulhsu_vv_i32mf2_m(...) __riscv_vmulhsu_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i64m1(...) __riscv_vmulhsu_vv_i64m1(__VA_ARGS__) |
| #define | vmulhsu_vv_i64m1_m(...) __riscv_vmulhsu_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i64m2(...) __riscv_vmulhsu_vv_i64m2(__VA_ARGS__) |
| #define | vmulhsu_vv_i64m2_m(...) __riscv_vmulhsu_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i64m4(...) __riscv_vmulhsu_vv_i64m4(__VA_ARGS__) |
| #define | vmulhsu_vv_i64m4_m(...) __riscv_vmulhsu_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i64m8(...) __riscv_vmulhsu_vv_i64m8(__VA_ARGS__) |
| #define | vmulhsu_vv_i64m8_m(...) __riscv_vmulhsu_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i8m1(...) __riscv_vmulhsu_vv_i8m1(__VA_ARGS__) |
| #define | vmulhsu_vv_i8m1_m(...) __riscv_vmulhsu_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i8m2(...) __riscv_vmulhsu_vv_i8m2(__VA_ARGS__) |
| #define | vmulhsu_vv_i8m2_m(...) __riscv_vmulhsu_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i8m4(...) __riscv_vmulhsu_vv_i8m4(__VA_ARGS__) |
| #define | vmulhsu_vv_i8m4_m(...) __riscv_vmulhsu_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i8m8(...) __riscv_vmulhsu_vv_i8m8(__VA_ARGS__) |
| #define | vmulhsu_vv_i8m8_m(...) __riscv_vmulhsu_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i8mf2(...) __riscv_vmulhsu_vv_i8mf2(__VA_ARGS__) |
| #define | vmulhsu_vv_i8mf2_m(...) __riscv_vmulhsu_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i8mf4(...) __riscv_vmulhsu_vv_i8mf4(__VA_ARGS__) |
| #define | vmulhsu_vv_i8mf4_m(...) __riscv_vmulhsu_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vmulhsu_vv_i8mf8(...) __riscv_vmulhsu_vv_i8mf8(__VA_ARGS__) |
| #define | vmulhsu_vv_i8mf8_m(...) __riscv_vmulhsu_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i16m1(...) __riscv_vmulhsu_vx_i16m1(__VA_ARGS__) |
| #define | vmulhsu_vx_i16m1_m(...) __riscv_vmulhsu_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i16m2(...) __riscv_vmulhsu_vx_i16m2(__VA_ARGS__) |
| #define | vmulhsu_vx_i16m2_m(...) __riscv_vmulhsu_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i16m4(...) __riscv_vmulhsu_vx_i16m4(__VA_ARGS__) |
| #define | vmulhsu_vx_i16m4_m(...) __riscv_vmulhsu_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i16m8(...) __riscv_vmulhsu_vx_i16m8(__VA_ARGS__) |
| #define | vmulhsu_vx_i16m8_m(...) __riscv_vmulhsu_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i16mf2(...) __riscv_vmulhsu_vx_i16mf2(__VA_ARGS__) |
| #define | vmulhsu_vx_i16mf2_m(...) __riscv_vmulhsu_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i16mf4(...) __riscv_vmulhsu_vx_i16mf4(__VA_ARGS__) |
| #define | vmulhsu_vx_i16mf4_m(...) __riscv_vmulhsu_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i32m1(...) __riscv_vmulhsu_vx_i32m1(__VA_ARGS__) |
| #define | vmulhsu_vx_i32m1_m(...) __riscv_vmulhsu_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i32m2(...) __riscv_vmulhsu_vx_i32m2(__VA_ARGS__) |
| #define | vmulhsu_vx_i32m2_m(...) __riscv_vmulhsu_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i32m4(...) __riscv_vmulhsu_vx_i32m4(__VA_ARGS__) |
| #define | vmulhsu_vx_i32m4_m(...) __riscv_vmulhsu_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i32m8(...) __riscv_vmulhsu_vx_i32m8(__VA_ARGS__) |
| #define | vmulhsu_vx_i32m8_m(...) __riscv_vmulhsu_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i32mf2(...) __riscv_vmulhsu_vx_i32mf2(__VA_ARGS__) |
| #define | vmulhsu_vx_i32mf2_m(...) __riscv_vmulhsu_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i64m1(...) __riscv_vmulhsu_vx_i64m1(__VA_ARGS__) |
| #define | vmulhsu_vx_i64m1_m(...) __riscv_vmulhsu_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i64m2(...) __riscv_vmulhsu_vx_i64m2(__VA_ARGS__) |
| #define | vmulhsu_vx_i64m2_m(...) __riscv_vmulhsu_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i64m4(...) __riscv_vmulhsu_vx_i64m4(__VA_ARGS__) |
| #define | vmulhsu_vx_i64m4_m(...) __riscv_vmulhsu_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i64m8(...) __riscv_vmulhsu_vx_i64m8(__VA_ARGS__) |
| #define | vmulhsu_vx_i64m8_m(...) __riscv_vmulhsu_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i8m1(...) __riscv_vmulhsu_vx_i8m1(__VA_ARGS__) |
| #define | vmulhsu_vx_i8m1_m(...) __riscv_vmulhsu_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i8m2(...) __riscv_vmulhsu_vx_i8m2(__VA_ARGS__) |
| #define | vmulhsu_vx_i8m2_m(...) __riscv_vmulhsu_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i8m4(...) __riscv_vmulhsu_vx_i8m4(__VA_ARGS__) |
| #define | vmulhsu_vx_i8m4_m(...) __riscv_vmulhsu_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i8m8(...) __riscv_vmulhsu_vx_i8m8(__VA_ARGS__) |
| #define | vmulhsu_vx_i8m8_m(...) __riscv_vmulhsu_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i8mf2(...) __riscv_vmulhsu_vx_i8mf2(__VA_ARGS__) |
| #define | vmulhsu_vx_i8mf2_m(...) __riscv_vmulhsu_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i8mf4(...) __riscv_vmulhsu_vx_i8mf4(__VA_ARGS__) |
| #define | vmulhsu_vx_i8mf4_m(...) __riscv_vmulhsu_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vmulhsu_vx_i8mf8(...) __riscv_vmulhsu_vx_i8mf8(__VA_ARGS__) |
| #define | vmulhsu_vx_i8mf8_m(...) __riscv_vmulhsu_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u16m1(...) __riscv_vmulhu_vv_u16m1(__VA_ARGS__) |
| #define | vmulhu_vv_u16m1_m(...) __riscv_vmulhu_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u16m2(...) __riscv_vmulhu_vv_u16m2(__VA_ARGS__) |
| #define | vmulhu_vv_u16m2_m(...) __riscv_vmulhu_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u16m4(...) __riscv_vmulhu_vv_u16m4(__VA_ARGS__) |
| #define | vmulhu_vv_u16m4_m(...) __riscv_vmulhu_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u16m8(...) __riscv_vmulhu_vv_u16m8(__VA_ARGS__) |
| #define | vmulhu_vv_u16m8_m(...) __riscv_vmulhu_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u16mf2(...) __riscv_vmulhu_vv_u16mf2(__VA_ARGS__) |
| #define | vmulhu_vv_u16mf2_m(...) __riscv_vmulhu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u16mf4(...) __riscv_vmulhu_vv_u16mf4(__VA_ARGS__) |
| #define | vmulhu_vv_u16mf4_m(...) __riscv_vmulhu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u32m1(...) __riscv_vmulhu_vv_u32m1(__VA_ARGS__) |
| #define | vmulhu_vv_u32m1_m(...) __riscv_vmulhu_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u32m2(...) __riscv_vmulhu_vv_u32m2(__VA_ARGS__) |
| #define | vmulhu_vv_u32m2_m(...) __riscv_vmulhu_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u32m4(...) __riscv_vmulhu_vv_u32m4(__VA_ARGS__) |
| #define | vmulhu_vv_u32m4_m(...) __riscv_vmulhu_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u32m8(...) __riscv_vmulhu_vv_u32m8(__VA_ARGS__) |
| #define | vmulhu_vv_u32m8_m(...) __riscv_vmulhu_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u32mf2(...) __riscv_vmulhu_vv_u32mf2(__VA_ARGS__) |
| #define | vmulhu_vv_u32mf2_m(...) __riscv_vmulhu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u64m1(...) __riscv_vmulhu_vv_u64m1(__VA_ARGS__) |
| #define | vmulhu_vv_u64m1_m(...) __riscv_vmulhu_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u64m2(...) __riscv_vmulhu_vv_u64m2(__VA_ARGS__) |
| #define | vmulhu_vv_u64m2_m(...) __riscv_vmulhu_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u64m4(...) __riscv_vmulhu_vv_u64m4(__VA_ARGS__) |
| #define | vmulhu_vv_u64m4_m(...) __riscv_vmulhu_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u64m8(...) __riscv_vmulhu_vv_u64m8(__VA_ARGS__) |
| #define | vmulhu_vv_u64m8_m(...) __riscv_vmulhu_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u8m1(...) __riscv_vmulhu_vv_u8m1(__VA_ARGS__) |
| #define | vmulhu_vv_u8m1_m(...) __riscv_vmulhu_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u8m2(...) __riscv_vmulhu_vv_u8m2(__VA_ARGS__) |
| #define | vmulhu_vv_u8m2_m(...) __riscv_vmulhu_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u8m4(...) __riscv_vmulhu_vv_u8m4(__VA_ARGS__) |
| #define | vmulhu_vv_u8m4_m(...) __riscv_vmulhu_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u8m8(...) __riscv_vmulhu_vv_u8m8(__VA_ARGS__) |
| #define | vmulhu_vv_u8m8_m(...) __riscv_vmulhu_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u8mf2(...) __riscv_vmulhu_vv_u8mf2(__VA_ARGS__) |
| #define | vmulhu_vv_u8mf2_m(...) __riscv_vmulhu_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u8mf4(...) __riscv_vmulhu_vv_u8mf4(__VA_ARGS__) |
| #define | vmulhu_vv_u8mf4_m(...) __riscv_vmulhu_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vmulhu_vv_u8mf8(...) __riscv_vmulhu_vv_u8mf8(__VA_ARGS__) |
| #define | vmulhu_vv_u8mf8_m(...) __riscv_vmulhu_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u16m1(...) __riscv_vmulhu_vx_u16m1(__VA_ARGS__) |
| #define | vmulhu_vx_u16m1_m(...) __riscv_vmulhu_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u16m2(...) __riscv_vmulhu_vx_u16m2(__VA_ARGS__) |
| #define | vmulhu_vx_u16m2_m(...) __riscv_vmulhu_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u16m4(...) __riscv_vmulhu_vx_u16m4(__VA_ARGS__) |
| #define | vmulhu_vx_u16m4_m(...) __riscv_vmulhu_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u16m8(...) __riscv_vmulhu_vx_u16m8(__VA_ARGS__) |
| #define | vmulhu_vx_u16m8_m(...) __riscv_vmulhu_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u16mf2(...) __riscv_vmulhu_vx_u16mf2(__VA_ARGS__) |
| #define | vmulhu_vx_u16mf2_m(...) __riscv_vmulhu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u16mf4(...) __riscv_vmulhu_vx_u16mf4(__VA_ARGS__) |
| #define | vmulhu_vx_u16mf4_m(...) __riscv_vmulhu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u32m1(...) __riscv_vmulhu_vx_u32m1(__VA_ARGS__) |
| #define | vmulhu_vx_u32m1_m(...) __riscv_vmulhu_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u32m2(...) __riscv_vmulhu_vx_u32m2(__VA_ARGS__) |
| #define | vmulhu_vx_u32m2_m(...) __riscv_vmulhu_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u32m4(...) __riscv_vmulhu_vx_u32m4(__VA_ARGS__) |
| #define | vmulhu_vx_u32m4_m(...) __riscv_vmulhu_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u32m8(...) __riscv_vmulhu_vx_u32m8(__VA_ARGS__) |
| #define | vmulhu_vx_u32m8_m(...) __riscv_vmulhu_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u32mf2(...) __riscv_vmulhu_vx_u32mf2(__VA_ARGS__) |
| #define | vmulhu_vx_u32mf2_m(...) __riscv_vmulhu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u64m1(...) __riscv_vmulhu_vx_u64m1(__VA_ARGS__) |
| #define | vmulhu_vx_u64m1_m(...) __riscv_vmulhu_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u64m2(...) __riscv_vmulhu_vx_u64m2(__VA_ARGS__) |
| #define | vmulhu_vx_u64m2_m(...) __riscv_vmulhu_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u64m4(...) __riscv_vmulhu_vx_u64m4(__VA_ARGS__) |
| #define | vmulhu_vx_u64m4_m(...) __riscv_vmulhu_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u64m8(...) __riscv_vmulhu_vx_u64m8(__VA_ARGS__) |
| #define | vmulhu_vx_u64m8_m(...) __riscv_vmulhu_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u8m1(...) __riscv_vmulhu_vx_u8m1(__VA_ARGS__) |
| #define | vmulhu_vx_u8m1_m(...) __riscv_vmulhu_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u8m2(...) __riscv_vmulhu_vx_u8m2(__VA_ARGS__) |
| #define | vmulhu_vx_u8m2_m(...) __riscv_vmulhu_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u8m4(...) __riscv_vmulhu_vx_u8m4(__VA_ARGS__) |
| #define | vmulhu_vx_u8m4_m(...) __riscv_vmulhu_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u8m8(...) __riscv_vmulhu_vx_u8m8(__VA_ARGS__) |
| #define | vmulhu_vx_u8m8_m(...) __riscv_vmulhu_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u8mf2(...) __riscv_vmulhu_vx_u8mf2(__VA_ARGS__) |
| #define | vmulhu_vx_u8mf2_m(...) __riscv_vmulhu_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u8mf4(...) __riscv_vmulhu_vx_u8mf4(__VA_ARGS__) |
| #define | vmulhu_vx_u8mf4_m(...) __riscv_vmulhu_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vmulhu_vx_u8mf8(...) __riscv_vmulhu_vx_u8mf8(__VA_ARGS__) |
| #define | vmulhu_vx_u8mf8_m(...) __riscv_vmulhu_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vmv_s_x_i16m1(...) __riscv_vmv_s_x_i16m1_tu(__VA_ARGS__) |
| #define | vmv_s_x_i16m2(...) __riscv_vmv_s_x_i16m2_tu(__VA_ARGS__) |
| #define | vmv_s_x_i16m4(...) __riscv_vmv_s_x_i16m4_tu(__VA_ARGS__) |
| #define | vmv_s_x_i16m8(...) __riscv_vmv_s_x_i16m8_tu(__VA_ARGS__) |
| #define | vmv_s_x_i16mf2(...) __riscv_vmv_s_x_i16mf2_tu(__VA_ARGS__) |
| #define | vmv_s_x_i16mf4(...) __riscv_vmv_s_x_i16mf4_tu(__VA_ARGS__) |
| #define | vmv_s_x_i32m1(...) __riscv_vmv_s_x_i32m1_tu(__VA_ARGS__) |
| #define | vmv_s_x_i32m2(...) __riscv_vmv_s_x_i32m2_tu(__VA_ARGS__) |
| #define | vmv_s_x_i32m4(...) __riscv_vmv_s_x_i32m4_tu(__VA_ARGS__) |
| #define | vmv_s_x_i32m8(...) __riscv_vmv_s_x_i32m8_tu(__VA_ARGS__) |
| #define | vmv_s_x_i32mf2(...) __riscv_vmv_s_x_i32mf2_tu(__VA_ARGS__) |
| #define | vmv_s_x_i64m1(...) __riscv_vmv_s_x_i64m1_tu(__VA_ARGS__) |
| #define | vmv_s_x_i64m2(...) __riscv_vmv_s_x_i64m2_tu(__VA_ARGS__) |
| #define | vmv_s_x_i64m4(...) __riscv_vmv_s_x_i64m4_tu(__VA_ARGS__) |
| #define | vmv_s_x_i64m8(...) __riscv_vmv_s_x_i64m8_tu(__VA_ARGS__) |
| #define | vmv_s_x_i8m1(...) __riscv_vmv_s_x_i8m1_tu(__VA_ARGS__) |
| #define | vmv_s_x_i8m2(...) __riscv_vmv_s_x_i8m2_tu(__VA_ARGS__) |
| #define | vmv_s_x_i8m4(...) __riscv_vmv_s_x_i8m4_tu(__VA_ARGS__) |
| #define | vmv_s_x_i8m8(...) __riscv_vmv_s_x_i8m8_tu(__VA_ARGS__) |
| #define | vmv_s_x_i8mf2(...) __riscv_vmv_s_x_i8mf2_tu(__VA_ARGS__) |
| #define | vmv_s_x_i8mf4(...) __riscv_vmv_s_x_i8mf4_tu(__VA_ARGS__) |
| #define | vmv_s_x_i8mf8(...) __riscv_vmv_s_x_i8mf8_tu(__VA_ARGS__) |
| #define | vmv_s_x_u16m1(...) __riscv_vmv_s_x_u16m1_tu(__VA_ARGS__) |
| #define | vmv_s_x_u16m2(...) __riscv_vmv_s_x_u16m2_tu(__VA_ARGS__) |
| #define | vmv_s_x_u16m4(...) __riscv_vmv_s_x_u16m4_tu(__VA_ARGS__) |
| #define | vmv_s_x_u16m8(...) __riscv_vmv_s_x_u16m8_tu(__VA_ARGS__) |
| #define | vmv_s_x_u16mf2(...) __riscv_vmv_s_x_u16mf2_tu(__VA_ARGS__) |
| #define | vmv_s_x_u16mf4(...) __riscv_vmv_s_x_u16mf4_tu(__VA_ARGS__) |
| #define | vmv_s_x_u32m1(...) __riscv_vmv_s_x_u32m1_tu(__VA_ARGS__) |
| #define | vmv_s_x_u32m2(...) __riscv_vmv_s_x_u32m2_tu(__VA_ARGS__) |
| #define | vmv_s_x_u32m4(...) __riscv_vmv_s_x_u32m4_tu(__VA_ARGS__) |
| #define | vmv_s_x_u32m8(...) __riscv_vmv_s_x_u32m8_tu(__VA_ARGS__) |
| #define | vmv_s_x_u32mf2(...) __riscv_vmv_s_x_u32mf2_tu(__VA_ARGS__) |
| #define | vmv_s_x_u64m1(...) __riscv_vmv_s_x_u64m1_tu(__VA_ARGS__) |
| #define | vmv_s_x_u64m2(...) __riscv_vmv_s_x_u64m2_tu(__VA_ARGS__) |
| #define | vmv_s_x_u64m4(...) __riscv_vmv_s_x_u64m4_tu(__VA_ARGS__) |
| #define | vmv_s_x_u64m8(...) __riscv_vmv_s_x_u64m8_tu(__VA_ARGS__) |
| #define | vmv_s_x_u8m1(...) __riscv_vmv_s_x_u8m1_tu(__VA_ARGS__) |
| #define | vmv_s_x_u8m2(...) __riscv_vmv_s_x_u8m2_tu(__VA_ARGS__) |
| #define | vmv_s_x_u8m4(...) __riscv_vmv_s_x_u8m4_tu(__VA_ARGS__) |
| #define | vmv_s_x_u8m8(...) __riscv_vmv_s_x_u8m8_tu(__VA_ARGS__) |
| #define | vmv_s_x_u8mf2(...) __riscv_vmv_s_x_u8mf2_tu(__VA_ARGS__) |
| #define | vmv_s_x_u8mf4(...) __riscv_vmv_s_x_u8mf4_tu(__VA_ARGS__) |
| #define | vmv_s_x_u8mf8(...) __riscv_vmv_s_x_u8mf8_tu(__VA_ARGS__) |
| #define | vmv_v_v_f16m1(...) __riscv_vmv_v_v_f16m1(__VA_ARGS__) |
| #define | vmv_v_v_f16m2(...) __riscv_vmv_v_v_f16m2(__VA_ARGS__) |
| #define | vmv_v_v_f16m4(...) __riscv_vmv_v_v_f16m4(__VA_ARGS__) |
| #define | vmv_v_v_f16m8(...) __riscv_vmv_v_v_f16m8(__VA_ARGS__) |
| #define | vmv_v_v_f16mf2(...) __riscv_vmv_v_v_f16mf2(__VA_ARGS__) |
| #define | vmv_v_v_f16mf4(...) __riscv_vmv_v_v_f16mf4(__VA_ARGS__) |
| #define | vmv_v_v_f32m1(...) __riscv_vmv_v_v_f32m1(__VA_ARGS__) |
| #define | vmv_v_v_f32m2(...) __riscv_vmv_v_v_f32m2(__VA_ARGS__) |
| #define | vmv_v_v_f32m4(...) __riscv_vmv_v_v_f32m4(__VA_ARGS__) |
| #define | vmv_v_v_f32m8(...) __riscv_vmv_v_v_f32m8(__VA_ARGS__) |
| #define | vmv_v_v_f32mf2(...) __riscv_vmv_v_v_f32mf2(__VA_ARGS__) |
| #define | vmv_v_v_f64m1(...) __riscv_vmv_v_v_f64m1(__VA_ARGS__) |
| #define | vmv_v_v_f64m2(...) __riscv_vmv_v_v_f64m2(__VA_ARGS__) |
| #define | vmv_v_v_f64m4(...) __riscv_vmv_v_v_f64m4(__VA_ARGS__) |
| #define | vmv_v_v_f64m8(...) __riscv_vmv_v_v_f64m8(__VA_ARGS__) |
| #define | vmv_v_v_i16m1(...) __riscv_vmv_v_v_i16m1(__VA_ARGS__) |
| #define | vmv_v_v_i16m2(...) __riscv_vmv_v_v_i16m2(__VA_ARGS__) |
| #define | vmv_v_v_i16m4(...) __riscv_vmv_v_v_i16m4(__VA_ARGS__) |
| #define | vmv_v_v_i16m8(...) __riscv_vmv_v_v_i16m8(__VA_ARGS__) |
| #define | vmv_v_v_i16mf2(...) __riscv_vmv_v_v_i16mf2(__VA_ARGS__) |
| #define | vmv_v_v_i16mf4(...) __riscv_vmv_v_v_i16mf4(__VA_ARGS__) |
| #define | vmv_v_v_i32m1(...) __riscv_vmv_v_v_i32m1(__VA_ARGS__) |
| #define | vmv_v_v_i32m2(...) __riscv_vmv_v_v_i32m2(__VA_ARGS__) |
| #define | vmv_v_v_i32m4(...) __riscv_vmv_v_v_i32m4(__VA_ARGS__) |
| #define | vmv_v_v_i32m8(...) __riscv_vmv_v_v_i32m8(__VA_ARGS__) |
| #define | vmv_v_v_i32mf2(...) __riscv_vmv_v_v_i32mf2(__VA_ARGS__) |
| #define | vmv_v_v_i64m1(...) __riscv_vmv_v_v_i64m1(__VA_ARGS__) |
| #define | vmv_v_v_i64m2(...) __riscv_vmv_v_v_i64m2(__VA_ARGS__) |
| #define | vmv_v_v_i64m4(...) __riscv_vmv_v_v_i64m4(__VA_ARGS__) |
| #define | vmv_v_v_i64m8(...) __riscv_vmv_v_v_i64m8(__VA_ARGS__) |
| #define | vmv_v_v_i8m1(...) __riscv_vmv_v_v_i8m1(__VA_ARGS__) |
| #define | vmv_v_v_i8m2(...) __riscv_vmv_v_v_i8m2(__VA_ARGS__) |
| #define | vmv_v_v_i8m4(...) __riscv_vmv_v_v_i8m4(__VA_ARGS__) |
| #define | vmv_v_v_i8m8(...) __riscv_vmv_v_v_i8m8(__VA_ARGS__) |
| #define | vmv_v_v_i8mf2(...) __riscv_vmv_v_v_i8mf2(__VA_ARGS__) |
| #define | vmv_v_v_i8mf4(...) __riscv_vmv_v_v_i8mf4(__VA_ARGS__) |
| #define | vmv_v_v_i8mf8(...) __riscv_vmv_v_v_i8mf8(__VA_ARGS__) |
| #define | vmv_v_v_u16m1(...) __riscv_vmv_v_v_u16m1(__VA_ARGS__) |
| #define | vmv_v_v_u16m2(...) __riscv_vmv_v_v_u16m2(__VA_ARGS__) |
| #define | vmv_v_v_u16m4(...) __riscv_vmv_v_v_u16m4(__VA_ARGS__) |
| #define | vmv_v_v_u16m8(...) __riscv_vmv_v_v_u16m8(__VA_ARGS__) |
| #define | vmv_v_v_u16mf2(...) __riscv_vmv_v_v_u16mf2(__VA_ARGS__) |
| #define | vmv_v_v_u16mf4(...) __riscv_vmv_v_v_u16mf4(__VA_ARGS__) |
| #define | vmv_v_v_u32m1(...) __riscv_vmv_v_v_u32m1(__VA_ARGS__) |
| #define | vmv_v_v_u32m2(...) __riscv_vmv_v_v_u32m2(__VA_ARGS__) |
| #define | vmv_v_v_u32m4(...) __riscv_vmv_v_v_u32m4(__VA_ARGS__) |
| #define | vmv_v_v_u32m8(...) __riscv_vmv_v_v_u32m8(__VA_ARGS__) |
| #define | vmv_v_v_u32mf2(...) __riscv_vmv_v_v_u32mf2(__VA_ARGS__) |
| #define | vmv_v_v_u64m1(...) __riscv_vmv_v_v_u64m1(__VA_ARGS__) |
| #define | vmv_v_v_u64m2(...) __riscv_vmv_v_v_u64m2(__VA_ARGS__) |
| #define | vmv_v_v_u64m4(...) __riscv_vmv_v_v_u64m4(__VA_ARGS__) |
| #define | vmv_v_v_u64m8(...) __riscv_vmv_v_v_u64m8(__VA_ARGS__) |
| #define | vmv_v_v_u8m1(...) __riscv_vmv_v_v_u8m1(__VA_ARGS__) |
| #define | vmv_v_v_u8m2(...) __riscv_vmv_v_v_u8m2(__VA_ARGS__) |
| #define | vmv_v_v_u8m4(...) __riscv_vmv_v_v_u8m4(__VA_ARGS__) |
| #define | vmv_v_v_u8m8(...) __riscv_vmv_v_v_u8m8(__VA_ARGS__) |
| #define | vmv_v_v_u8mf2(...) __riscv_vmv_v_v_u8mf2(__VA_ARGS__) |
| #define | vmv_v_v_u8mf4(...) __riscv_vmv_v_v_u8mf4(__VA_ARGS__) |
| #define | vmv_v_v_u8mf8(...) __riscv_vmv_v_v_u8mf8(__VA_ARGS__) |
| #define | vmv_v_x_i16m1(...) __riscv_vmv_v_x_i16m1(__VA_ARGS__) |
| #define | vmv_v_x_i16m2(...) __riscv_vmv_v_x_i16m2(__VA_ARGS__) |
| #define | vmv_v_x_i16m4(...) __riscv_vmv_v_x_i16m4(__VA_ARGS__) |
| #define | vmv_v_x_i16m8(...) __riscv_vmv_v_x_i16m8(__VA_ARGS__) |
| #define | vmv_v_x_i16mf2(...) __riscv_vmv_v_x_i16mf2(__VA_ARGS__) |
| #define | vmv_v_x_i16mf4(...) __riscv_vmv_v_x_i16mf4(__VA_ARGS__) |
| #define | vmv_v_x_i32m1(...) __riscv_vmv_v_x_i32m1(__VA_ARGS__) |
| #define | vmv_v_x_i32m2(...) __riscv_vmv_v_x_i32m2(__VA_ARGS__) |
| #define | vmv_v_x_i32m4(...) __riscv_vmv_v_x_i32m4(__VA_ARGS__) |
| #define | vmv_v_x_i32m8(...) __riscv_vmv_v_x_i32m8(__VA_ARGS__) |
| #define | vmv_v_x_i32mf2(...) __riscv_vmv_v_x_i32mf2(__VA_ARGS__) |
| #define | vmv_v_x_i64m1(...) __riscv_vmv_v_x_i64m1(__VA_ARGS__) |
| #define | vmv_v_x_i64m2(...) __riscv_vmv_v_x_i64m2(__VA_ARGS__) |
| #define | vmv_v_x_i64m4(...) __riscv_vmv_v_x_i64m4(__VA_ARGS__) |
| #define | vmv_v_x_i64m8(...) __riscv_vmv_v_x_i64m8(__VA_ARGS__) |
| #define | vmv_v_x_i8m1(...) __riscv_vmv_v_x_i8m1(__VA_ARGS__) |
| #define | vmv_v_x_i8m2(...) __riscv_vmv_v_x_i8m2(__VA_ARGS__) |
| #define | vmv_v_x_i8m4(...) __riscv_vmv_v_x_i8m4(__VA_ARGS__) |
| #define | vmv_v_x_i8m8(...) __riscv_vmv_v_x_i8m8(__VA_ARGS__) |
| #define | vmv_v_x_i8mf2(...) __riscv_vmv_v_x_i8mf2(__VA_ARGS__) |
| #define | vmv_v_x_i8mf4(...) __riscv_vmv_v_x_i8mf4(__VA_ARGS__) |
| #define | vmv_v_x_i8mf8(...) __riscv_vmv_v_x_i8mf8(__VA_ARGS__) |
| #define | vmv_v_x_u16m1(...) __riscv_vmv_v_x_u16m1(__VA_ARGS__) |
| #define | vmv_v_x_u16m2(...) __riscv_vmv_v_x_u16m2(__VA_ARGS__) |
| #define | vmv_v_x_u16m4(...) __riscv_vmv_v_x_u16m4(__VA_ARGS__) |
| #define | vmv_v_x_u16m8(...) __riscv_vmv_v_x_u16m8(__VA_ARGS__) |
| #define | vmv_v_x_u16mf2(...) __riscv_vmv_v_x_u16mf2(__VA_ARGS__) |
| #define | vmv_v_x_u16mf4(...) __riscv_vmv_v_x_u16mf4(__VA_ARGS__) |
| #define | vmv_v_x_u32m1(...) __riscv_vmv_v_x_u32m1(__VA_ARGS__) |
| #define | vmv_v_x_u32m2(...) __riscv_vmv_v_x_u32m2(__VA_ARGS__) |
| #define | vmv_v_x_u32m4(...) __riscv_vmv_v_x_u32m4(__VA_ARGS__) |
| #define | vmv_v_x_u32m8(...) __riscv_vmv_v_x_u32m8(__VA_ARGS__) |
| #define | vmv_v_x_u32mf2(...) __riscv_vmv_v_x_u32mf2(__VA_ARGS__) |
| #define | vmv_v_x_u64m1(...) __riscv_vmv_v_x_u64m1(__VA_ARGS__) |
| #define | vmv_v_x_u64m2(...) __riscv_vmv_v_x_u64m2(__VA_ARGS__) |
| #define | vmv_v_x_u64m4(...) __riscv_vmv_v_x_u64m4(__VA_ARGS__) |
| #define | vmv_v_x_u64m8(...) __riscv_vmv_v_x_u64m8(__VA_ARGS__) |
| #define | vmv_v_x_u8m1(...) __riscv_vmv_v_x_u8m1(__VA_ARGS__) |
| #define | vmv_v_x_u8m2(...) __riscv_vmv_v_x_u8m2(__VA_ARGS__) |
| #define | vmv_v_x_u8m4(...) __riscv_vmv_v_x_u8m4(__VA_ARGS__) |
| #define | vmv_v_x_u8m8(...) __riscv_vmv_v_x_u8m8(__VA_ARGS__) |
| #define | vmv_v_x_u8mf2(...) __riscv_vmv_v_x_u8mf2(__VA_ARGS__) |
| #define | vmv_v_x_u8mf4(...) __riscv_vmv_v_x_u8mf4(__VA_ARGS__) |
| #define | vmv_v_x_u8mf8(...) __riscv_vmv_v_x_u8mf8(__VA_ARGS__) |
| #define | vmv_x_s_i16m1_i16(...) __riscv_vmv_x_s_i16m1_i16(__VA_ARGS__) |
| #define | vmv_x_s_i16m2_i16(...) __riscv_vmv_x_s_i16m2_i16(__VA_ARGS__) |
| #define | vmv_x_s_i16m4_i16(...) __riscv_vmv_x_s_i16m4_i16(__VA_ARGS__) |
| #define | vmv_x_s_i16m8_i16(...) __riscv_vmv_x_s_i16m8_i16(__VA_ARGS__) |
| #define | vmv_x_s_i16mf2_i16(...) __riscv_vmv_x_s_i16mf2_i16(__VA_ARGS__) |
| #define | vmv_x_s_i16mf4_i16(...) __riscv_vmv_x_s_i16mf4_i16(__VA_ARGS__) |
| #define | vmv_x_s_i32m1_i32(...) __riscv_vmv_x_s_i32m1_i32(__VA_ARGS__) |
| #define | vmv_x_s_i32m2_i32(...) __riscv_vmv_x_s_i32m2_i32(__VA_ARGS__) |
| #define | vmv_x_s_i32m4_i32(...) __riscv_vmv_x_s_i32m4_i32(__VA_ARGS__) |
| #define | vmv_x_s_i32m8_i32(...) __riscv_vmv_x_s_i32m8_i32(__VA_ARGS__) |
| #define | vmv_x_s_i32mf2_i32(...) __riscv_vmv_x_s_i32mf2_i32(__VA_ARGS__) |
| #define | vmv_x_s_i64m1_i64(...) __riscv_vmv_x_s_i64m1_i64(__VA_ARGS__) |
| #define | vmv_x_s_i64m2_i64(...) __riscv_vmv_x_s_i64m2_i64(__VA_ARGS__) |
| #define | vmv_x_s_i64m4_i64(...) __riscv_vmv_x_s_i64m4_i64(__VA_ARGS__) |
| #define | vmv_x_s_i64m8_i64(...) __riscv_vmv_x_s_i64m8_i64(__VA_ARGS__) |
| #define | vmv_x_s_i8m1_i8(...) __riscv_vmv_x_s_i8m1_i8(__VA_ARGS__) |
| #define | vmv_x_s_i8m2_i8(...) __riscv_vmv_x_s_i8m2_i8(__VA_ARGS__) |
| #define | vmv_x_s_i8m4_i8(...) __riscv_vmv_x_s_i8m4_i8(__VA_ARGS__) |
| #define | vmv_x_s_i8m8_i8(...) __riscv_vmv_x_s_i8m8_i8(__VA_ARGS__) |
| #define | vmv_x_s_i8mf2_i8(...) __riscv_vmv_x_s_i8mf2_i8(__VA_ARGS__) |
| #define | vmv_x_s_i8mf4_i8(...) __riscv_vmv_x_s_i8mf4_i8(__VA_ARGS__) |
| #define | vmv_x_s_i8mf8_i8(...) __riscv_vmv_x_s_i8mf8_i8(__VA_ARGS__) |
| #define | vmv_x_s_u16m1_u16(...) __riscv_vmv_x_s_u16m1_u16(__VA_ARGS__) |
| #define | vmv_x_s_u16m2_u16(...) __riscv_vmv_x_s_u16m2_u16(__VA_ARGS__) |
| #define | vmv_x_s_u16m4_u16(...) __riscv_vmv_x_s_u16m4_u16(__VA_ARGS__) |
| #define | vmv_x_s_u16m8_u16(...) __riscv_vmv_x_s_u16m8_u16(__VA_ARGS__) |
| #define | vmv_x_s_u16mf2_u16(...) __riscv_vmv_x_s_u16mf2_u16(__VA_ARGS__) |
| #define | vmv_x_s_u16mf4_u16(...) __riscv_vmv_x_s_u16mf4_u16(__VA_ARGS__) |
| #define | vmv_x_s_u32m1_u32(...) __riscv_vmv_x_s_u32m1_u32(__VA_ARGS__) |
| #define | vmv_x_s_u32m2_u32(...) __riscv_vmv_x_s_u32m2_u32(__VA_ARGS__) |
| #define | vmv_x_s_u32m4_u32(...) __riscv_vmv_x_s_u32m4_u32(__VA_ARGS__) |
| #define | vmv_x_s_u32m8_u32(...) __riscv_vmv_x_s_u32m8_u32(__VA_ARGS__) |
| #define | vmv_x_s_u32mf2_u32(...) __riscv_vmv_x_s_u32mf2_u32(__VA_ARGS__) |
| #define | vmv_x_s_u64m1_u64(...) __riscv_vmv_x_s_u64m1_u64(__VA_ARGS__) |
| #define | vmv_x_s_u64m2_u64(...) __riscv_vmv_x_s_u64m2_u64(__VA_ARGS__) |
| #define | vmv_x_s_u64m4_u64(...) __riscv_vmv_x_s_u64m4_u64(__VA_ARGS__) |
| #define | vmv_x_s_u64m8_u64(...) __riscv_vmv_x_s_u64m8_u64(__VA_ARGS__) |
| #define | vmv_x_s_u8m1_u8(...) __riscv_vmv_x_s_u8m1_u8(__VA_ARGS__) |
| #define | vmv_x_s_u8m2_u8(...) __riscv_vmv_x_s_u8m2_u8(__VA_ARGS__) |
| #define | vmv_x_s_u8m4_u8(...) __riscv_vmv_x_s_u8m4_u8(__VA_ARGS__) |
| #define | vmv_x_s_u8m8_u8(...) __riscv_vmv_x_s_u8m8_u8(__VA_ARGS__) |
| #define | vmv_x_s_u8mf2_u8(...) __riscv_vmv_x_s_u8mf2_u8(__VA_ARGS__) |
| #define | vmv_x_s_u8mf4_u8(...) __riscv_vmv_x_s_u8mf4_u8(__VA_ARGS__) |
| #define | vmv_x_s_u8mf8_u8(...) __riscv_vmv_x_s_u8mf8_u8(__VA_ARGS__) |
| #define | vmxnor_mm_b1(...) __riscv_vmxnor_mm_b1(__VA_ARGS__) |
| #define | vmxnor_mm_b16(...) __riscv_vmxnor_mm_b16(__VA_ARGS__) |
| #define | vmxnor_mm_b2(...) __riscv_vmxnor_mm_b2(__VA_ARGS__) |
| #define | vmxnor_mm_b32(...) __riscv_vmxnor_mm_b32(__VA_ARGS__) |
| #define | vmxnor_mm_b4(...) __riscv_vmxnor_mm_b4(__VA_ARGS__) |
| #define | vmxnor_mm_b64(...) __riscv_vmxnor_mm_b64(__VA_ARGS__) |
| #define | vmxnor_mm_b8(...) __riscv_vmxnor_mm_b8(__VA_ARGS__) |
| #define | vmxor_mm_b1(...) __riscv_vmxor_mm_b1(__VA_ARGS__) |
| #define | vmxor_mm_b16(...) __riscv_vmxor_mm_b16(__VA_ARGS__) |
| #define | vmxor_mm_b2(...) __riscv_vmxor_mm_b2(__VA_ARGS__) |
| #define | vmxor_mm_b32(...) __riscv_vmxor_mm_b32(__VA_ARGS__) |
| #define | vmxor_mm_b4(...) __riscv_vmxor_mm_b4(__VA_ARGS__) |
| #define | vmxor_mm_b64(...) __riscv_vmxor_mm_b64(__VA_ARGS__) |
| #define | vmxor_mm_b8(...) __riscv_vmxor_mm_b8(__VA_ARGS__) |
| #define | vnclip_wv_i16m1(...) __riscv_vnclip_wv_i16m1(__VA_ARGS__) |
| #define | vnclip_wv_i16m1_m(...) __riscv_vnclip_wv_i16m1_tumu(__VA_ARGS__) |
| #define | vnclip_wv_i16m2(...) __riscv_vnclip_wv_i16m2(__VA_ARGS__) |
| #define | vnclip_wv_i16m2_m(...) __riscv_vnclip_wv_i16m2_tumu(__VA_ARGS__) |
| #define | vnclip_wv_i16m4(...) __riscv_vnclip_wv_i16m4(__VA_ARGS__) |
| #define | vnclip_wv_i16m4_m(...) __riscv_vnclip_wv_i16m4_tumu(__VA_ARGS__) |
| #define | vnclip_wv_i16mf2(...) __riscv_vnclip_wv_i16mf2(__VA_ARGS__) |
| #define | vnclip_wv_i16mf2_m(...) __riscv_vnclip_wv_i16mf2_tumu(__VA_ARGS__) |
| #define | vnclip_wv_i16mf4(...) __riscv_vnclip_wv_i16mf4(__VA_ARGS__) |
| #define | vnclip_wv_i16mf4_m(...) __riscv_vnclip_wv_i16mf4_tumu(__VA_ARGS__) |
| #define | vnclip_wv_i32m1(...) __riscv_vnclip_wv_i32m1(__VA_ARGS__) |
| #define | vnclip_wv_i32m1_m(...) __riscv_vnclip_wv_i32m1_tumu(__VA_ARGS__) |
| #define | vnclip_wv_i32m2(...) __riscv_vnclip_wv_i32m2(__VA_ARGS__) |
| #define | vnclip_wv_i32m2_m(...) __riscv_vnclip_wv_i32m2_tumu(__VA_ARGS__) |
| #define | vnclip_wv_i32m4(...) __riscv_vnclip_wv_i32m4(__VA_ARGS__) |
| #define | vnclip_wv_i32m4_m(...) __riscv_vnclip_wv_i32m4_tumu(__VA_ARGS__) |
| #define | vnclip_wv_i32mf2(...) __riscv_vnclip_wv_i32mf2(__VA_ARGS__) |
| #define | vnclip_wv_i32mf2_m(...) __riscv_vnclip_wv_i32mf2_tumu(__VA_ARGS__) |
| #define | vnclip_wv_i8m1(...) __riscv_vnclip_wv_i8m1(__VA_ARGS__) |
| #define | vnclip_wv_i8m1_m(...) __riscv_vnclip_wv_i8m1_tumu(__VA_ARGS__) |
| #define | vnclip_wv_i8m2(...) __riscv_vnclip_wv_i8m2(__VA_ARGS__) |
| #define | vnclip_wv_i8m2_m(...) __riscv_vnclip_wv_i8m2_tumu(__VA_ARGS__) |
| #define | vnclip_wv_i8m4(...) __riscv_vnclip_wv_i8m4(__VA_ARGS__) |
| #define | vnclip_wv_i8m4_m(...) __riscv_vnclip_wv_i8m4_tumu(__VA_ARGS__) |
| #define | vnclip_wv_i8mf2(...) __riscv_vnclip_wv_i8mf2(__VA_ARGS__) |
| #define | vnclip_wv_i8mf2_m(...) __riscv_vnclip_wv_i8mf2_tumu(__VA_ARGS__) |
| #define | vnclip_wv_i8mf4(...) __riscv_vnclip_wv_i8mf4(__VA_ARGS__) |
| #define | vnclip_wv_i8mf4_m(...) __riscv_vnclip_wv_i8mf4_tumu(__VA_ARGS__) |
| #define | vnclip_wv_i8mf8(...) __riscv_vnclip_wv_i8mf8(__VA_ARGS__) |
| #define | vnclip_wv_i8mf8_m(...) __riscv_vnclip_wv_i8mf8_tumu(__VA_ARGS__) |
| #define | vnclip_wx_i16m1(...) __riscv_vnclip_wx_i16m1(__VA_ARGS__) |
| #define | vnclip_wx_i16m1_m(...) __riscv_vnclip_wx_i16m1_tumu(__VA_ARGS__) |
| #define | vnclip_wx_i16m2(...) __riscv_vnclip_wx_i16m2(__VA_ARGS__) |
| #define | vnclip_wx_i16m2_m(...) __riscv_vnclip_wx_i16m2_tumu(__VA_ARGS__) |
| #define | vnclip_wx_i16m4(...) __riscv_vnclip_wx_i16m4(__VA_ARGS__) |
| #define | vnclip_wx_i16m4_m(...) __riscv_vnclip_wx_i16m4_tumu(__VA_ARGS__) |
| #define | vnclip_wx_i16mf2(...) __riscv_vnclip_wx_i16mf2(__VA_ARGS__) |
| #define | vnclip_wx_i16mf2_m(...) __riscv_vnclip_wx_i16mf2_tumu(__VA_ARGS__) |
| #define | vnclip_wx_i16mf4(...) __riscv_vnclip_wx_i16mf4(__VA_ARGS__) |
| #define | vnclip_wx_i16mf4_m(...) __riscv_vnclip_wx_i16mf4_tumu(__VA_ARGS__) |
| #define | vnclip_wx_i32m1(...) __riscv_vnclip_wx_i32m1(__VA_ARGS__) |
| #define | vnclip_wx_i32m1_m(...) __riscv_vnclip_wx_i32m1_tumu(__VA_ARGS__) |
| #define | vnclip_wx_i32m2(...) __riscv_vnclip_wx_i32m2(__VA_ARGS__) |
| #define | vnclip_wx_i32m2_m(...) __riscv_vnclip_wx_i32m2_tumu(__VA_ARGS__) |
| #define | vnclip_wx_i32m4(...) __riscv_vnclip_wx_i32m4(__VA_ARGS__) |
| #define | vnclip_wx_i32m4_m(...) __riscv_vnclip_wx_i32m4_tumu(__VA_ARGS__) |
| #define | vnclip_wx_i32mf2(...) __riscv_vnclip_wx_i32mf2(__VA_ARGS__) |
| #define | vnclip_wx_i32mf2_m(...) __riscv_vnclip_wx_i32mf2_tumu(__VA_ARGS__) |
| #define | vnclip_wx_i8m1(...) __riscv_vnclip_wx_i8m1(__VA_ARGS__) |
| #define | vnclip_wx_i8m1_m(...) __riscv_vnclip_wx_i8m1_tumu(__VA_ARGS__) |
| #define | vnclip_wx_i8m2(...) __riscv_vnclip_wx_i8m2(__VA_ARGS__) |
| #define | vnclip_wx_i8m2_m(...) __riscv_vnclip_wx_i8m2_tumu(__VA_ARGS__) |
| #define | vnclip_wx_i8m4(...) __riscv_vnclip_wx_i8m4(__VA_ARGS__) |
| #define | vnclip_wx_i8m4_m(...) __riscv_vnclip_wx_i8m4_tumu(__VA_ARGS__) |
| #define | vnclip_wx_i8mf2(...) __riscv_vnclip_wx_i8mf2(__VA_ARGS__) |
| #define | vnclip_wx_i8mf2_m(...) __riscv_vnclip_wx_i8mf2_tumu(__VA_ARGS__) |
| #define | vnclip_wx_i8mf4(...) __riscv_vnclip_wx_i8mf4(__VA_ARGS__) |
| #define | vnclip_wx_i8mf4_m(...) __riscv_vnclip_wx_i8mf4_tumu(__VA_ARGS__) |
| #define | vnclip_wx_i8mf8(...) __riscv_vnclip_wx_i8mf8(__VA_ARGS__) |
| #define | vnclip_wx_i8mf8_m(...) __riscv_vnclip_wx_i8mf8_tumu(__VA_ARGS__) |
| #define | vnclipu_wv_u16m1(...) __riscv_vnclipu_wv_u16m1(__VA_ARGS__) |
| #define | vnclipu_wv_u16m1_m(...) __riscv_vnclipu_wv_u16m1_tumu(__VA_ARGS__) |
| #define | vnclipu_wv_u16m2(...) __riscv_vnclipu_wv_u16m2(__VA_ARGS__) |
| #define | vnclipu_wv_u16m2_m(...) __riscv_vnclipu_wv_u16m2_tumu(__VA_ARGS__) |
| #define | vnclipu_wv_u16m4(...) __riscv_vnclipu_wv_u16m4(__VA_ARGS__) |
| #define | vnclipu_wv_u16m4_m(...) __riscv_vnclipu_wv_u16m4_tumu(__VA_ARGS__) |
| #define | vnclipu_wv_u16mf2(...) __riscv_vnclipu_wv_u16mf2(__VA_ARGS__) |
| #define | vnclipu_wv_u16mf2_m(...) __riscv_vnclipu_wv_u16mf2_tumu(__VA_ARGS__) |
| #define | vnclipu_wv_u16mf4(...) __riscv_vnclipu_wv_u16mf4(__VA_ARGS__) |
| #define | vnclipu_wv_u16mf4_m(...) __riscv_vnclipu_wv_u16mf4_tumu(__VA_ARGS__) |
| #define | vnclipu_wv_u32m1(...) __riscv_vnclipu_wv_u32m1(__VA_ARGS__) |
| #define | vnclipu_wv_u32m1_m(...) __riscv_vnclipu_wv_u32m1_tumu(__VA_ARGS__) |
| #define | vnclipu_wv_u32m2(...) __riscv_vnclipu_wv_u32m2(__VA_ARGS__) |
| #define | vnclipu_wv_u32m2_m(...) __riscv_vnclipu_wv_u32m2_tumu(__VA_ARGS__) |
| #define | vnclipu_wv_u32m4(...) __riscv_vnclipu_wv_u32m4(__VA_ARGS__) |
| #define | vnclipu_wv_u32m4_m(...) __riscv_vnclipu_wv_u32m4_tumu(__VA_ARGS__) |
| #define | vnclipu_wv_u32mf2(...) __riscv_vnclipu_wv_u32mf2(__VA_ARGS__) |
| #define | vnclipu_wv_u32mf2_m(...) __riscv_vnclipu_wv_u32mf2_tumu(__VA_ARGS__) |
| #define | vnclipu_wv_u8m1(...) __riscv_vnclipu_wv_u8m1(__VA_ARGS__) |
| #define | vnclipu_wv_u8m1_m(...) __riscv_vnclipu_wv_u8m1_tumu(__VA_ARGS__) |
| #define | vnclipu_wv_u8m2(...) __riscv_vnclipu_wv_u8m2(__VA_ARGS__) |
| #define | vnclipu_wv_u8m2_m(...) __riscv_vnclipu_wv_u8m2_tumu(__VA_ARGS__) |
| #define | vnclipu_wv_u8m4(...) __riscv_vnclipu_wv_u8m4(__VA_ARGS__) |
| #define | vnclipu_wv_u8m4_m(...) __riscv_vnclipu_wv_u8m4_tumu(__VA_ARGS__) |
| #define | vnclipu_wv_u8mf2(...) __riscv_vnclipu_wv_u8mf2(__VA_ARGS__) |
| #define | vnclipu_wv_u8mf2_m(...) __riscv_vnclipu_wv_u8mf2_tumu(__VA_ARGS__) |
| #define | vnclipu_wv_u8mf4(...) __riscv_vnclipu_wv_u8mf4(__VA_ARGS__) |
| #define | vnclipu_wv_u8mf4_m(...) __riscv_vnclipu_wv_u8mf4_tumu(__VA_ARGS__) |
| #define | vnclipu_wv_u8mf8(...) __riscv_vnclipu_wv_u8mf8(__VA_ARGS__) |
| #define | vnclipu_wv_u8mf8_m(...) __riscv_vnclipu_wv_u8mf8_tumu(__VA_ARGS__) |
| #define | vnclipu_wx_u16m1(...) __riscv_vnclipu_wx_u16m1(__VA_ARGS__) |
| #define | vnclipu_wx_u16m1_m(...) __riscv_vnclipu_wx_u16m1_tumu(__VA_ARGS__) |
| #define | vnclipu_wx_u16m2(...) __riscv_vnclipu_wx_u16m2(__VA_ARGS__) |
| #define | vnclipu_wx_u16m2_m(...) __riscv_vnclipu_wx_u16m2_tumu(__VA_ARGS__) |
| #define | vnclipu_wx_u16m4(...) __riscv_vnclipu_wx_u16m4(__VA_ARGS__) |
| #define | vnclipu_wx_u16m4_m(...) __riscv_vnclipu_wx_u16m4_tumu(__VA_ARGS__) |
| #define | vnclipu_wx_u16mf2(...) __riscv_vnclipu_wx_u16mf2(__VA_ARGS__) |
| #define | vnclipu_wx_u16mf2_m(...) __riscv_vnclipu_wx_u16mf2_tumu(__VA_ARGS__) |
| #define | vnclipu_wx_u16mf4(...) __riscv_vnclipu_wx_u16mf4(__VA_ARGS__) |
| #define | vnclipu_wx_u16mf4_m(...) __riscv_vnclipu_wx_u16mf4_tumu(__VA_ARGS__) |
| #define | vnclipu_wx_u32m1(...) __riscv_vnclipu_wx_u32m1(__VA_ARGS__) |
| #define | vnclipu_wx_u32m1_m(...) __riscv_vnclipu_wx_u32m1_tumu(__VA_ARGS__) |
| #define | vnclipu_wx_u32m2(...) __riscv_vnclipu_wx_u32m2(__VA_ARGS__) |
| #define | vnclipu_wx_u32m2_m(...) __riscv_vnclipu_wx_u32m2_tumu(__VA_ARGS__) |
| #define | vnclipu_wx_u32m4(...) __riscv_vnclipu_wx_u32m4(__VA_ARGS__) |
| #define | vnclipu_wx_u32m4_m(...) __riscv_vnclipu_wx_u32m4_tumu(__VA_ARGS__) |
| #define | vnclipu_wx_u32mf2(...) __riscv_vnclipu_wx_u32mf2(__VA_ARGS__) |
| #define | vnclipu_wx_u32mf2_m(...) __riscv_vnclipu_wx_u32mf2_tumu(__VA_ARGS__) |
| #define | vnclipu_wx_u8m1(...) __riscv_vnclipu_wx_u8m1(__VA_ARGS__) |
| #define | vnclipu_wx_u8m1_m(...) __riscv_vnclipu_wx_u8m1_tumu(__VA_ARGS__) |
| #define | vnclipu_wx_u8m2(...) __riscv_vnclipu_wx_u8m2(__VA_ARGS__) |
| #define | vnclipu_wx_u8m2_m(...) __riscv_vnclipu_wx_u8m2_tumu(__VA_ARGS__) |
| #define | vnclipu_wx_u8m4(...) __riscv_vnclipu_wx_u8m4(__VA_ARGS__) |
| #define | vnclipu_wx_u8m4_m(...) __riscv_vnclipu_wx_u8m4_tumu(__VA_ARGS__) |
| #define | vnclipu_wx_u8mf2(...) __riscv_vnclipu_wx_u8mf2(__VA_ARGS__) |
| #define | vnclipu_wx_u8mf2_m(...) __riscv_vnclipu_wx_u8mf2_tumu(__VA_ARGS__) |
| #define | vnclipu_wx_u8mf4(...) __riscv_vnclipu_wx_u8mf4(__VA_ARGS__) |
| #define | vnclipu_wx_u8mf4_m(...) __riscv_vnclipu_wx_u8mf4_tumu(__VA_ARGS__) |
| #define | vnclipu_wx_u8mf8(...) __riscv_vnclipu_wx_u8mf8(__VA_ARGS__) |
| #define | vnclipu_wx_u8mf8_m(...) __riscv_vnclipu_wx_u8mf8_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_i16m1(...) __riscv_vncvt_x_x_w_i16m1(__VA_ARGS__) |
| #define | vncvt_x_x_w_i16m1_m(...) __riscv_vncvt_x_x_w_i16m1_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_i16m2(...) __riscv_vncvt_x_x_w_i16m2(__VA_ARGS__) |
| #define | vncvt_x_x_w_i16m2_m(...) __riscv_vncvt_x_x_w_i16m2_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_i16m4(...) __riscv_vncvt_x_x_w_i16m4(__VA_ARGS__) |
| #define | vncvt_x_x_w_i16m4_m(...) __riscv_vncvt_x_x_w_i16m4_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_i16mf2(...) __riscv_vncvt_x_x_w_i16mf2(__VA_ARGS__) |
| #define | vncvt_x_x_w_i16mf2_m(...) __riscv_vncvt_x_x_w_i16mf2_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_i16mf4(...) __riscv_vncvt_x_x_w_i16mf4(__VA_ARGS__) |
| #define | vncvt_x_x_w_i16mf4_m(...) __riscv_vncvt_x_x_w_i16mf4_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_i32m1(...) __riscv_vncvt_x_x_w_i32m1(__VA_ARGS__) |
| #define | vncvt_x_x_w_i32m1_m(...) __riscv_vncvt_x_x_w_i32m1_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_i32m2(...) __riscv_vncvt_x_x_w_i32m2(__VA_ARGS__) |
| #define | vncvt_x_x_w_i32m2_m(...) __riscv_vncvt_x_x_w_i32m2_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_i32m4(...) __riscv_vncvt_x_x_w_i32m4(__VA_ARGS__) |
| #define | vncvt_x_x_w_i32m4_m(...) __riscv_vncvt_x_x_w_i32m4_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_i32mf2(...) __riscv_vncvt_x_x_w_i32mf2(__VA_ARGS__) |
| #define | vncvt_x_x_w_i32mf2_m(...) __riscv_vncvt_x_x_w_i32mf2_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_i8m1(...) __riscv_vncvt_x_x_w_i8m1(__VA_ARGS__) |
| #define | vncvt_x_x_w_i8m1_m(...) __riscv_vncvt_x_x_w_i8m1_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_i8m2(...) __riscv_vncvt_x_x_w_i8m2(__VA_ARGS__) |
| #define | vncvt_x_x_w_i8m2_m(...) __riscv_vncvt_x_x_w_i8m2_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_i8m4(...) __riscv_vncvt_x_x_w_i8m4(__VA_ARGS__) |
| #define | vncvt_x_x_w_i8m4_m(...) __riscv_vncvt_x_x_w_i8m4_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_i8mf2(...) __riscv_vncvt_x_x_w_i8mf2(__VA_ARGS__) |
| #define | vncvt_x_x_w_i8mf2_m(...) __riscv_vncvt_x_x_w_i8mf2_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_i8mf4(...) __riscv_vncvt_x_x_w_i8mf4(__VA_ARGS__) |
| #define | vncvt_x_x_w_i8mf4_m(...) __riscv_vncvt_x_x_w_i8mf4_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_i8mf8(...) __riscv_vncvt_x_x_w_i8mf8(__VA_ARGS__) |
| #define | vncvt_x_x_w_i8mf8_m(...) __riscv_vncvt_x_x_w_i8mf8_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_u16m1(...) __riscv_vncvt_x_x_w_u16m1(__VA_ARGS__) |
| #define | vncvt_x_x_w_u16m1_m(...) __riscv_vncvt_x_x_w_u16m1_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_u16m2(...) __riscv_vncvt_x_x_w_u16m2(__VA_ARGS__) |
| #define | vncvt_x_x_w_u16m2_m(...) __riscv_vncvt_x_x_w_u16m2_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_u16m4(...) __riscv_vncvt_x_x_w_u16m4(__VA_ARGS__) |
| #define | vncvt_x_x_w_u16m4_m(...) __riscv_vncvt_x_x_w_u16m4_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_u16mf2(...) __riscv_vncvt_x_x_w_u16mf2(__VA_ARGS__) |
| #define | vncvt_x_x_w_u16mf2_m(...) __riscv_vncvt_x_x_w_u16mf2_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_u16mf4(...) __riscv_vncvt_x_x_w_u16mf4(__VA_ARGS__) |
| #define | vncvt_x_x_w_u16mf4_m(...) __riscv_vncvt_x_x_w_u16mf4_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_u32m1(...) __riscv_vncvt_x_x_w_u32m1(__VA_ARGS__) |
| #define | vncvt_x_x_w_u32m1_m(...) __riscv_vncvt_x_x_w_u32m1_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_u32m2(...) __riscv_vncvt_x_x_w_u32m2(__VA_ARGS__) |
| #define | vncvt_x_x_w_u32m2_m(...) __riscv_vncvt_x_x_w_u32m2_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_u32m4(...) __riscv_vncvt_x_x_w_u32m4(__VA_ARGS__) |
| #define | vncvt_x_x_w_u32m4_m(...) __riscv_vncvt_x_x_w_u32m4_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_u32mf2(...) __riscv_vncvt_x_x_w_u32mf2(__VA_ARGS__) |
| #define | vncvt_x_x_w_u32mf2_m(...) __riscv_vncvt_x_x_w_u32mf2_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_u8m1(...) __riscv_vncvt_x_x_w_u8m1(__VA_ARGS__) |
| #define | vncvt_x_x_w_u8m1_m(...) __riscv_vncvt_x_x_w_u8m1_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_u8m2(...) __riscv_vncvt_x_x_w_u8m2(__VA_ARGS__) |
| #define | vncvt_x_x_w_u8m2_m(...) __riscv_vncvt_x_x_w_u8m2_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_u8m4(...) __riscv_vncvt_x_x_w_u8m4(__VA_ARGS__) |
| #define | vncvt_x_x_w_u8m4_m(...) __riscv_vncvt_x_x_w_u8m4_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_u8mf2(...) __riscv_vncvt_x_x_w_u8mf2(__VA_ARGS__) |
| #define | vncvt_x_x_w_u8mf2_m(...) __riscv_vncvt_x_x_w_u8mf2_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_u8mf4(...) __riscv_vncvt_x_x_w_u8mf4(__VA_ARGS__) |
| #define | vncvt_x_x_w_u8mf4_m(...) __riscv_vncvt_x_x_w_u8mf4_tumu(__VA_ARGS__) |
| #define | vncvt_x_x_w_u8mf8(...) __riscv_vncvt_x_x_w_u8mf8(__VA_ARGS__) |
| #define | vncvt_x_x_w_u8mf8_m(...) __riscv_vncvt_x_x_w_u8mf8_tumu(__VA_ARGS__) |
| #define | vneg_v_i16m1(...) __riscv_vneg_v_i16m1(__VA_ARGS__) |
| #define | vneg_v_i16m1_m(...) __riscv_vneg_v_i16m1_tumu(__VA_ARGS__) |
| #define | vneg_v_i16m2(...) __riscv_vneg_v_i16m2(__VA_ARGS__) |
| #define | vneg_v_i16m2_m(...) __riscv_vneg_v_i16m2_tumu(__VA_ARGS__) |
| #define | vneg_v_i16m4(...) __riscv_vneg_v_i16m4(__VA_ARGS__) |
| #define | vneg_v_i16m4_m(...) __riscv_vneg_v_i16m4_tumu(__VA_ARGS__) |
| #define | vneg_v_i16m8(...) __riscv_vneg_v_i16m8(__VA_ARGS__) |
| #define | vneg_v_i16m8_m(...) __riscv_vneg_v_i16m8_tumu(__VA_ARGS__) |
| #define | vneg_v_i16mf2(...) __riscv_vneg_v_i16mf2(__VA_ARGS__) |
| #define | vneg_v_i16mf2_m(...) __riscv_vneg_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vneg_v_i16mf4(...) __riscv_vneg_v_i16mf4(__VA_ARGS__) |
| #define | vneg_v_i16mf4_m(...) __riscv_vneg_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vneg_v_i32m1(...) __riscv_vneg_v_i32m1(__VA_ARGS__) |
| #define | vneg_v_i32m1_m(...) __riscv_vneg_v_i32m1_tumu(__VA_ARGS__) |
| #define | vneg_v_i32m2(...) __riscv_vneg_v_i32m2(__VA_ARGS__) |
| #define | vneg_v_i32m2_m(...) __riscv_vneg_v_i32m2_tumu(__VA_ARGS__) |
| #define | vneg_v_i32m4(...) __riscv_vneg_v_i32m4(__VA_ARGS__) |
| #define | vneg_v_i32m4_m(...) __riscv_vneg_v_i32m4_tumu(__VA_ARGS__) |
| #define | vneg_v_i32m8(...) __riscv_vneg_v_i32m8(__VA_ARGS__) |
| #define | vneg_v_i32m8_m(...) __riscv_vneg_v_i32m8_tumu(__VA_ARGS__) |
| #define | vneg_v_i32mf2(...) __riscv_vneg_v_i32mf2(__VA_ARGS__) |
| #define | vneg_v_i32mf2_m(...) __riscv_vneg_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vneg_v_i64m1(...) __riscv_vneg_v_i64m1(__VA_ARGS__) |
| #define | vneg_v_i64m1_m(...) __riscv_vneg_v_i64m1_tumu(__VA_ARGS__) |
| #define | vneg_v_i64m2(...) __riscv_vneg_v_i64m2(__VA_ARGS__) |
| #define | vneg_v_i64m2_m(...) __riscv_vneg_v_i64m2_tumu(__VA_ARGS__) |
| #define | vneg_v_i64m4(...) __riscv_vneg_v_i64m4(__VA_ARGS__) |
| #define | vneg_v_i64m4_m(...) __riscv_vneg_v_i64m4_tumu(__VA_ARGS__) |
| #define | vneg_v_i64m8(...) __riscv_vneg_v_i64m8(__VA_ARGS__) |
| #define | vneg_v_i64m8_m(...) __riscv_vneg_v_i64m8_tumu(__VA_ARGS__) |
| #define | vneg_v_i8m1(...) __riscv_vneg_v_i8m1(__VA_ARGS__) |
| #define | vneg_v_i8m1_m(...) __riscv_vneg_v_i8m1_tumu(__VA_ARGS__) |
| #define | vneg_v_i8m2(...) __riscv_vneg_v_i8m2(__VA_ARGS__) |
| #define | vneg_v_i8m2_m(...) __riscv_vneg_v_i8m2_tumu(__VA_ARGS__) |
| #define | vneg_v_i8m4(...) __riscv_vneg_v_i8m4(__VA_ARGS__) |
| #define | vneg_v_i8m4_m(...) __riscv_vneg_v_i8m4_tumu(__VA_ARGS__) |
| #define | vneg_v_i8m8(...) __riscv_vneg_v_i8m8(__VA_ARGS__) |
| #define | vneg_v_i8m8_m(...) __riscv_vneg_v_i8m8_tumu(__VA_ARGS__) |
| #define | vneg_v_i8mf2(...) __riscv_vneg_v_i8mf2(__VA_ARGS__) |
| #define | vneg_v_i8mf2_m(...) __riscv_vneg_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vneg_v_i8mf4(...) __riscv_vneg_v_i8mf4(__VA_ARGS__) |
| #define | vneg_v_i8mf4_m(...) __riscv_vneg_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vneg_v_i8mf8(...) __riscv_vneg_v_i8mf8(__VA_ARGS__) |
| #define | vneg_v_i8mf8_m(...) __riscv_vneg_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i16m1(...) __riscv_vnmsac_vv_i16m1_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i16m1_m(...) __riscv_vnmsac_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i16m2(...) __riscv_vnmsac_vv_i16m2_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i16m2_m(...) __riscv_vnmsac_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i16m4(...) __riscv_vnmsac_vv_i16m4_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i16m4_m(...) __riscv_vnmsac_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i16m8(...) __riscv_vnmsac_vv_i16m8_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i16m8_m(...) __riscv_vnmsac_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i16mf2(...) __riscv_vnmsac_vv_i16mf2_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i16mf2_m(...) __riscv_vnmsac_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i16mf4(...) __riscv_vnmsac_vv_i16mf4_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i16mf4_m(...) __riscv_vnmsac_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i32m1(...) __riscv_vnmsac_vv_i32m1_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i32m1_m(...) __riscv_vnmsac_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i32m2(...) __riscv_vnmsac_vv_i32m2_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i32m2_m(...) __riscv_vnmsac_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i32m4(...) __riscv_vnmsac_vv_i32m4_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i32m4_m(...) __riscv_vnmsac_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i32m8(...) __riscv_vnmsac_vv_i32m8_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i32m8_m(...) __riscv_vnmsac_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i32mf2(...) __riscv_vnmsac_vv_i32mf2_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i32mf2_m(...) __riscv_vnmsac_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i64m1(...) __riscv_vnmsac_vv_i64m1_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i64m1_m(...) __riscv_vnmsac_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i64m2(...) __riscv_vnmsac_vv_i64m2_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i64m2_m(...) __riscv_vnmsac_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i64m4(...) __riscv_vnmsac_vv_i64m4_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i64m4_m(...) __riscv_vnmsac_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i64m8(...) __riscv_vnmsac_vv_i64m8_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i64m8_m(...) __riscv_vnmsac_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i8m1(...) __riscv_vnmsac_vv_i8m1_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i8m1_m(...) __riscv_vnmsac_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i8m2(...) __riscv_vnmsac_vv_i8m2_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i8m2_m(...) __riscv_vnmsac_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i8m4(...) __riscv_vnmsac_vv_i8m4_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i8m4_m(...) __riscv_vnmsac_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i8m8(...) __riscv_vnmsac_vv_i8m8_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i8m8_m(...) __riscv_vnmsac_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i8mf2(...) __riscv_vnmsac_vv_i8mf2_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i8mf2_m(...) __riscv_vnmsac_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i8mf4(...) __riscv_vnmsac_vv_i8mf4_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i8mf4_m(...) __riscv_vnmsac_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_i8mf8(...) __riscv_vnmsac_vv_i8mf8_tu(__VA_ARGS__) |
| #define | vnmsac_vv_i8mf8_m(...) __riscv_vnmsac_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u16m1(...) __riscv_vnmsac_vv_u16m1_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u16m1_m(...) __riscv_vnmsac_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u16m2(...) __riscv_vnmsac_vv_u16m2_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u16m2_m(...) __riscv_vnmsac_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u16m4(...) __riscv_vnmsac_vv_u16m4_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u16m4_m(...) __riscv_vnmsac_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u16m8(...) __riscv_vnmsac_vv_u16m8_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u16m8_m(...) __riscv_vnmsac_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u16mf2(...) __riscv_vnmsac_vv_u16mf2_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u16mf2_m(...) __riscv_vnmsac_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u16mf4(...) __riscv_vnmsac_vv_u16mf4_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u16mf4_m(...) __riscv_vnmsac_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u32m1(...) __riscv_vnmsac_vv_u32m1_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u32m1_m(...) __riscv_vnmsac_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u32m2(...) __riscv_vnmsac_vv_u32m2_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u32m2_m(...) __riscv_vnmsac_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u32m4(...) __riscv_vnmsac_vv_u32m4_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u32m4_m(...) __riscv_vnmsac_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u32m8(...) __riscv_vnmsac_vv_u32m8_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u32m8_m(...) __riscv_vnmsac_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u32mf2(...) __riscv_vnmsac_vv_u32mf2_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u32mf2_m(...) __riscv_vnmsac_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u64m1(...) __riscv_vnmsac_vv_u64m1_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u64m1_m(...) __riscv_vnmsac_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u64m2(...) __riscv_vnmsac_vv_u64m2_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u64m2_m(...) __riscv_vnmsac_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u64m4(...) __riscv_vnmsac_vv_u64m4_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u64m4_m(...) __riscv_vnmsac_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u64m8(...) __riscv_vnmsac_vv_u64m8_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u64m8_m(...) __riscv_vnmsac_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u8m1(...) __riscv_vnmsac_vv_u8m1_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u8m1_m(...) __riscv_vnmsac_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u8m2(...) __riscv_vnmsac_vv_u8m2_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u8m2_m(...) __riscv_vnmsac_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u8m4(...) __riscv_vnmsac_vv_u8m4_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u8m4_m(...) __riscv_vnmsac_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u8m8(...) __riscv_vnmsac_vv_u8m8_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u8m8_m(...) __riscv_vnmsac_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u8mf2(...) __riscv_vnmsac_vv_u8mf2_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u8mf2_m(...) __riscv_vnmsac_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u8mf4(...) __riscv_vnmsac_vv_u8mf4_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u8mf4_m(...) __riscv_vnmsac_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vnmsac_vv_u8mf8(...) __riscv_vnmsac_vv_u8mf8_tu(__VA_ARGS__) |
| #define | vnmsac_vv_u8mf8_m(...) __riscv_vnmsac_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i16m1(...) __riscv_vnmsac_vx_i16m1_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i16m1_m(...) __riscv_vnmsac_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i16m2(...) __riscv_vnmsac_vx_i16m2_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i16m2_m(...) __riscv_vnmsac_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i16m4(...) __riscv_vnmsac_vx_i16m4_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i16m4_m(...) __riscv_vnmsac_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i16m8(...) __riscv_vnmsac_vx_i16m8_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i16m8_m(...) __riscv_vnmsac_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i16mf2(...) __riscv_vnmsac_vx_i16mf2_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i16mf2_m(...) __riscv_vnmsac_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i16mf4(...) __riscv_vnmsac_vx_i16mf4_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i16mf4_m(...) __riscv_vnmsac_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i32m1(...) __riscv_vnmsac_vx_i32m1_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i32m1_m(...) __riscv_vnmsac_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i32m2(...) __riscv_vnmsac_vx_i32m2_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i32m2_m(...) __riscv_vnmsac_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i32m4(...) __riscv_vnmsac_vx_i32m4_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i32m4_m(...) __riscv_vnmsac_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i32m8(...) __riscv_vnmsac_vx_i32m8_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i32m8_m(...) __riscv_vnmsac_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i32mf2(...) __riscv_vnmsac_vx_i32mf2_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i32mf2_m(...) __riscv_vnmsac_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i64m1(...) __riscv_vnmsac_vx_i64m1_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i64m1_m(...) __riscv_vnmsac_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i64m2(...) __riscv_vnmsac_vx_i64m2_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i64m2_m(...) __riscv_vnmsac_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i64m4(...) __riscv_vnmsac_vx_i64m4_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i64m4_m(...) __riscv_vnmsac_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i64m8(...) __riscv_vnmsac_vx_i64m8_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i64m8_m(...) __riscv_vnmsac_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i8m1(...) __riscv_vnmsac_vx_i8m1_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i8m1_m(...) __riscv_vnmsac_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i8m2(...) __riscv_vnmsac_vx_i8m2_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i8m2_m(...) __riscv_vnmsac_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i8m4(...) __riscv_vnmsac_vx_i8m4_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i8m4_m(...) __riscv_vnmsac_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i8m8(...) __riscv_vnmsac_vx_i8m8_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i8m8_m(...) __riscv_vnmsac_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i8mf2(...) __riscv_vnmsac_vx_i8mf2_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i8mf2_m(...) __riscv_vnmsac_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i8mf4(...) __riscv_vnmsac_vx_i8mf4_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i8mf4_m(...) __riscv_vnmsac_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_i8mf8(...) __riscv_vnmsac_vx_i8mf8_tu(__VA_ARGS__) |
| #define | vnmsac_vx_i8mf8_m(...) __riscv_vnmsac_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u16m1(...) __riscv_vnmsac_vx_u16m1_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u16m1_m(...) __riscv_vnmsac_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u16m2(...) __riscv_vnmsac_vx_u16m2_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u16m2_m(...) __riscv_vnmsac_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u16m4(...) __riscv_vnmsac_vx_u16m4_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u16m4_m(...) __riscv_vnmsac_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u16m8(...) __riscv_vnmsac_vx_u16m8_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u16m8_m(...) __riscv_vnmsac_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u16mf2(...) __riscv_vnmsac_vx_u16mf2_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u16mf2_m(...) __riscv_vnmsac_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u16mf4(...) __riscv_vnmsac_vx_u16mf4_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u16mf4_m(...) __riscv_vnmsac_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u32m1(...) __riscv_vnmsac_vx_u32m1_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u32m1_m(...) __riscv_vnmsac_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u32m2(...) __riscv_vnmsac_vx_u32m2_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u32m2_m(...) __riscv_vnmsac_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u32m4(...) __riscv_vnmsac_vx_u32m4_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u32m4_m(...) __riscv_vnmsac_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u32m8(...) __riscv_vnmsac_vx_u32m8_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u32m8_m(...) __riscv_vnmsac_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u32mf2(...) __riscv_vnmsac_vx_u32mf2_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u32mf2_m(...) __riscv_vnmsac_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u64m1(...) __riscv_vnmsac_vx_u64m1_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u64m1_m(...) __riscv_vnmsac_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u64m2(...) __riscv_vnmsac_vx_u64m2_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u64m2_m(...) __riscv_vnmsac_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u64m4(...) __riscv_vnmsac_vx_u64m4_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u64m4_m(...) __riscv_vnmsac_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u64m8(...) __riscv_vnmsac_vx_u64m8_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u64m8_m(...) __riscv_vnmsac_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u8m1(...) __riscv_vnmsac_vx_u8m1_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u8m1_m(...) __riscv_vnmsac_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u8m2(...) __riscv_vnmsac_vx_u8m2_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u8m2_m(...) __riscv_vnmsac_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u8m4(...) __riscv_vnmsac_vx_u8m4_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u8m4_m(...) __riscv_vnmsac_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u8m8(...) __riscv_vnmsac_vx_u8m8_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u8m8_m(...) __riscv_vnmsac_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u8mf2(...) __riscv_vnmsac_vx_u8mf2_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u8mf2_m(...) __riscv_vnmsac_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u8mf4(...) __riscv_vnmsac_vx_u8mf4_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u8mf4_m(...) __riscv_vnmsac_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vnmsac_vx_u8mf8(...) __riscv_vnmsac_vx_u8mf8_tu(__VA_ARGS__) |
| #define | vnmsac_vx_u8mf8_m(...) __riscv_vnmsac_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i16m1(...) __riscv_vnmsub_vv_i16m1_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i16m1_m(...) __riscv_vnmsub_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i16m2(...) __riscv_vnmsub_vv_i16m2_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i16m2_m(...) __riscv_vnmsub_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i16m4(...) __riscv_vnmsub_vv_i16m4_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i16m4_m(...) __riscv_vnmsub_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i16m8(...) __riscv_vnmsub_vv_i16m8_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i16m8_m(...) __riscv_vnmsub_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i16mf2(...) __riscv_vnmsub_vv_i16mf2_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i16mf2_m(...) __riscv_vnmsub_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i16mf4(...) __riscv_vnmsub_vv_i16mf4_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i16mf4_m(...) __riscv_vnmsub_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i32m1(...) __riscv_vnmsub_vv_i32m1_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i32m1_m(...) __riscv_vnmsub_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i32m2(...) __riscv_vnmsub_vv_i32m2_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i32m2_m(...) __riscv_vnmsub_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i32m4(...) __riscv_vnmsub_vv_i32m4_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i32m4_m(...) __riscv_vnmsub_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i32m8(...) __riscv_vnmsub_vv_i32m8_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i32m8_m(...) __riscv_vnmsub_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i32mf2(...) __riscv_vnmsub_vv_i32mf2_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i32mf2_m(...) __riscv_vnmsub_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i64m1(...) __riscv_vnmsub_vv_i64m1_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i64m1_m(...) __riscv_vnmsub_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i64m2(...) __riscv_vnmsub_vv_i64m2_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i64m2_m(...) __riscv_vnmsub_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i64m4(...) __riscv_vnmsub_vv_i64m4_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i64m4_m(...) __riscv_vnmsub_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i64m8(...) __riscv_vnmsub_vv_i64m8_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i64m8_m(...) __riscv_vnmsub_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i8m1(...) __riscv_vnmsub_vv_i8m1_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i8m1_m(...) __riscv_vnmsub_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i8m2(...) __riscv_vnmsub_vv_i8m2_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i8m2_m(...) __riscv_vnmsub_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i8m4(...) __riscv_vnmsub_vv_i8m4_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i8m4_m(...) __riscv_vnmsub_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i8m8(...) __riscv_vnmsub_vv_i8m8_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i8m8_m(...) __riscv_vnmsub_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i8mf2(...) __riscv_vnmsub_vv_i8mf2_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i8mf2_m(...) __riscv_vnmsub_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i8mf4(...) __riscv_vnmsub_vv_i8mf4_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i8mf4_m(...) __riscv_vnmsub_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_i8mf8(...) __riscv_vnmsub_vv_i8mf8_tu(__VA_ARGS__) |
| #define | vnmsub_vv_i8mf8_m(...) __riscv_vnmsub_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u16m1(...) __riscv_vnmsub_vv_u16m1_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u16m1_m(...) __riscv_vnmsub_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u16m2(...) __riscv_vnmsub_vv_u16m2_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u16m2_m(...) __riscv_vnmsub_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u16m4(...) __riscv_vnmsub_vv_u16m4_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u16m4_m(...) __riscv_vnmsub_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u16m8(...) __riscv_vnmsub_vv_u16m8_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u16m8_m(...) __riscv_vnmsub_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u16mf2(...) __riscv_vnmsub_vv_u16mf2_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u16mf2_m(...) __riscv_vnmsub_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u16mf4(...) __riscv_vnmsub_vv_u16mf4_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u16mf4_m(...) __riscv_vnmsub_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u32m1(...) __riscv_vnmsub_vv_u32m1_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u32m1_m(...) __riscv_vnmsub_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u32m2(...) __riscv_vnmsub_vv_u32m2_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u32m2_m(...) __riscv_vnmsub_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u32m4(...) __riscv_vnmsub_vv_u32m4_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u32m4_m(...) __riscv_vnmsub_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u32m8(...) __riscv_vnmsub_vv_u32m8_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u32m8_m(...) __riscv_vnmsub_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u32mf2(...) __riscv_vnmsub_vv_u32mf2_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u32mf2_m(...) __riscv_vnmsub_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u64m1(...) __riscv_vnmsub_vv_u64m1_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u64m1_m(...) __riscv_vnmsub_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u64m2(...) __riscv_vnmsub_vv_u64m2_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u64m2_m(...) __riscv_vnmsub_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u64m4(...) __riscv_vnmsub_vv_u64m4_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u64m4_m(...) __riscv_vnmsub_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u64m8(...) __riscv_vnmsub_vv_u64m8_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u64m8_m(...) __riscv_vnmsub_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u8m1(...) __riscv_vnmsub_vv_u8m1_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u8m1_m(...) __riscv_vnmsub_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u8m2(...) __riscv_vnmsub_vv_u8m2_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u8m2_m(...) __riscv_vnmsub_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u8m4(...) __riscv_vnmsub_vv_u8m4_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u8m4_m(...) __riscv_vnmsub_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u8m8(...) __riscv_vnmsub_vv_u8m8_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u8m8_m(...) __riscv_vnmsub_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u8mf2(...) __riscv_vnmsub_vv_u8mf2_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u8mf2_m(...) __riscv_vnmsub_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u8mf4(...) __riscv_vnmsub_vv_u8mf4_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u8mf4_m(...) __riscv_vnmsub_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vnmsub_vv_u8mf8(...) __riscv_vnmsub_vv_u8mf8_tu(__VA_ARGS__) |
| #define | vnmsub_vv_u8mf8_m(...) __riscv_vnmsub_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i16m1(...) __riscv_vnmsub_vx_i16m1_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i16m1_m(...) __riscv_vnmsub_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i16m2(...) __riscv_vnmsub_vx_i16m2_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i16m2_m(...) __riscv_vnmsub_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i16m4(...) __riscv_vnmsub_vx_i16m4_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i16m4_m(...) __riscv_vnmsub_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i16m8(...) __riscv_vnmsub_vx_i16m8_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i16m8_m(...) __riscv_vnmsub_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i16mf2(...) __riscv_vnmsub_vx_i16mf2_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i16mf2_m(...) __riscv_vnmsub_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i16mf4(...) __riscv_vnmsub_vx_i16mf4_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i16mf4_m(...) __riscv_vnmsub_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i32m1(...) __riscv_vnmsub_vx_i32m1_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i32m1_m(...) __riscv_vnmsub_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i32m2(...) __riscv_vnmsub_vx_i32m2_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i32m2_m(...) __riscv_vnmsub_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i32m4(...) __riscv_vnmsub_vx_i32m4_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i32m4_m(...) __riscv_vnmsub_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i32m8(...) __riscv_vnmsub_vx_i32m8_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i32m8_m(...) __riscv_vnmsub_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i32mf2(...) __riscv_vnmsub_vx_i32mf2_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i32mf2_m(...) __riscv_vnmsub_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i64m1(...) __riscv_vnmsub_vx_i64m1_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i64m1_m(...) __riscv_vnmsub_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i64m2(...) __riscv_vnmsub_vx_i64m2_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i64m2_m(...) __riscv_vnmsub_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i64m4(...) __riscv_vnmsub_vx_i64m4_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i64m4_m(...) __riscv_vnmsub_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i64m8(...) __riscv_vnmsub_vx_i64m8_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i64m8_m(...) __riscv_vnmsub_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i8m1(...) __riscv_vnmsub_vx_i8m1_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i8m1_m(...) __riscv_vnmsub_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i8m2(...) __riscv_vnmsub_vx_i8m2_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i8m2_m(...) __riscv_vnmsub_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i8m4(...) __riscv_vnmsub_vx_i8m4_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i8m4_m(...) __riscv_vnmsub_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i8m8(...) __riscv_vnmsub_vx_i8m8_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i8m8_m(...) __riscv_vnmsub_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i8mf2(...) __riscv_vnmsub_vx_i8mf2_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i8mf2_m(...) __riscv_vnmsub_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i8mf4(...) __riscv_vnmsub_vx_i8mf4_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i8mf4_m(...) __riscv_vnmsub_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_i8mf8(...) __riscv_vnmsub_vx_i8mf8_tu(__VA_ARGS__) |
| #define | vnmsub_vx_i8mf8_m(...) __riscv_vnmsub_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u16m1(...) __riscv_vnmsub_vx_u16m1_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u16m1_m(...) __riscv_vnmsub_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u16m2(...) __riscv_vnmsub_vx_u16m2_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u16m2_m(...) __riscv_vnmsub_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u16m4(...) __riscv_vnmsub_vx_u16m4_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u16m4_m(...) __riscv_vnmsub_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u16m8(...) __riscv_vnmsub_vx_u16m8_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u16m8_m(...) __riscv_vnmsub_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u16mf2(...) __riscv_vnmsub_vx_u16mf2_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u16mf2_m(...) __riscv_vnmsub_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u16mf4(...) __riscv_vnmsub_vx_u16mf4_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u16mf4_m(...) __riscv_vnmsub_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u32m1(...) __riscv_vnmsub_vx_u32m1_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u32m1_m(...) __riscv_vnmsub_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u32m2(...) __riscv_vnmsub_vx_u32m2_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u32m2_m(...) __riscv_vnmsub_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u32m4(...) __riscv_vnmsub_vx_u32m4_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u32m4_m(...) __riscv_vnmsub_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u32m8(...) __riscv_vnmsub_vx_u32m8_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u32m8_m(...) __riscv_vnmsub_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u32mf2(...) __riscv_vnmsub_vx_u32mf2_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u32mf2_m(...) __riscv_vnmsub_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u64m1(...) __riscv_vnmsub_vx_u64m1_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u64m1_m(...) __riscv_vnmsub_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u64m2(...) __riscv_vnmsub_vx_u64m2_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u64m2_m(...) __riscv_vnmsub_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u64m4(...) __riscv_vnmsub_vx_u64m4_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u64m4_m(...) __riscv_vnmsub_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u64m8(...) __riscv_vnmsub_vx_u64m8_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u64m8_m(...) __riscv_vnmsub_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u8m1(...) __riscv_vnmsub_vx_u8m1_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u8m1_m(...) __riscv_vnmsub_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u8m2(...) __riscv_vnmsub_vx_u8m2_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u8m2_m(...) __riscv_vnmsub_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u8m4(...) __riscv_vnmsub_vx_u8m4_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u8m4_m(...) __riscv_vnmsub_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u8m8(...) __riscv_vnmsub_vx_u8m8_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u8m8_m(...) __riscv_vnmsub_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u8mf2(...) __riscv_vnmsub_vx_u8mf2_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u8mf2_m(...) __riscv_vnmsub_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u8mf4(...) __riscv_vnmsub_vx_u8mf4_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u8mf4_m(...) __riscv_vnmsub_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vnmsub_vx_u8mf8(...) __riscv_vnmsub_vx_u8mf8_tu(__VA_ARGS__) |
| #define | vnmsub_vx_u8mf8_m(...) __riscv_vnmsub_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vnot_v_i16m1(...) __riscv_vnot_v_i16m1(__VA_ARGS__) |
| #define | vnot_v_i16m1_m(...) __riscv_vnot_v_i16m1_tumu(__VA_ARGS__) |
| #define | vnot_v_i16m2(...) __riscv_vnot_v_i16m2(__VA_ARGS__) |
| #define | vnot_v_i16m2_m(...) __riscv_vnot_v_i16m2_tumu(__VA_ARGS__) |
| #define | vnot_v_i16m4(...) __riscv_vnot_v_i16m4(__VA_ARGS__) |
| #define | vnot_v_i16m4_m(...) __riscv_vnot_v_i16m4_tumu(__VA_ARGS__) |
| #define | vnot_v_i16m8(...) __riscv_vnot_v_i16m8(__VA_ARGS__) |
| #define | vnot_v_i16m8_m(...) __riscv_vnot_v_i16m8_tumu(__VA_ARGS__) |
| #define | vnot_v_i16mf2(...) __riscv_vnot_v_i16mf2(__VA_ARGS__) |
| #define | vnot_v_i16mf2_m(...) __riscv_vnot_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vnot_v_i16mf4(...) __riscv_vnot_v_i16mf4(__VA_ARGS__) |
| #define | vnot_v_i16mf4_m(...) __riscv_vnot_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vnot_v_i32m1(...) __riscv_vnot_v_i32m1(__VA_ARGS__) |
| #define | vnot_v_i32m1_m(...) __riscv_vnot_v_i32m1_tumu(__VA_ARGS__) |
| #define | vnot_v_i32m2(...) __riscv_vnot_v_i32m2(__VA_ARGS__) |
| #define | vnot_v_i32m2_m(...) __riscv_vnot_v_i32m2_tumu(__VA_ARGS__) |
| #define | vnot_v_i32m4(...) __riscv_vnot_v_i32m4(__VA_ARGS__) |
| #define | vnot_v_i32m4_m(...) __riscv_vnot_v_i32m4_tumu(__VA_ARGS__) |
| #define | vnot_v_i32m8(...) __riscv_vnot_v_i32m8(__VA_ARGS__) |
| #define | vnot_v_i32m8_m(...) __riscv_vnot_v_i32m8_tumu(__VA_ARGS__) |
| #define | vnot_v_i32mf2(...) __riscv_vnot_v_i32mf2(__VA_ARGS__) |
| #define | vnot_v_i32mf2_m(...) __riscv_vnot_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vnot_v_i64m1(...) __riscv_vnot_v_i64m1(__VA_ARGS__) |
| #define | vnot_v_i64m1_m(...) __riscv_vnot_v_i64m1_tumu(__VA_ARGS__) |
| #define | vnot_v_i64m2(...) __riscv_vnot_v_i64m2(__VA_ARGS__) |
| #define | vnot_v_i64m2_m(...) __riscv_vnot_v_i64m2_tumu(__VA_ARGS__) |
| #define | vnot_v_i64m4(...) __riscv_vnot_v_i64m4(__VA_ARGS__) |
| #define | vnot_v_i64m4_m(...) __riscv_vnot_v_i64m4_tumu(__VA_ARGS__) |
| #define | vnot_v_i64m8(...) __riscv_vnot_v_i64m8(__VA_ARGS__) |
| #define | vnot_v_i64m8_m(...) __riscv_vnot_v_i64m8_tumu(__VA_ARGS__) |
| #define | vnot_v_i8m1(...) __riscv_vnot_v_i8m1(__VA_ARGS__) |
| #define | vnot_v_i8m1_m(...) __riscv_vnot_v_i8m1_tumu(__VA_ARGS__) |
| #define | vnot_v_i8m2(...) __riscv_vnot_v_i8m2(__VA_ARGS__) |
| #define | vnot_v_i8m2_m(...) __riscv_vnot_v_i8m2_tumu(__VA_ARGS__) |
| #define | vnot_v_i8m4(...) __riscv_vnot_v_i8m4(__VA_ARGS__) |
| #define | vnot_v_i8m4_m(...) __riscv_vnot_v_i8m4_tumu(__VA_ARGS__) |
| #define | vnot_v_i8m8(...) __riscv_vnot_v_i8m8(__VA_ARGS__) |
| #define | vnot_v_i8m8_m(...) __riscv_vnot_v_i8m8_tumu(__VA_ARGS__) |
| #define | vnot_v_i8mf2(...) __riscv_vnot_v_i8mf2(__VA_ARGS__) |
| #define | vnot_v_i8mf2_m(...) __riscv_vnot_v_i8mf2_tumu(__VA_ARGS__) |
| #define | vnot_v_i8mf4(...) __riscv_vnot_v_i8mf4(__VA_ARGS__) |
| #define | vnot_v_i8mf4_m(...) __riscv_vnot_v_i8mf4_tumu(__VA_ARGS__) |
| #define | vnot_v_i8mf8(...) __riscv_vnot_v_i8mf8(__VA_ARGS__) |
| #define | vnot_v_i8mf8_m(...) __riscv_vnot_v_i8mf8_tumu(__VA_ARGS__) |
| #define | vnot_v_u16m1(...) __riscv_vnot_v_u16m1(__VA_ARGS__) |
| #define | vnot_v_u16m1_m(...) __riscv_vnot_v_u16m1_tumu(__VA_ARGS__) |
| #define | vnot_v_u16m2(...) __riscv_vnot_v_u16m2(__VA_ARGS__) |
| #define | vnot_v_u16m2_m(...) __riscv_vnot_v_u16m2_tumu(__VA_ARGS__) |
| #define | vnot_v_u16m4(...) __riscv_vnot_v_u16m4(__VA_ARGS__) |
| #define | vnot_v_u16m4_m(...) __riscv_vnot_v_u16m4_tumu(__VA_ARGS__) |
| #define | vnot_v_u16m8(...) __riscv_vnot_v_u16m8(__VA_ARGS__) |
| #define | vnot_v_u16m8_m(...) __riscv_vnot_v_u16m8_tumu(__VA_ARGS__) |
| #define | vnot_v_u16mf2(...) __riscv_vnot_v_u16mf2(__VA_ARGS__) |
| #define | vnot_v_u16mf2_m(...) __riscv_vnot_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vnot_v_u16mf4(...) __riscv_vnot_v_u16mf4(__VA_ARGS__) |
| #define | vnot_v_u16mf4_m(...) __riscv_vnot_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vnot_v_u32m1(...) __riscv_vnot_v_u32m1(__VA_ARGS__) |
| #define | vnot_v_u32m1_m(...) __riscv_vnot_v_u32m1_tumu(__VA_ARGS__) |
| #define | vnot_v_u32m2(...) __riscv_vnot_v_u32m2(__VA_ARGS__) |
| #define | vnot_v_u32m2_m(...) __riscv_vnot_v_u32m2_tumu(__VA_ARGS__) |
| #define | vnot_v_u32m4(...) __riscv_vnot_v_u32m4(__VA_ARGS__) |
| #define | vnot_v_u32m4_m(...) __riscv_vnot_v_u32m4_tumu(__VA_ARGS__) |
| #define | vnot_v_u32m8(...) __riscv_vnot_v_u32m8(__VA_ARGS__) |
| #define | vnot_v_u32m8_m(...) __riscv_vnot_v_u32m8_tumu(__VA_ARGS__) |
| #define | vnot_v_u32mf2(...) __riscv_vnot_v_u32mf2(__VA_ARGS__) |
| #define | vnot_v_u32mf2_m(...) __riscv_vnot_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vnot_v_u64m1(...) __riscv_vnot_v_u64m1(__VA_ARGS__) |
| #define | vnot_v_u64m1_m(...) __riscv_vnot_v_u64m1_tumu(__VA_ARGS__) |
| #define | vnot_v_u64m2(...) __riscv_vnot_v_u64m2(__VA_ARGS__) |
| #define | vnot_v_u64m2_m(...) __riscv_vnot_v_u64m2_tumu(__VA_ARGS__) |
| #define | vnot_v_u64m4(...) __riscv_vnot_v_u64m4(__VA_ARGS__) |
| #define | vnot_v_u64m4_m(...) __riscv_vnot_v_u64m4_tumu(__VA_ARGS__) |
| #define | vnot_v_u64m8(...) __riscv_vnot_v_u64m8(__VA_ARGS__) |
| #define | vnot_v_u64m8_m(...) __riscv_vnot_v_u64m8_tumu(__VA_ARGS__) |
| #define | vnot_v_u8m1(...) __riscv_vnot_v_u8m1(__VA_ARGS__) |
| #define | vnot_v_u8m1_m(...) __riscv_vnot_v_u8m1_tumu(__VA_ARGS__) |
| #define | vnot_v_u8m2(...) __riscv_vnot_v_u8m2(__VA_ARGS__) |
| #define | vnot_v_u8m2_m(...) __riscv_vnot_v_u8m2_tumu(__VA_ARGS__) |
| #define | vnot_v_u8m4(...) __riscv_vnot_v_u8m4(__VA_ARGS__) |
| #define | vnot_v_u8m4_m(...) __riscv_vnot_v_u8m4_tumu(__VA_ARGS__) |
| #define | vnot_v_u8m8(...) __riscv_vnot_v_u8m8(__VA_ARGS__) |
| #define | vnot_v_u8m8_m(...) __riscv_vnot_v_u8m8_tumu(__VA_ARGS__) |
| #define | vnot_v_u8mf2(...) __riscv_vnot_v_u8mf2(__VA_ARGS__) |
| #define | vnot_v_u8mf2_m(...) __riscv_vnot_v_u8mf2_tumu(__VA_ARGS__) |
| #define | vnot_v_u8mf4(...) __riscv_vnot_v_u8mf4(__VA_ARGS__) |
| #define | vnot_v_u8mf4_m(...) __riscv_vnot_v_u8mf4_tumu(__VA_ARGS__) |
| #define | vnot_v_u8mf8(...) __riscv_vnot_v_u8mf8(__VA_ARGS__) |
| #define | vnot_v_u8mf8_m(...) __riscv_vnot_v_u8mf8_tumu(__VA_ARGS__) |
| #define | vnsra_wv_i16m1(...) __riscv_vnsra_wv_i16m1(__VA_ARGS__) |
| #define | vnsra_wv_i16m1_m(...) __riscv_vnsra_wv_i16m1_tumu(__VA_ARGS__) |
| #define | vnsra_wv_i16m2(...) __riscv_vnsra_wv_i16m2(__VA_ARGS__) |
| #define | vnsra_wv_i16m2_m(...) __riscv_vnsra_wv_i16m2_tumu(__VA_ARGS__) |
| #define | vnsra_wv_i16m4(...) __riscv_vnsra_wv_i16m4(__VA_ARGS__) |
| #define | vnsra_wv_i16m4_m(...) __riscv_vnsra_wv_i16m4_tumu(__VA_ARGS__) |
| #define | vnsra_wv_i16mf2(...) __riscv_vnsra_wv_i16mf2(__VA_ARGS__) |
| #define | vnsra_wv_i16mf2_m(...) __riscv_vnsra_wv_i16mf2_tumu(__VA_ARGS__) |
| #define | vnsra_wv_i16mf4(...) __riscv_vnsra_wv_i16mf4(__VA_ARGS__) |
| #define | vnsra_wv_i16mf4_m(...) __riscv_vnsra_wv_i16mf4_tumu(__VA_ARGS__) |
| #define | vnsra_wv_i32m1(...) __riscv_vnsra_wv_i32m1(__VA_ARGS__) |
| #define | vnsra_wv_i32m1_m(...) __riscv_vnsra_wv_i32m1_tumu(__VA_ARGS__) |
| #define | vnsra_wv_i32m2(...) __riscv_vnsra_wv_i32m2(__VA_ARGS__) |
| #define | vnsra_wv_i32m2_m(...) __riscv_vnsra_wv_i32m2_tumu(__VA_ARGS__) |
| #define | vnsra_wv_i32m4(...) __riscv_vnsra_wv_i32m4(__VA_ARGS__) |
| #define | vnsra_wv_i32m4_m(...) __riscv_vnsra_wv_i32m4_tumu(__VA_ARGS__) |
| #define | vnsra_wv_i32mf2(...) __riscv_vnsra_wv_i32mf2(__VA_ARGS__) |
| #define | vnsra_wv_i32mf2_m(...) __riscv_vnsra_wv_i32mf2_tumu(__VA_ARGS__) |
| #define | vnsra_wv_i8m1(...) __riscv_vnsra_wv_i8m1(__VA_ARGS__) |
| #define | vnsra_wv_i8m1_m(...) __riscv_vnsra_wv_i8m1_tumu(__VA_ARGS__) |
| #define | vnsra_wv_i8m2(...) __riscv_vnsra_wv_i8m2(__VA_ARGS__) |
| #define | vnsra_wv_i8m2_m(...) __riscv_vnsra_wv_i8m2_tumu(__VA_ARGS__) |
| #define | vnsra_wv_i8m4(...) __riscv_vnsra_wv_i8m4(__VA_ARGS__) |
| #define | vnsra_wv_i8m4_m(...) __riscv_vnsra_wv_i8m4_tumu(__VA_ARGS__) |
| #define | vnsra_wv_i8mf2(...) __riscv_vnsra_wv_i8mf2(__VA_ARGS__) |
| #define | vnsra_wv_i8mf2_m(...) __riscv_vnsra_wv_i8mf2_tumu(__VA_ARGS__) |
| #define | vnsra_wv_i8mf4(...) __riscv_vnsra_wv_i8mf4(__VA_ARGS__) |
| #define | vnsra_wv_i8mf4_m(...) __riscv_vnsra_wv_i8mf4_tumu(__VA_ARGS__) |
| #define | vnsra_wv_i8mf8(...) __riscv_vnsra_wv_i8mf8(__VA_ARGS__) |
| #define | vnsra_wv_i8mf8_m(...) __riscv_vnsra_wv_i8mf8_tumu(__VA_ARGS__) |
| #define | vnsra_wx_i16m1(...) __riscv_vnsra_wx_i16m1(__VA_ARGS__) |
| #define | vnsra_wx_i16m1_m(...) __riscv_vnsra_wx_i16m1_tumu(__VA_ARGS__) |
| #define | vnsra_wx_i16m2(...) __riscv_vnsra_wx_i16m2(__VA_ARGS__) |
| #define | vnsra_wx_i16m2_m(...) __riscv_vnsra_wx_i16m2_tumu(__VA_ARGS__) |
| #define | vnsra_wx_i16m4(...) __riscv_vnsra_wx_i16m4(__VA_ARGS__) |
| #define | vnsra_wx_i16m4_m(...) __riscv_vnsra_wx_i16m4_tumu(__VA_ARGS__) |
| #define | vnsra_wx_i16mf2(...) __riscv_vnsra_wx_i16mf2(__VA_ARGS__) |
| #define | vnsra_wx_i16mf2_m(...) __riscv_vnsra_wx_i16mf2_tumu(__VA_ARGS__) |
| #define | vnsra_wx_i16mf4(...) __riscv_vnsra_wx_i16mf4(__VA_ARGS__) |
| #define | vnsra_wx_i16mf4_m(...) __riscv_vnsra_wx_i16mf4_tumu(__VA_ARGS__) |
| #define | vnsra_wx_i32m1(...) __riscv_vnsra_wx_i32m1(__VA_ARGS__) |
| #define | vnsra_wx_i32m1_m(...) __riscv_vnsra_wx_i32m1_tumu(__VA_ARGS__) |
| #define | vnsra_wx_i32m2(...) __riscv_vnsra_wx_i32m2(__VA_ARGS__) |
| #define | vnsra_wx_i32m2_m(...) __riscv_vnsra_wx_i32m2_tumu(__VA_ARGS__) |
| #define | vnsra_wx_i32m4(...) __riscv_vnsra_wx_i32m4(__VA_ARGS__) |
| #define | vnsra_wx_i32m4_m(...) __riscv_vnsra_wx_i32m4_tumu(__VA_ARGS__) |
| #define | vnsra_wx_i32mf2(...) __riscv_vnsra_wx_i32mf2(__VA_ARGS__) |
| #define | vnsra_wx_i32mf2_m(...) __riscv_vnsra_wx_i32mf2_tumu(__VA_ARGS__) |
| #define | vnsra_wx_i8m1(...) __riscv_vnsra_wx_i8m1(__VA_ARGS__) |
| #define | vnsra_wx_i8m1_m(...) __riscv_vnsra_wx_i8m1_tumu(__VA_ARGS__) |
| #define | vnsra_wx_i8m2(...) __riscv_vnsra_wx_i8m2(__VA_ARGS__) |
| #define | vnsra_wx_i8m2_m(...) __riscv_vnsra_wx_i8m2_tumu(__VA_ARGS__) |
| #define | vnsra_wx_i8m4(...) __riscv_vnsra_wx_i8m4(__VA_ARGS__) |
| #define | vnsra_wx_i8m4_m(...) __riscv_vnsra_wx_i8m4_tumu(__VA_ARGS__) |
| #define | vnsra_wx_i8mf2(...) __riscv_vnsra_wx_i8mf2(__VA_ARGS__) |
| #define | vnsra_wx_i8mf2_m(...) __riscv_vnsra_wx_i8mf2_tumu(__VA_ARGS__) |
| #define | vnsra_wx_i8mf4(...) __riscv_vnsra_wx_i8mf4(__VA_ARGS__) |
| #define | vnsra_wx_i8mf4_m(...) __riscv_vnsra_wx_i8mf4_tumu(__VA_ARGS__) |
| #define | vnsra_wx_i8mf8(...) __riscv_vnsra_wx_i8mf8(__VA_ARGS__) |
| #define | vnsra_wx_i8mf8_m(...) __riscv_vnsra_wx_i8mf8_tumu(__VA_ARGS__) |
| #define | vnsrl_wv_u16m1(...) __riscv_vnsrl_wv_u16m1(__VA_ARGS__) |
| #define | vnsrl_wv_u16m1_m(...) __riscv_vnsrl_wv_u16m1_tumu(__VA_ARGS__) |
| #define | vnsrl_wv_u16m2(...) __riscv_vnsrl_wv_u16m2(__VA_ARGS__) |
| #define | vnsrl_wv_u16m2_m(...) __riscv_vnsrl_wv_u16m2_tumu(__VA_ARGS__) |
| #define | vnsrl_wv_u16m4(...) __riscv_vnsrl_wv_u16m4(__VA_ARGS__) |
| #define | vnsrl_wv_u16m4_m(...) __riscv_vnsrl_wv_u16m4_tumu(__VA_ARGS__) |
| #define | vnsrl_wv_u16mf2(...) __riscv_vnsrl_wv_u16mf2(__VA_ARGS__) |
| #define | vnsrl_wv_u16mf2_m(...) __riscv_vnsrl_wv_u16mf2_tumu(__VA_ARGS__) |
| #define | vnsrl_wv_u16mf4(...) __riscv_vnsrl_wv_u16mf4(__VA_ARGS__) |
| #define | vnsrl_wv_u16mf4_m(...) __riscv_vnsrl_wv_u16mf4_tumu(__VA_ARGS__) |
| #define | vnsrl_wv_u32m1(...) __riscv_vnsrl_wv_u32m1(__VA_ARGS__) |
| #define | vnsrl_wv_u32m1_m(...) __riscv_vnsrl_wv_u32m1_tumu(__VA_ARGS__) |
| #define | vnsrl_wv_u32m2(...) __riscv_vnsrl_wv_u32m2(__VA_ARGS__) |
| #define | vnsrl_wv_u32m2_m(...) __riscv_vnsrl_wv_u32m2_tumu(__VA_ARGS__) |
| #define | vnsrl_wv_u32m4(...) __riscv_vnsrl_wv_u32m4(__VA_ARGS__) |
| #define | vnsrl_wv_u32m4_m(...) __riscv_vnsrl_wv_u32m4_tumu(__VA_ARGS__) |
| #define | vnsrl_wv_u32mf2(...) __riscv_vnsrl_wv_u32mf2(__VA_ARGS__) |
| #define | vnsrl_wv_u32mf2_m(...) __riscv_vnsrl_wv_u32mf2_tumu(__VA_ARGS__) |
| #define | vnsrl_wv_u8m1(...) __riscv_vnsrl_wv_u8m1(__VA_ARGS__) |
| #define | vnsrl_wv_u8m1_m(...) __riscv_vnsrl_wv_u8m1_tumu(__VA_ARGS__) |
| #define | vnsrl_wv_u8m2(...) __riscv_vnsrl_wv_u8m2(__VA_ARGS__) |
| #define | vnsrl_wv_u8m2_m(...) __riscv_vnsrl_wv_u8m2_tumu(__VA_ARGS__) |
| #define | vnsrl_wv_u8m4(...) __riscv_vnsrl_wv_u8m4(__VA_ARGS__) |
| #define | vnsrl_wv_u8m4_m(...) __riscv_vnsrl_wv_u8m4_tumu(__VA_ARGS__) |
| #define | vnsrl_wv_u8mf2(...) __riscv_vnsrl_wv_u8mf2(__VA_ARGS__) |
| #define | vnsrl_wv_u8mf2_m(...) __riscv_vnsrl_wv_u8mf2_tumu(__VA_ARGS__) |
| #define | vnsrl_wv_u8mf4(...) __riscv_vnsrl_wv_u8mf4(__VA_ARGS__) |
| #define | vnsrl_wv_u8mf4_m(...) __riscv_vnsrl_wv_u8mf4_tumu(__VA_ARGS__) |
| #define | vnsrl_wv_u8mf8(...) __riscv_vnsrl_wv_u8mf8(__VA_ARGS__) |
| #define | vnsrl_wv_u8mf8_m(...) __riscv_vnsrl_wv_u8mf8_tumu(__VA_ARGS__) |
| #define | vnsrl_wx_u16m1(...) __riscv_vnsrl_wx_u16m1(__VA_ARGS__) |
| #define | vnsrl_wx_u16m1_m(...) __riscv_vnsrl_wx_u16m1_tumu(__VA_ARGS__) |
| #define | vnsrl_wx_u16m2(...) __riscv_vnsrl_wx_u16m2(__VA_ARGS__) |
| #define | vnsrl_wx_u16m2_m(...) __riscv_vnsrl_wx_u16m2_tumu(__VA_ARGS__) |
| #define | vnsrl_wx_u16m4(...) __riscv_vnsrl_wx_u16m4(__VA_ARGS__) |
| #define | vnsrl_wx_u16m4_m(...) __riscv_vnsrl_wx_u16m4_tumu(__VA_ARGS__) |
| #define | vnsrl_wx_u16mf2(...) __riscv_vnsrl_wx_u16mf2(__VA_ARGS__) |
| #define | vnsrl_wx_u16mf2_m(...) __riscv_vnsrl_wx_u16mf2_tumu(__VA_ARGS__) |
| #define | vnsrl_wx_u16mf4(...) __riscv_vnsrl_wx_u16mf4(__VA_ARGS__) |
| #define | vnsrl_wx_u16mf4_m(...) __riscv_vnsrl_wx_u16mf4_tumu(__VA_ARGS__) |
| #define | vnsrl_wx_u32m1(...) __riscv_vnsrl_wx_u32m1(__VA_ARGS__) |
| #define | vnsrl_wx_u32m1_m(...) __riscv_vnsrl_wx_u32m1_tumu(__VA_ARGS__) |
| #define | vnsrl_wx_u32m2(...) __riscv_vnsrl_wx_u32m2(__VA_ARGS__) |
| #define | vnsrl_wx_u32m2_m(...) __riscv_vnsrl_wx_u32m2_tumu(__VA_ARGS__) |
| #define | vnsrl_wx_u32m4(...) __riscv_vnsrl_wx_u32m4(__VA_ARGS__) |
| #define | vnsrl_wx_u32m4_m(...) __riscv_vnsrl_wx_u32m4_tumu(__VA_ARGS__) |
| #define | vnsrl_wx_u32mf2(...) __riscv_vnsrl_wx_u32mf2(__VA_ARGS__) |
| #define | vnsrl_wx_u32mf2_m(...) __riscv_vnsrl_wx_u32mf2_tumu(__VA_ARGS__) |
| #define | vnsrl_wx_u8m1(...) __riscv_vnsrl_wx_u8m1(__VA_ARGS__) |
| #define | vnsrl_wx_u8m1_m(...) __riscv_vnsrl_wx_u8m1_tumu(__VA_ARGS__) |
| #define | vnsrl_wx_u8m2(...) __riscv_vnsrl_wx_u8m2(__VA_ARGS__) |
| #define | vnsrl_wx_u8m2_m(...) __riscv_vnsrl_wx_u8m2_tumu(__VA_ARGS__) |
| #define | vnsrl_wx_u8m4(...) __riscv_vnsrl_wx_u8m4(__VA_ARGS__) |
| #define | vnsrl_wx_u8m4_m(...) __riscv_vnsrl_wx_u8m4_tumu(__VA_ARGS__) |
| #define | vnsrl_wx_u8mf2(...) __riscv_vnsrl_wx_u8mf2(__VA_ARGS__) |
| #define | vnsrl_wx_u8mf2_m(...) __riscv_vnsrl_wx_u8mf2_tumu(__VA_ARGS__) |
| #define | vnsrl_wx_u8mf4(...) __riscv_vnsrl_wx_u8mf4(__VA_ARGS__) |
| #define | vnsrl_wx_u8mf4_m(...) __riscv_vnsrl_wx_u8mf4_tumu(__VA_ARGS__) |
| #define | vnsrl_wx_u8mf8(...) __riscv_vnsrl_wx_u8mf8(__VA_ARGS__) |
| #define | vnsrl_wx_u8mf8_m(...) __riscv_vnsrl_wx_u8mf8_tumu(__VA_ARGS__) |
| #define | vor_vv_i16m1(...) __riscv_vor_vv_i16m1(__VA_ARGS__) |
| #define | vor_vv_i16m1_m(...) __riscv_vor_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vor_vv_i16m2(...) __riscv_vor_vv_i16m2(__VA_ARGS__) |
| #define | vor_vv_i16m2_m(...) __riscv_vor_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vor_vv_i16m4(...) __riscv_vor_vv_i16m4(__VA_ARGS__) |
| #define | vor_vv_i16m4_m(...) __riscv_vor_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vor_vv_i16m8(...) __riscv_vor_vv_i16m8(__VA_ARGS__) |
| #define | vor_vv_i16m8_m(...) __riscv_vor_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vor_vv_i16mf2(...) __riscv_vor_vv_i16mf2(__VA_ARGS__) |
| #define | vor_vv_i16mf2_m(...) __riscv_vor_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vor_vv_i16mf4(...) __riscv_vor_vv_i16mf4(__VA_ARGS__) |
| #define | vor_vv_i16mf4_m(...) __riscv_vor_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vor_vv_i32m1(...) __riscv_vor_vv_i32m1(__VA_ARGS__) |
| #define | vor_vv_i32m1_m(...) __riscv_vor_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vor_vv_i32m2(...) __riscv_vor_vv_i32m2(__VA_ARGS__) |
| #define | vor_vv_i32m2_m(...) __riscv_vor_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vor_vv_i32m4(...) __riscv_vor_vv_i32m4(__VA_ARGS__) |
| #define | vor_vv_i32m4_m(...) __riscv_vor_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vor_vv_i32m8(...) __riscv_vor_vv_i32m8(__VA_ARGS__) |
| #define | vor_vv_i32m8_m(...) __riscv_vor_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vor_vv_i32mf2(...) __riscv_vor_vv_i32mf2(__VA_ARGS__) |
| #define | vor_vv_i32mf2_m(...) __riscv_vor_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vor_vv_i64m1(...) __riscv_vor_vv_i64m1(__VA_ARGS__) |
| #define | vor_vv_i64m1_m(...) __riscv_vor_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vor_vv_i64m2(...) __riscv_vor_vv_i64m2(__VA_ARGS__) |
| #define | vor_vv_i64m2_m(...) __riscv_vor_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vor_vv_i64m4(...) __riscv_vor_vv_i64m4(__VA_ARGS__) |
| #define | vor_vv_i64m4_m(...) __riscv_vor_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vor_vv_i64m8(...) __riscv_vor_vv_i64m8(__VA_ARGS__) |
| #define | vor_vv_i64m8_m(...) __riscv_vor_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vor_vv_i8m1(...) __riscv_vor_vv_i8m1(__VA_ARGS__) |
| #define | vor_vv_i8m1_m(...) __riscv_vor_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vor_vv_i8m2(...) __riscv_vor_vv_i8m2(__VA_ARGS__) |
| #define | vor_vv_i8m2_m(...) __riscv_vor_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vor_vv_i8m4(...) __riscv_vor_vv_i8m4(__VA_ARGS__) |
| #define | vor_vv_i8m4_m(...) __riscv_vor_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vor_vv_i8m8(...) __riscv_vor_vv_i8m8(__VA_ARGS__) |
| #define | vor_vv_i8m8_m(...) __riscv_vor_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vor_vv_i8mf2(...) __riscv_vor_vv_i8mf2(__VA_ARGS__) |
| #define | vor_vv_i8mf2_m(...) __riscv_vor_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vor_vv_i8mf4(...) __riscv_vor_vv_i8mf4(__VA_ARGS__) |
| #define | vor_vv_i8mf4_m(...) __riscv_vor_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vor_vv_i8mf8(...) __riscv_vor_vv_i8mf8(__VA_ARGS__) |
| #define | vor_vv_i8mf8_m(...) __riscv_vor_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vor_vv_u16m1(...) __riscv_vor_vv_u16m1(__VA_ARGS__) |
| #define | vor_vv_u16m1_m(...) __riscv_vor_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vor_vv_u16m2(...) __riscv_vor_vv_u16m2(__VA_ARGS__) |
| #define | vor_vv_u16m2_m(...) __riscv_vor_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vor_vv_u16m4(...) __riscv_vor_vv_u16m4(__VA_ARGS__) |
| #define | vor_vv_u16m4_m(...) __riscv_vor_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vor_vv_u16m8(...) __riscv_vor_vv_u16m8(__VA_ARGS__) |
| #define | vor_vv_u16m8_m(...) __riscv_vor_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vor_vv_u16mf2(...) __riscv_vor_vv_u16mf2(__VA_ARGS__) |
| #define | vor_vv_u16mf2_m(...) __riscv_vor_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vor_vv_u16mf4(...) __riscv_vor_vv_u16mf4(__VA_ARGS__) |
| #define | vor_vv_u16mf4_m(...) __riscv_vor_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vor_vv_u32m1(...) __riscv_vor_vv_u32m1(__VA_ARGS__) |
| #define | vor_vv_u32m1_m(...) __riscv_vor_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vor_vv_u32m2(...) __riscv_vor_vv_u32m2(__VA_ARGS__) |
| #define | vor_vv_u32m2_m(...) __riscv_vor_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vor_vv_u32m4(...) __riscv_vor_vv_u32m4(__VA_ARGS__) |
| #define | vor_vv_u32m4_m(...) __riscv_vor_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vor_vv_u32m8(...) __riscv_vor_vv_u32m8(__VA_ARGS__) |
| #define | vor_vv_u32m8_m(...) __riscv_vor_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vor_vv_u32mf2(...) __riscv_vor_vv_u32mf2(__VA_ARGS__) |
| #define | vor_vv_u32mf2_m(...) __riscv_vor_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vor_vv_u64m1(...) __riscv_vor_vv_u64m1(__VA_ARGS__) |
| #define | vor_vv_u64m1_m(...) __riscv_vor_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vor_vv_u64m2(...) __riscv_vor_vv_u64m2(__VA_ARGS__) |
| #define | vor_vv_u64m2_m(...) __riscv_vor_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vor_vv_u64m4(...) __riscv_vor_vv_u64m4(__VA_ARGS__) |
| #define | vor_vv_u64m4_m(...) __riscv_vor_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vor_vv_u64m8(...) __riscv_vor_vv_u64m8(__VA_ARGS__) |
| #define | vor_vv_u64m8_m(...) __riscv_vor_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vor_vv_u8m1(...) __riscv_vor_vv_u8m1(__VA_ARGS__) |
| #define | vor_vv_u8m1_m(...) __riscv_vor_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vor_vv_u8m2(...) __riscv_vor_vv_u8m2(__VA_ARGS__) |
| #define | vor_vv_u8m2_m(...) __riscv_vor_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vor_vv_u8m4(...) __riscv_vor_vv_u8m4(__VA_ARGS__) |
| #define | vor_vv_u8m4_m(...) __riscv_vor_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vor_vv_u8m8(...) __riscv_vor_vv_u8m8(__VA_ARGS__) |
| #define | vor_vv_u8m8_m(...) __riscv_vor_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vor_vv_u8mf2(...) __riscv_vor_vv_u8mf2(__VA_ARGS__) |
| #define | vor_vv_u8mf2_m(...) __riscv_vor_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vor_vv_u8mf4(...) __riscv_vor_vv_u8mf4(__VA_ARGS__) |
| #define | vor_vv_u8mf4_m(...) __riscv_vor_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vor_vv_u8mf8(...) __riscv_vor_vv_u8mf8(__VA_ARGS__) |
| #define | vor_vv_u8mf8_m(...) __riscv_vor_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vor_vx_i16m1(...) __riscv_vor_vx_i16m1(__VA_ARGS__) |
| #define | vor_vx_i16m1_m(...) __riscv_vor_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vor_vx_i16m2(...) __riscv_vor_vx_i16m2(__VA_ARGS__) |
| #define | vor_vx_i16m2_m(...) __riscv_vor_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vor_vx_i16m4(...) __riscv_vor_vx_i16m4(__VA_ARGS__) |
| #define | vor_vx_i16m4_m(...) __riscv_vor_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vor_vx_i16m8(...) __riscv_vor_vx_i16m8(__VA_ARGS__) |
| #define | vor_vx_i16m8_m(...) __riscv_vor_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vor_vx_i16mf2(...) __riscv_vor_vx_i16mf2(__VA_ARGS__) |
| #define | vor_vx_i16mf2_m(...) __riscv_vor_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vor_vx_i16mf4(...) __riscv_vor_vx_i16mf4(__VA_ARGS__) |
| #define | vor_vx_i16mf4_m(...) __riscv_vor_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vor_vx_i32m1(...) __riscv_vor_vx_i32m1(__VA_ARGS__) |
| #define | vor_vx_i32m1_m(...) __riscv_vor_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vor_vx_i32m2(...) __riscv_vor_vx_i32m2(__VA_ARGS__) |
| #define | vor_vx_i32m2_m(...) __riscv_vor_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vor_vx_i32m4(...) __riscv_vor_vx_i32m4(__VA_ARGS__) |
| #define | vor_vx_i32m4_m(...) __riscv_vor_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vor_vx_i32m8(...) __riscv_vor_vx_i32m8(__VA_ARGS__) |
| #define | vor_vx_i32m8_m(...) __riscv_vor_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vor_vx_i32mf2(...) __riscv_vor_vx_i32mf2(__VA_ARGS__) |
| #define | vor_vx_i32mf2_m(...) __riscv_vor_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vor_vx_i64m1(...) __riscv_vor_vx_i64m1(__VA_ARGS__) |
| #define | vor_vx_i64m1_m(...) __riscv_vor_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vor_vx_i64m2(...) __riscv_vor_vx_i64m2(__VA_ARGS__) |
| #define | vor_vx_i64m2_m(...) __riscv_vor_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vor_vx_i64m4(...) __riscv_vor_vx_i64m4(__VA_ARGS__) |
| #define | vor_vx_i64m4_m(...) __riscv_vor_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vor_vx_i64m8(...) __riscv_vor_vx_i64m8(__VA_ARGS__) |
| #define | vor_vx_i64m8_m(...) __riscv_vor_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vor_vx_i8m1(...) __riscv_vor_vx_i8m1(__VA_ARGS__) |
| #define | vor_vx_i8m1_m(...) __riscv_vor_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vor_vx_i8m2(...) __riscv_vor_vx_i8m2(__VA_ARGS__) |
| #define | vor_vx_i8m2_m(...) __riscv_vor_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vor_vx_i8m4(...) __riscv_vor_vx_i8m4(__VA_ARGS__) |
| #define | vor_vx_i8m4_m(...) __riscv_vor_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vor_vx_i8m8(...) __riscv_vor_vx_i8m8(__VA_ARGS__) |
| #define | vor_vx_i8m8_m(...) __riscv_vor_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vor_vx_i8mf2(...) __riscv_vor_vx_i8mf2(__VA_ARGS__) |
| #define | vor_vx_i8mf2_m(...) __riscv_vor_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vor_vx_i8mf4(...) __riscv_vor_vx_i8mf4(__VA_ARGS__) |
| #define | vor_vx_i8mf4_m(...) __riscv_vor_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vor_vx_i8mf8(...) __riscv_vor_vx_i8mf8(__VA_ARGS__) |
| #define | vor_vx_i8mf8_m(...) __riscv_vor_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vor_vx_u16m1(...) __riscv_vor_vx_u16m1(__VA_ARGS__) |
| #define | vor_vx_u16m1_m(...) __riscv_vor_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vor_vx_u16m2(...) __riscv_vor_vx_u16m2(__VA_ARGS__) |
| #define | vor_vx_u16m2_m(...) __riscv_vor_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vor_vx_u16m4(...) __riscv_vor_vx_u16m4(__VA_ARGS__) |
| #define | vor_vx_u16m4_m(...) __riscv_vor_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vor_vx_u16m8(...) __riscv_vor_vx_u16m8(__VA_ARGS__) |
| #define | vor_vx_u16m8_m(...) __riscv_vor_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vor_vx_u16mf2(...) __riscv_vor_vx_u16mf2(__VA_ARGS__) |
| #define | vor_vx_u16mf2_m(...) __riscv_vor_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vor_vx_u16mf4(...) __riscv_vor_vx_u16mf4(__VA_ARGS__) |
| #define | vor_vx_u16mf4_m(...) __riscv_vor_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vor_vx_u32m1(...) __riscv_vor_vx_u32m1(__VA_ARGS__) |
| #define | vor_vx_u32m1_m(...) __riscv_vor_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vor_vx_u32m2(...) __riscv_vor_vx_u32m2(__VA_ARGS__) |
| #define | vor_vx_u32m2_m(...) __riscv_vor_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vor_vx_u32m4(...) __riscv_vor_vx_u32m4(__VA_ARGS__) |
| #define | vor_vx_u32m4_m(...) __riscv_vor_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vor_vx_u32m8(...) __riscv_vor_vx_u32m8(__VA_ARGS__) |
| #define | vor_vx_u32m8_m(...) __riscv_vor_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vor_vx_u32mf2(...) __riscv_vor_vx_u32mf2(__VA_ARGS__) |
| #define | vor_vx_u32mf2_m(...) __riscv_vor_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vor_vx_u64m1(...) __riscv_vor_vx_u64m1(__VA_ARGS__) |
| #define | vor_vx_u64m1_m(...) __riscv_vor_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vor_vx_u64m2(...) __riscv_vor_vx_u64m2(__VA_ARGS__) |
| #define | vor_vx_u64m2_m(...) __riscv_vor_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vor_vx_u64m4(...) __riscv_vor_vx_u64m4(__VA_ARGS__) |
| #define | vor_vx_u64m4_m(...) __riscv_vor_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vor_vx_u64m8(...) __riscv_vor_vx_u64m8(__VA_ARGS__) |
| #define | vor_vx_u64m8_m(...) __riscv_vor_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vor_vx_u8m1(...) __riscv_vor_vx_u8m1(__VA_ARGS__) |
| #define | vor_vx_u8m1_m(...) __riscv_vor_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vor_vx_u8m2(...) __riscv_vor_vx_u8m2(__VA_ARGS__) |
| #define | vor_vx_u8m2_m(...) __riscv_vor_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vor_vx_u8m4(...) __riscv_vor_vx_u8m4(__VA_ARGS__) |
| #define | vor_vx_u8m4_m(...) __riscv_vor_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vor_vx_u8m8(...) __riscv_vor_vx_u8m8(__VA_ARGS__) |
| #define | vor_vx_u8m8_m(...) __riscv_vor_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vor_vx_u8mf2(...) __riscv_vor_vx_u8mf2(__VA_ARGS__) |
| #define | vor_vx_u8mf2_m(...) __riscv_vor_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vor_vx_u8mf4(...) __riscv_vor_vx_u8mf4(__VA_ARGS__) |
| #define | vor_vx_u8mf4_m(...) __riscv_vor_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vor_vx_u8mf8(...) __riscv_vor_vx_u8mf8(__VA_ARGS__) |
| #define | vor_vx_u8mf8_m(...) __riscv_vor_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vredand_vs_i16m1_i16m1(...) __riscv_vredand_vs_i16m1_i16m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i16m1_i16m1_m(...) __riscv_vredand_vs_i16m1_i16m1_tum(__VA_ARGS__) |
| #define | vredand_vs_i16m2_i16m1(...) __riscv_vredand_vs_i16m2_i16m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i16m2_i16m1_m(...) __riscv_vredand_vs_i16m2_i16m1_tum(__VA_ARGS__) |
| #define | vredand_vs_i16m4_i16m1(...) __riscv_vredand_vs_i16m4_i16m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i16m4_i16m1_m(...) __riscv_vredand_vs_i16m4_i16m1_tum(__VA_ARGS__) |
| #define | vredand_vs_i16m8_i16m1(...) __riscv_vredand_vs_i16m8_i16m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i16m8_i16m1_m(...) __riscv_vredand_vs_i16m8_i16m1_tum(__VA_ARGS__) |
| #define | vredand_vs_i16mf2_i16m1(...) __riscv_vredand_vs_i16mf2_i16m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i16mf2_i16m1_m(...) __riscv_vredand_vs_i16mf2_i16m1_tum(__VA_ARGS__) |
| #define | vredand_vs_i16mf4_i16m1(...) __riscv_vredand_vs_i16mf4_i16m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i16mf4_i16m1_m(...) __riscv_vredand_vs_i16mf4_i16m1_tum(__VA_ARGS__) |
| #define | vredand_vs_i32m1_i32m1(...) __riscv_vredand_vs_i32m1_i32m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i32m1_i32m1_m(...) __riscv_vredand_vs_i32m1_i32m1_tum(__VA_ARGS__) |
| #define | vredand_vs_i32m2_i32m1(...) __riscv_vredand_vs_i32m2_i32m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i32m2_i32m1_m(...) __riscv_vredand_vs_i32m2_i32m1_tum(__VA_ARGS__) |
| #define | vredand_vs_i32m4_i32m1(...) __riscv_vredand_vs_i32m4_i32m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i32m4_i32m1_m(...) __riscv_vredand_vs_i32m4_i32m1_tum(__VA_ARGS__) |
| #define | vredand_vs_i32m8_i32m1(...) __riscv_vredand_vs_i32m8_i32m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i32m8_i32m1_m(...) __riscv_vredand_vs_i32m8_i32m1_tum(__VA_ARGS__) |
| #define | vredand_vs_i32mf2_i32m1(...) __riscv_vredand_vs_i32mf2_i32m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i32mf2_i32m1_m(...) __riscv_vredand_vs_i32mf2_i32m1_tum(__VA_ARGS__) |
| #define | vredand_vs_i64m1_i64m1(...) __riscv_vredand_vs_i64m1_i64m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i64m1_i64m1_m(...) __riscv_vredand_vs_i64m1_i64m1_tum(__VA_ARGS__) |
| #define | vredand_vs_i64m2_i64m1(...) __riscv_vredand_vs_i64m2_i64m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i64m2_i64m1_m(...) __riscv_vredand_vs_i64m2_i64m1_tum(__VA_ARGS__) |
| #define | vredand_vs_i64m4_i64m1(...) __riscv_vredand_vs_i64m4_i64m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i64m4_i64m1_m(...) __riscv_vredand_vs_i64m4_i64m1_tum(__VA_ARGS__) |
| #define | vredand_vs_i64m8_i64m1(...) __riscv_vredand_vs_i64m8_i64m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i64m8_i64m1_m(...) __riscv_vredand_vs_i64m8_i64m1_tum(__VA_ARGS__) |
| #define | vredand_vs_i8m1_i8m1(...) __riscv_vredand_vs_i8m1_i8m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i8m1_i8m1_m(...) __riscv_vredand_vs_i8m1_i8m1_tum(__VA_ARGS__) |
| #define | vredand_vs_i8m2_i8m1(...) __riscv_vredand_vs_i8m2_i8m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i8m2_i8m1_m(...) __riscv_vredand_vs_i8m2_i8m1_tum(__VA_ARGS__) |
| #define | vredand_vs_i8m4_i8m1(...) __riscv_vredand_vs_i8m4_i8m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i8m4_i8m1_m(...) __riscv_vredand_vs_i8m4_i8m1_tum(__VA_ARGS__) |
| #define | vredand_vs_i8m8_i8m1(...) __riscv_vredand_vs_i8m8_i8m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i8m8_i8m1_m(...) __riscv_vredand_vs_i8m8_i8m1_tum(__VA_ARGS__) |
| #define | vredand_vs_i8mf2_i8m1(...) __riscv_vredand_vs_i8mf2_i8m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i8mf2_i8m1_m(...) __riscv_vredand_vs_i8mf2_i8m1_tum(__VA_ARGS__) |
| #define | vredand_vs_i8mf4_i8m1(...) __riscv_vredand_vs_i8mf4_i8m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i8mf4_i8m1_m(...) __riscv_vredand_vs_i8mf4_i8m1_tum(__VA_ARGS__) |
| #define | vredand_vs_i8mf8_i8m1(...) __riscv_vredand_vs_i8mf8_i8m1_tu(__VA_ARGS__) |
| #define | vredand_vs_i8mf8_i8m1_m(...) __riscv_vredand_vs_i8mf8_i8m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u16m1_u16m1(...) __riscv_vredand_vs_u16m1_u16m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u16m1_u16m1_m(...) __riscv_vredand_vs_u16m1_u16m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u16m2_u16m1(...) __riscv_vredand_vs_u16m2_u16m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u16m2_u16m1_m(...) __riscv_vredand_vs_u16m2_u16m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u16m4_u16m1(...) __riscv_vredand_vs_u16m4_u16m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u16m4_u16m1_m(...) __riscv_vredand_vs_u16m4_u16m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u16m8_u16m1(...) __riscv_vredand_vs_u16m8_u16m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u16m8_u16m1_m(...) __riscv_vredand_vs_u16m8_u16m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u16mf2_u16m1(...) __riscv_vredand_vs_u16mf2_u16m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u16mf2_u16m1_m(...) __riscv_vredand_vs_u16mf2_u16m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u16mf4_u16m1(...) __riscv_vredand_vs_u16mf4_u16m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u16mf4_u16m1_m(...) __riscv_vredand_vs_u16mf4_u16m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u32m1_u32m1(...) __riscv_vredand_vs_u32m1_u32m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u32m1_u32m1_m(...) __riscv_vredand_vs_u32m1_u32m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u32m2_u32m1(...) __riscv_vredand_vs_u32m2_u32m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u32m2_u32m1_m(...) __riscv_vredand_vs_u32m2_u32m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u32m4_u32m1(...) __riscv_vredand_vs_u32m4_u32m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u32m4_u32m1_m(...) __riscv_vredand_vs_u32m4_u32m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u32m8_u32m1(...) __riscv_vredand_vs_u32m8_u32m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u32m8_u32m1_m(...) __riscv_vredand_vs_u32m8_u32m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u32mf2_u32m1(...) __riscv_vredand_vs_u32mf2_u32m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u32mf2_u32m1_m(...) __riscv_vredand_vs_u32mf2_u32m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u64m1_u64m1(...) __riscv_vredand_vs_u64m1_u64m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u64m1_u64m1_m(...) __riscv_vredand_vs_u64m1_u64m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u64m2_u64m1(...) __riscv_vredand_vs_u64m2_u64m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u64m2_u64m1_m(...) __riscv_vredand_vs_u64m2_u64m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u64m4_u64m1(...) __riscv_vredand_vs_u64m4_u64m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u64m4_u64m1_m(...) __riscv_vredand_vs_u64m4_u64m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u64m8_u64m1(...) __riscv_vredand_vs_u64m8_u64m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u64m8_u64m1_m(...) __riscv_vredand_vs_u64m8_u64m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u8m1_u8m1(...) __riscv_vredand_vs_u8m1_u8m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u8m1_u8m1_m(...) __riscv_vredand_vs_u8m1_u8m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u8m2_u8m1(...) __riscv_vredand_vs_u8m2_u8m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u8m2_u8m1_m(...) __riscv_vredand_vs_u8m2_u8m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u8m4_u8m1(...) __riscv_vredand_vs_u8m4_u8m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u8m4_u8m1_m(...) __riscv_vredand_vs_u8m4_u8m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u8m8_u8m1(...) __riscv_vredand_vs_u8m8_u8m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u8m8_u8m1_m(...) __riscv_vredand_vs_u8m8_u8m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u8mf2_u8m1(...) __riscv_vredand_vs_u8mf2_u8m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u8mf2_u8m1_m(...) __riscv_vredand_vs_u8mf2_u8m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u8mf4_u8m1(...) __riscv_vredand_vs_u8mf4_u8m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u8mf4_u8m1_m(...) __riscv_vredand_vs_u8mf4_u8m1_tum(__VA_ARGS__) |
| #define | vredand_vs_u8mf8_u8m1(...) __riscv_vredand_vs_u8mf8_u8m1_tu(__VA_ARGS__) |
| #define | vredand_vs_u8mf8_u8m1_m(...) __riscv_vredand_vs_u8mf8_u8m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i16m1_i16m1(...) __riscv_vredmax_vs_i16m1_i16m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i16m1_i16m1_m(...) __riscv_vredmax_vs_i16m1_i16m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i16m2_i16m1(...) __riscv_vredmax_vs_i16m2_i16m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i16m2_i16m1_m(...) __riscv_vredmax_vs_i16m2_i16m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i16m4_i16m1(...) __riscv_vredmax_vs_i16m4_i16m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i16m4_i16m1_m(...) __riscv_vredmax_vs_i16m4_i16m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i16m8_i16m1(...) __riscv_vredmax_vs_i16m8_i16m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i16m8_i16m1_m(...) __riscv_vredmax_vs_i16m8_i16m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i16mf2_i16m1(...) __riscv_vredmax_vs_i16mf2_i16m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i16mf2_i16m1_m(...) __riscv_vredmax_vs_i16mf2_i16m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i16mf4_i16m1(...) __riscv_vredmax_vs_i16mf4_i16m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i16mf4_i16m1_m(...) __riscv_vredmax_vs_i16mf4_i16m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i32m1_i32m1(...) __riscv_vredmax_vs_i32m1_i32m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i32m1_i32m1_m(...) __riscv_vredmax_vs_i32m1_i32m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i32m2_i32m1(...) __riscv_vredmax_vs_i32m2_i32m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i32m2_i32m1_m(...) __riscv_vredmax_vs_i32m2_i32m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i32m4_i32m1(...) __riscv_vredmax_vs_i32m4_i32m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i32m4_i32m1_m(...) __riscv_vredmax_vs_i32m4_i32m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i32m8_i32m1(...) __riscv_vredmax_vs_i32m8_i32m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i32m8_i32m1_m(...) __riscv_vredmax_vs_i32m8_i32m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i32mf2_i32m1(...) __riscv_vredmax_vs_i32mf2_i32m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i32mf2_i32m1_m(...) __riscv_vredmax_vs_i32mf2_i32m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i64m1_i64m1(...) __riscv_vredmax_vs_i64m1_i64m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i64m1_i64m1_m(...) __riscv_vredmax_vs_i64m1_i64m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i64m2_i64m1(...) __riscv_vredmax_vs_i64m2_i64m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i64m2_i64m1_m(...) __riscv_vredmax_vs_i64m2_i64m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i64m4_i64m1(...) __riscv_vredmax_vs_i64m4_i64m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i64m4_i64m1_m(...) __riscv_vredmax_vs_i64m4_i64m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i64m8_i64m1(...) __riscv_vredmax_vs_i64m8_i64m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i64m8_i64m1_m(...) __riscv_vredmax_vs_i64m8_i64m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i8m1_i8m1(...) __riscv_vredmax_vs_i8m1_i8m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i8m1_i8m1_m(...) __riscv_vredmax_vs_i8m1_i8m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i8m2_i8m1(...) __riscv_vredmax_vs_i8m2_i8m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i8m2_i8m1_m(...) __riscv_vredmax_vs_i8m2_i8m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i8m4_i8m1(...) __riscv_vredmax_vs_i8m4_i8m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i8m4_i8m1_m(...) __riscv_vredmax_vs_i8m4_i8m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i8m8_i8m1(...) __riscv_vredmax_vs_i8m8_i8m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i8m8_i8m1_m(...) __riscv_vredmax_vs_i8m8_i8m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i8mf2_i8m1(...) __riscv_vredmax_vs_i8mf2_i8m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i8mf2_i8m1_m(...) __riscv_vredmax_vs_i8mf2_i8m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i8mf4_i8m1(...) __riscv_vredmax_vs_i8mf4_i8m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i8mf4_i8m1_m(...) __riscv_vredmax_vs_i8mf4_i8m1_tum(__VA_ARGS__) |
| #define | vredmax_vs_i8mf8_i8m1(...) __riscv_vredmax_vs_i8mf8_i8m1_tu(__VA_ARGS__) |
| #define | vredmax_vs_i8mf8_i8m1_m(...) __riscv_vredmax_vs_i8mf8_i8m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u16m1_u16m1(...) __riscv_vredmaxu_vs_u16m1_u16m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u16m1_u16m1_m(...) __riscv_vredmaxu_vs_u16m1_u16m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u16m2_u16m1(...) __riscv_vredmaxu_vs_u16m2_u16m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u16m2_u16m1_m(...) __riscv_vredmaxu_vs_u16m2_u16m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u16m4_u16m1(...) __riscv_vredmaxu_vs_u16m4_u16m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u16m4_u16m1_m(...) __riscv_vredmaxu_vs_u16m4_u16m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u16m8_u16m1(...) __riscv_vredmaxu_vs_u16m8_u16m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u16m8_u16m1_m(...) __riscv_vredmaxu_vs_u16m8_u16m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u16mf2_u16m1(...) __riscv_vredmaxu_vs_u16mf2_u16m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u16mf2_u16m1_m(...) __riscv_vredmaxu_vs_u16mf2_u16m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u16mf4_u16m1(...) __riscv_vredmaxu_vs_u16mf4_u16m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u16mf4_u16m1_m(...) __riscv_vredmaxu_vs_u16mf4_u16m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u32m1_u32m1(...) __riscv_vredmaxu_vs_u32m1_u32m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u32m1_u32m1_m(...) __riscv_vredmaxu_vs_u32m1_u32m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u32m2_u32m1(...) __riscv_vredmaxu_vs_u32m2_u32m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u32m2_u32m1_m(...) __riscv_vredmaxu_vs_u32m2_u32m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u32m4_u32m1(...) __riscv_vredmaxu_vs_u32m4_u32m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u32m4_u32m1_m(...) __riscv_vredmaxu_vs_u32m4_u32m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u32m8_u32m1(...) __riscv_vredmaxu_vs_u32m8_u32m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u32m8_u32m1_m(...) __riscv_vredmaxu_vs_u32m8_u32m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u32mf2_u32m1(...) __riscv_vredmaxu_vs_u32mf2_u32m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u32mf2_u32m1_m(...) __riscv_vredmaxu_vs_u32mf2_u32m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u64m1_u64m1(...) __riscv_vredmaxu_vs_u64m1_u64m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u64m1_u64m1_m(...) __riscv_vredmaxu_vs_u64m1_u64m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u64m2_u64m1(...) __riscv_vredmaxu_vs_u64m2_u64m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u64m2_u64m1_m(...) __riscv_vredmaxu_vs_u64m2_u64m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u64m4_u64m1(...) __riscv_vredmaxu_vs_u64m4_u64m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u64m4_u64m1_m(...) __riscv_vredmaxu_vs_u64m4_u64m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u64m8_u64m1(...) __riscv_vredmaxu_vs_u64m8_u64m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u64m8_u64m1_m(...) __riscv_vredmaxu_vs_u64m8_u64m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u8m1_u8m1(...) __riscv_vredmaxu_vs_u8m1_u8m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u8m1_u8m1_m(...) __riscv_vredmaxu_vs_u8m1_u8m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u8m2_u8m1(...) __riscv_vredmaxu_vs_u8m2_u8m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u8m2_u8m1_m(...) __riscv_vredmaxu_vs_u8m2_u8m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u8m4_u8m1(...) __riscv_vredmaxu_vs_u8m4_u8m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u8m4_u8m1_m(...) __riscv_vredmaxu_vs_u8m4_u8m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u8m8_u8m1(...) __riscv_vredmaxu_vs_u8m8_u8m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u8m8_u8m1_m(...) __riscv_vredmaxu_vs_u8m8_u8m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u8mf2_u8m1(...) __riscv_vredmaxu_vs_u8mf2_u8m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u8mf2_u8m1_m(...) __riscv_vredmaxu_vs_u8mf2_u8m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u8mf4_u8m1(...) __riscv_vredmaxu_vs_u8mf4_u8m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u8mf4_u8m1_m(...) __riscv_vredmaxu_vs_u8mf4_u8m1_tum(__VA_ARGS__) |
| #define | vredmaxu_vs_u8mf8_u8m1(...) __riscv_vredmaxu_vs_u8mf8_u8m1_tu(__VA_ARGS__) |
| #define | vredmaxu_vs_u8mf8_u8m1_m(...) __riscv_vredmaxu_vs_u8mf8_u8m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i16m1_i16m1(...) __riscv_vredmin_vs_i16m1_i16m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i16m1_i16m1_m(...) __riscv_vredmin_vs_i16m1_i16m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i16m2_i16m1(...) __riscv_vredmin_vs_i16m2_i16m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i16m2_i16m1_m(...) __riscv_vredmin_vs_i16m2_i16m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i16m4_i16m1(...) __riscv_vredmin_vs_i16m4_i16m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i16m4_i16m1_m(...) __riscv_vredmin_vs_i16m4_i16m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i16m8_i16m1(...) __riscv_vredmin_vs_i16m8_i16m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i16m8_i16m1_m(...) __riscv_vredmin_vs_i16m8_i16m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i16mf2_i16m1(...) __riscv_vredmin_vs_i16mf2_i16m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i16mf2_i16m1_m(...) __riscv_vredmin_vs_i16mf2_i16m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i16mf4_i16m1(...) __riscv_vredmin_vs_i16mf4_i16m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i16mf4_i16m1_m(...) __riscv_vredmin_vs_i16mf4_i16m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i32m1_i32m1(...) __riscv_vredmin_vs_i32m1_i32m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i32m1_i32m1_m(...) __riscv_vredmin_vs_i32m1_i32m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i32m2_i32m1(...) __riscv_vredmin_vs_i32m2_i32m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i32m2_i32m1_m(...) __riscv_vredmin_vs_i32m2_i32m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i32m4_i32m1(...) __riscv_vredmin_vs_i32m4_i32m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i32m4_i32m1_m(...) __riscv_vredmin_vs_i32m4_i32m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i32m8_i32m1(...) __riscv_vredmin_vs_i32m8_i32m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i32m8_i32m1_m(...) __riscv_vredmin_vs_i32m8_i32m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i32mf2_i32m1(...) __riscv_vredmin_vs_i32mf2_i32m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i32mf2_i32m1_m(...) __riscv_vredmin_vs_i32mf2_i32m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i64m1_i64m1(...) __riscv_vredmin_vs_i64m1_i64m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i64m1_i64m1_m(...) __riscv_vredmin_vs_i64m1_i64m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i64m2_i64m1(...) __riscv_vredmin_vs_i64m2_i64m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i64m2_i64m1_m(...) __riscv_vredmin_vs_i64m2_i64m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i64m4_i64m1(...) __riscv_vredmin_vs_i64m4_i64m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i64m4_i64m1_m(...) __riscv_vredmin_vs_i64m4_i64m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i64m8_i64m1(...) __riscv_vredmin_vs_i64m8_i64m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i64m8_i64m1_m(...) __riscv_vredmin_vs_i64m8_i64m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i8m1_i8m1(...) __riscv_vredmin_vs_i8m1_i8m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i8m1_i8m1_m(...) __riscv_vredmin_vs_i8m1_i8m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i8m2_i8m1(...) __riscv_vredmin_vs_i8m2_i8m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i8m2_i8m1_m(...) __riscv_vredmin_vs_i8m2_i8m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i8m4_i8m1(...) __riscv_vredmin_vs_i8m4_i8m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i8m4_i8m1_m(...) __riscv_vredmin_vs_i8m4_i8m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i8m8_i8m1(...) __riscv_vredmin_vs_i8m8_i8m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i8m8_i8m1_m(...) __riscv_vredmin_vs_i8m8_i8m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i8mf2_i8m1(...) __riscv_vredmin_vs_i8mf2_i8m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i8mf2_i8m1_m(...) __riscv_vredmin_vs_i8mf2_i8m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i8mf4_i8m1(...) __riscv_vredmin_vs_i8mf4_i8m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i8mf4_i8m1_m(...) __riscv_vredmin_vs_i8mf4_i8m1_tum(__VA_ARGS__) |
| #define | vredmin_vs_i8mf8_i8m1(...) __riscv_vredmin_vs_i8mf8_i8m1_tu(__VA_ARGS__) |
| #define | vredmin_vs_i8mf8_i8m1_m(...) __riscv_vredmin_vs_i8mf8_i8m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u16m1_u16m1(...) __riscv_vredminu_vs_u16m1_u16m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u16m1_u16m1_m(...) __riscv_vredminu_vs_u16m1_u16m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u16m2_u16m1(...) __riscv_vredminu_vs_u16m2_u16m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u16m2_u16m1_m(...) __riscv_vredminu_vs_u16m2_u16m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u16m4_u16m1(...) __riscv_vredminu_vs_u16m4_u16m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u16m4_u16m1_m(...) __riscv_vredminu_vs_u16m4_u16m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u16m8_u16m1(...) __riscv_vredminu_vs_u16m8_u16m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u16m8_u16m1_m(...) __riscv_vredminu_vs_u16m8_u16m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u16mf2_u16m1(...) __riscv_vredminu_vs_u16mf2_u16m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u16mf2_u16m1_m(...) __riscv_vredminu_vs_u16mf2_u16m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u16mf4_u16m1(...) __riscv_vredminu_vs_u16mf4_u16m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u16mf4_u16m1_m(...) __riscv_vredminu_vs_u16mf4_u16m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u32m1_u32m1(...) __riscv_vredminu_vs_u32m1_u32m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u32m1_u32m1_m(...) __riscv_vredminu_vs_u32m1_u32m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u32m2_u32m1(...) __riscv_vredminu_vs_u32m2_u32m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u32m2_u32m1_m(...) __riscv_vredminu_vs_u32m2_u32m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u32m4_u32m1(...) __riscv_vredminu_vs_u32m4_u32m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u32m4_u32m1_m(...) __riscv_vredminu_vs_u32m4_u32m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u32m8_u32m1(...) __riscv_vredminu_vs_u32m8_u32m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u32m8_u32m1_m(...) __riscv_vredminu_vs_u32m8_u32m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u32mf2_u32m1(...) __riscv_vredminu_vs_u32mf2_u32m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u32mf2_u32m1_m(...) __riscv_vredminu_vs_u32mf2_u32m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u64m1_u64m1(...) __riscv_vredminu_vs_u64m1_u64m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u64m1_u64m1_m(...) __riscv_vredminu_vs_u64m1_u64m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u64m2_u64m1(...) __riscv_vredminu_vs_u64m2_u64m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u64m2_u64m1_m(...) __riscv_vredminu_vs_u64m2_u64m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u64m4_u64m1(...) __riscv_vredminu_vs_u64m4_u64m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u64m4_u64m1_m(...) __riscv_vredminu_vs_u64m4_u64m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u64m8_u64m1(...) __riscv_vredminu_vs_u64m8_u64m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u64m8_u64m1_m(...) __riscv_vredminu_vs_u64m8_u64m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u8m1_u8m1(...) __riscv_vredminu_vs_u8m1_u8m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u8m1_u8m1_m(...) __riscv_vredminu_vs_u8m1_u8m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u8m2_u8m1(...) __riscv_vredminu_vs_u8m2_u8m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u8m2_u8m1_m(...) __riscv_vredminu_vs_u8m2_u8m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u8m4_u8m1(...) __riscv_vredminu_vs_u8m4_u8m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u8m4_u8m1_m(...) __riscv_vredminu_vs_u8m4_u8m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u8m8_u8m1(...) __riscv_vredminu_vs_u8m8_u8m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u8m8_u8m1_m(...) __riscv_vredminu_vs_u8m8_u8m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u8mf2_u8m1(...) __riscv_vredminu_vs_u8mf2_u8m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u8mf2_u8m1_m(...) __riscv_vredminu_vs_u8mf2_u8m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u8mf4_u8m1(...) __riscv_vredminu_vs_u8mf4_u8m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u8mf4_u8m1_m(...) __riscv_vredminu_vs_u8mf4_u8m1_tum(__VA_ARGS__) |
| #define | vredminu_vs_u8mf8_u8m1(...) __riscv_vredminu_vs_u8mf8_u8m1_tu(__VA_ARGS__) |
| #define | vredminu_vs_u8mf8_u8m1_m(...) __riscv_vredminu_vs_u8mf8_u8m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i16m1_i16m1(...) __riscv_vredor_vs_i16m1_i16m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i16m1_i16m1_m(...) __riscv_vredor_vs_i16m1_i16m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i16m2_i16m1(...) __riscv_vredor_vs_i16m2_i16m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i16m2_i16m1_m(...) __riscv_vredor_vs_i16m2_i16m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i16m4_i16m1(...) __riscv_vredor_vs_i16m4_i16m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i16m4_i16m1_m(...) __riscv_vredor_vs_i16m4_i16m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i16m8_i16m1(...) __riscv_vredor_vs_i16m8_i16m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i16m8_i16m1_m(...) __riscv_vredor_vs_i16m8_i16m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i16mf2_i16m1(...) __riscv_vredor_vs_i16mf2_i16m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i16mf2_i16m1_m(...) __riscv_vredor_vs_i16mf2_i16m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i16mf4_i16m1(...) __riscv_vredor_vs_i16mf4_i16m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i16mf4_i16m1_m(...) __riscv_vredor_vs_i16mf4_i16m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i32m1_i32m1(...) __riscv_vredor_vs_i32m1_i32m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i32m1_i32m1_m(...) __riscv_vredor_vs_i32m1_i32m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i32m2_i32m1(...) __riscv_vredor_vs_i32m2_i32m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i32m2_i32m1_m(...) __riscv_vredor_vs_i32m2_i32m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i32m4_i32m1(...) __riscv_vredor_vs_i32m4_i32m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i32m4_i32m1_m(...) __riscv_vredor_vs_i32m4_i32m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i32m8_i32m1(...) __riscv_vredor_vs_i32m8_i32m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i32m8_i32m1_m(...) __riscv_vredor_vs_i32m8_i32m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i32mf2_i32m1(...) __riscv_vredor_vs_i32mf2_i32m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i32mf2_i32m1_m(...) __riscv_vredor_vs_i32mf2_i32m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i64m1_i64m1(...) __riscv_vredor_vs_i64m1_i64m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i64m1_i64m1_m(...) __riscv_vredor_vs_i64m1_i64m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i64m2_i64m1(...) __riscv_vredor_vs_i64m2_i64m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i64m2_i64m1_m(...) __riscv_vredor_vs_i64m2_i64m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i64m4_i64m1(...) __riscv_vredor_vs_i64m4_i64m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i64m4_i64m1_m(...) __riscv_vredor_vs_i64m4_i64m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i64m8_i64m1(...) __riscv_vredor_vs_i64m8_i64m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i64m8_i64m1_m(...) __riscv_vredor_vs_i64m8_i64m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i8m1_i8m1(...) __riscv_vredor_vs_i8m1_i8m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i8m1_i8m1_m(...) __riscv_vredor_vs_i8m1_i8m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i8m2_i8m1(...) __riscv_vredor_vs_i8m2_i8m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i8m2_i8m1_m(...) __riscv_vredor_vs_i8m2_i8m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i8m4_i8m1(...) __riscv_vredor_vs_i8m4_i8m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i8m4_i8m1_m(...) __riscv_vredor_vs_i8m4_i8m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i8m8_i8m1(...) __riscv_vredor_vs_i8m8_i8m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i8m8_i8m1_m(...) __riscv_vredor_vs_i8m8_i8m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i8mf2_i8m1(...) __riscv_vredor_vs_i8mf2_i8m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i8mf2_i8m1_m(...) __riscv_vredor_vs_i8mf2_i8m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i8mf4_i8m1(...) __riscv_vredor_vs_i8mf4_i8m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i8mf4_i8m1_m(...) __riscv_vredor_vs_i8mf4_i8m1_tum(__VA_ARGS__) |
| #define | vredor_vs_i8mf8_i8m1(...) __riscv_vredor_vs_i8mf8_i8m1_tu(__VA_ARGS__) |
| #define | vredor_vs_i8mf8_i8m1_m(...) __riscv_vredor_vs_i8mf8_i8m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u16m1_u16m1(...) __riscv_vredor_vs_u16m1_u16m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u16m1_u16m1_m(...) __riscv_vredor_vs_u16m1_u16m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u16m2_u16m1(...) __riscv_vredor_vs_u16m2_u16m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u16m2_u16m1_m(...) __riscv_vredor_vs_u16m2_u16m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u16m4_u16m1(...) __riscv_vredor_vs_u16m4_u16m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u16m4_u16m1_m(...) __riscv_vredor_vs_u16m4_u16m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u16m8_u16m1(...) __riscv_vredor_vs_u16m8_u16m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u16m8_u16m1_m(...) __riscv_vredor_vs_u16m8_u16m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u16mf2_u16m1(...) __riscv_vredor_vs_u16mf2_u16m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u16mf2_u16m1_m(...) __riscv_vredor_vs_u16mf2_u16m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u16mf4_u16m1(...) __riscv_vredor_vs_u16mf4_u16m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u16mf4_u16m1_m(...) __riscv_vredor_vs_u16mf4_u16m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u32m1_u32m1(...) __riscv_vredor_vs_u32m1_u32m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u32m1_u32m1_m(...) __riscv_vredor_vs_u32m1_u32m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u32m2_u32m1(...) __riscv_vredor_vs_u32m2_u32m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u32m2_u32m1_m(...) __riscv_vredor_vs_u32m2_u32m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u32m4_u32m1(...) __riscv_vredor_vs_u32m4_u32m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u32m4_u32m1_m(...) __riscv_vredor_vs_u32m4_u32m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u32m8_u32m1(...) __riscv_vredor_vs_u32m8_u32m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u32m8_u32m1_m(...) __riscv_vredor_vs_u32m8_u32m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u32mf2_u32m1(...) __riscv_vredor_vs_u32mf2_u32m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u32mf2_u32m1_m(...) __riscv_vredor_vs_u32mf2_u32m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u64m1_u64m1(...) __riscv_vredor_vs_u64m1_u64m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u64m1_u64m1_m(...) __riscv_vredor_vs_u64m1_u64m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u64m2_u64m1(...) __riscv_vredor_vs_u64m2_u64m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u64m2_u64m1_m(...) __riscv_vredor_vs_u64m2_u64m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u64m4_u64m1(...) __riscv_vredor_vs_u64m4_u64m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u64m4_u64m1_m(...) __riscv_vredor_vs_u64m4_u64m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u64m8_u64m1(...) __riscv_vredor_vs_u64m8_u64m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u64m8_u64m1_m(...) __riscv_vredor_vs_u64m8_u64m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u8m1_u8m1(...) __riscv_vredor_vs_u8m1_u8m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u8m1_u8m1_m(...) __riscv_vredor_vs_u8m1_u8m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u8m2_u8m1(...) __riscv_vredor_vs_u8m2_u8m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u8m2_u8m1_m(...) __riscv_vredor_vs_u8m2_u8m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u8m4_u8m1(...) __riscv_vredor_vs_u8m4_u8m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u8m4_u8m1_m(...) __riscv_vredor_vs_u8m4_u8m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u8m8_u8m1(...) __riscv_vredor_vs_u8m8_u8m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u8m8_u8m1_m(...) __riscv_vredor_vs_u8m8_u8m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u8mf2_u8m1(...) __riscv_vredor_vs_u8mf2_u8m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u8mf2_u8m1_m(...) __riscv_vredor_vs_u8mf2_u8m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u8mf4_u8m1(...) __riscv_vredor_vs_u8mf4_u8m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u8mf4_u8m1_m(...) __riscv_vredor_vs_u8mf4_u8m1_tum(__VA_ARGS__) |
| #define | vredor_vs_u8mf8_u8m1(...) __riscv_vredor_vs_u8mf8_u8m1_tu(__VA_ARGS__) |
| #define | vredor_vs_u8mf8_u8m1_m(...) __riscv_vredor_vs_u8mf8_u8m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i16m1_i16m1(...) __riscv_vredsum_vs_i16m1_i16m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i16m1_i16m1_m(...) __riscv_vredsum_vs_i16m1_i16m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i16m2_i16m1(...) __riscv_vredsum_vs_i16m2_i16m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i16m2_i16m1_m(...) __riscv_vredsum_vs_i16m2_i16m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i16m4_i16m1(...) __riscv_vredsum_vs_i16m4_i16m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i16m4_i16m1_m(...) __riscv_vredsum_vs_i16m4_i16m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i16m8_i16m1(...) __riscv_vredsum_vs_i16m8_i16m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i16m8_i16m1_m(...) __riscv_vredsum_vs_i16m8_i16m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i16mf2_i16m1(...) __riscv_vredsum_vs_i16mf2_i16m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i16mf2_i16m1_m(...) __riscv_vredsum_vs_i16mf2_i16m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i16mf4_i16m1(...) __riscv_vredsum_vs_i16mf4_i16m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i16mf4_i16m1_m(...) __riscv_vredsum_vs_i16mf4_i16m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i32m1_i32m1(...) __riscv_vredsum_vs_i32m1_i32m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i32m1_i32m1_m(...) __riscv_vredsum_vs_i32m1_i32m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i32m2_i32m1(...) __riscv_vredsum_vs_i32m2_i32m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i32m2_i32m1_m(...) __riscv_vredsum_vs_i32m2_i32m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i32m4_i32m1(...) __riscv_vredsum_vs_i32m4_i32m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i32m4_i32m1_m(...) __riscv_vredsum_vs_i32m4_i32m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i32m8_i32m1(...) __riscv_vredsum_vs_i32m8_i32m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i32m8_i32m1_m(...) __riscv_vredsum_vs_i32m8_i32m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i32mf2_i32m1(...) __riscv_vredsum_vs_i32mf2_i32m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i32mf2_i32m1_m(...) __riscv_vredsum_vs_i32mf2_i32m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i64m1_i64m1(...) __riscv_vredsum_vs_i64m1_i64m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i64m1_i64m1_m(...) __riscv_vredsum_vs_i64m1_i64m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i64m2_i64m1(...) __riscv_vredsum_vs_i64m2_i64m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i64m2_i64m1_m(...) __riscv_vredsum_vs_i64m2_i64m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i64m4_i64m1(...) __riscv_vredsum_vs_i64m4_i64m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i64m4_i64m1_m(...) __riscv_vredsum_vs_i64m4_i64m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i64m8_i64m1(...) __riscv_vredsum_vs_i64m8_i64m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i64m8_i64m1_m(...) __riscv_vredsum_vs_i64m8_i64m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i8m1_i8m1(...) __riscv_vredsum_vs_i8m1_i8m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i8m1_i8m1_m(...) __riscv_vredsum_vs_i8m1_i8m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i8m2_i8m1(...) __riscv_vredsum_vs_i8m2_i8m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i8m2_i8m1_m(...) __riscv_vredsum_vs_i8m2_i8m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i8m4_i8m1(...) __riscv_vredsum_vs_i8m4_i8m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i8m4_i8m1_m(...) __riscv_vredsum_vs_i8m4_i8m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i8m8_i8m1(...) __riscv_vredsum_vs_i8m8_i8m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i8m8_i8m1_m(...) __riscv_vredsum_vs_i8m8_i8m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i8mf2_i8m1(...) __riscv_vredsum_vs_i8mf2_i8m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i8mf2_i8m1_m(...) __riscv_vredsum_vs_i8mf2_i8m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i8mf4_i8m1(...) __riscv_vredsum_vs_i8mf4_i8m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i8mf4_i8m1_m(...) __riscv_vredsum_vs_i8mf4_i8m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_i8mf8_i8m1(...) __riscv_vredsum_vs_i8mf8_i8m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_i8mf8_i8m1_m(...) __riscv_vredsum_vs_i8mf8_i8m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u16m1_u16m1(...) __riscv_vredsum_vs_u16m1_u16m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u16m1_u16m1_m(...) __riscv_vredsum_vs_u16m1_u16m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u16m2_u16m1(...) __riscv_vredsum_vs_u16m2_u16m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u16m2_u16m1_m(...) __riscv_vredsum_vs_u16m2_u16m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u16m4_u16m1(...) __riscv_vredsum_vs_u16m4_u16m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u16m4_u16m1_m(...) __riscv_vredsum_vs_u16m4_u16m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u16m8_u16m1(...) __riscv_vredsum_vs_u16m8_u16m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u16m8_u16m1_m(...) __riscv_vredsum_vs_u16m8_u16m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u16mf2_u16m1(...) __riscv_vredsum_vs_u16mf2_u16m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u16mf2_u16m1_m(...) __riscv_vredsum_vs_u16mf2_u16m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u16mf4_u16m1(...) __riscv_vredsum_vs_u16mf4_u16m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u16mf4_u16m1_m(...) __riscv_vredsum_vs_u16mf4_u16m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u32m1_u32m1(...) __riscv_vredsum_vs_u32m1_u32m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u32m1_u32m1_m(...) __riscv_vredsum_vs_u32m1_u32m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u32m2_u32m1(...) __riscv_vredsum_vs_u32m2_u32m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u32m2_u32m1_m(...) __riscv_vredsum_vs_u32m2_u32m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u32m4_u32m1(...) __riscv_vredsum_vs_u32m4_u32m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u32m4_u32m1_m(...) __riscv_vredsum_vs_u32m4_u32m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u32m8_u32m1(...) __riscv_vredsum_vs_u32m8_u32m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u32m8_u32m1_m(...) __riscv_vredsum_vs_u32m8_u32m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u32mf2_u32m1(...) __riscv_vredsum_vs_u32mf2_u32m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u32mf2_u32m1_m(...) __riscv_vredsum_vs_u32mf2_u32m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u64m1_u64m1(...) __riscv_vredsum_vs_u64m1_u64m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u64m1_u64m1_m(...) __riscv_vredsum_vs_u64m1_u64m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u64m2_u64m1(...) __riscv_vredsum_vs_u64m2_u64m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u64m2_u64m1_m(...) __riscv_vredsum_vs_u64m2_u64m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u64m4_u64m1(...) __riscv_vredsum_vs_u64m4_u64m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u64m4_u64m1_m(...) __riscv_vredsum_vs_u64m4_u64m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u64m8_u64m1(...) __riscv_vredsum_vs_u64m8_u64m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u64m8_u64m1_m(...) __riscv_vredsum_vs_u64m8_u64m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u8m1_u8m1(...) __riscv_vredsum_vs_u8m1_u8m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u8m1_u8m1_m(...) __riscv_vredsum_vs_u8m1_u8m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u8m2_u8m1(...) __riscv_vredsum_vs_u8m2_u8m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u8m2_u8m1_m(...) __riscv_vredsum_vs_u8m2_u8m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u8m4_u8m1(...) __riscv_vredsum_vs_u8m4_u8m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u8m4_u8m1_m(...) __riscv_vredsum_vs_u8m4_u8m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u8m8_u8m1(...) __riscv_vredsum_vs_u8m8_u8m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u8m8_u8m1_m(...) __riscv_vredsum_vs_u8m8_u8m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u8mf2_u8m1(...) __riscv_vredsum_vs_u8mf2_u8m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u8mf2_u8m1_m(...) __riscv_vredsum_vs_u8mf2_u8m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u8mf4_u8m1(...) __riscv_vredsum_vs_u8mf4_u8m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u8mf4_u8m1_m(...) __riscv_vredsum_vs_u8mf4_u8m1_tum(__VA_ARGS__) |
| #define | vredsum_vs_u8mf8_u8m1(...) __riscv_vredsum_vs_u8mf8_u8m1_tu(__VA_ARGS__) |
| #define | vredsum_vs_u8mf8_u8m1_m(...) __riscv_vredsum_vs_u8mf8_u8m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i16m1_i16m1(...) __riscv_vredxor_vs_i16m1_i16m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i16m1_i16m1_m(...) __riscv_vredxor_vs_i16m1_i16m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i16m2_i16m1(...) __riscv_vredxor_vs_i16m2_i16m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i16m2_i16m1_m(...) __riscv_vredxor_vs_i16m2_i16m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i16m4_i16m1(...) __riscv_vredxor_vs_i16m4_i16m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i16m4_i16m1_m(...) __riscv_vredxor_vs_i16m4_i16m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i16m8_i16m1(...) __riscv_vredxor_vs_i16m8_i16m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i16m8_i16m1_m(...) __riscv_vredxor_vs_i16m8_i16m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i16mf2_i16m1(...) __riscv_vredxor_vs_i16mf2_i16m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i16mf2_i16m1_m(...) __riscv_vredxor_vs_i16mf2_i16m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i16mf4_i16m1(...) __riscv_vredxor_vs_i16mf4_i16m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i16mf4_i16m1_m(...) __riscv_vredxor_vs_i16mf4_i16m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i32m1_i32m1(...) __riscv_vredxor_vs_i32m1_i32m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i32m1_i32m1_m(...) __riscv_vredxor_vs_i32m1_i32m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i32m2_i32m1(...) __riscv_vredxor_vs_i32m2_i32m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i32m2_i32m1_m(...) __riscv_vredxor_vs_i32m2_i32m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i32m4_i32m1(...) __riscv_vredxor_vs_i32m4_i32m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i32m4_i32m1_m(...) __riscv_vredxor_vs_i32m4_i32m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i32m8_i32m1(...) __riscv_vredxor_vs_i32m8_i32m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i32m8_i32m1_m(...) __riscv_vredxor_vs_i32m8_i32m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i32mf2_i32m1(...) __riscv_vredxor_vs_i32mf2_i32m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i32mf2_i32m1_m(...) __riscv_vredxor_vs_i32mf2_i32m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i64m1_i64m1(...) __riscv_vredxor_vs_i64m1_i64m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i64m1_i64m1_m(...) __riscv_vredxor_vs_i64m1_i64m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i64m2_i64m1(...) __riscv_vredxor_vs_i64m2_i64m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i64m2_i64m1_m(...) __riscv_vredxor_vs_i64m2_i64m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i64m4_i64m1(...) __riscv_vredxor_vs_i64m4_i64m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i64m4_i64m1_m(...) __riscv_vredxor_vs_i64m4_i64m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i64m8_i64m1(...) __riscv_vredxor_vs_i64m8_i64m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i64m8_i64m1_m(...) __riscv_vredxor_vs_i64m8_i64m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i8m1_i8m1(...) __riscv_vredxor_vs_i8m1_i8m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i8m1_i8m1_m(...) __riscv_vredxor_vs_i8m1_i8m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i8m2_i8m1(...) __riscv_vredxor_vs_i8m2_i8m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i8m2_i8m1_m(...) __riscv_vredxor_vs_i8m2_i8m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i8m4_i8m1(...) __riscv_vredxor_vs_i8m4_i8m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i8m4_i8m1_m(...) __riscv_vredxor_vs_i8m4_i8m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i8m8_i8m1(...) __riscv_vredxor_vs_i8m8_i8m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i8m8_i8m1_m(...) __riscv_vredxor_vs_i8m8_i8m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i8mf2_i8m1(...) __riscv_vredxor_vs_i8mf2_i8m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i8mf2_i8m1_m(...) __riscv_vredxor_vs_i8mf2_i8m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i8mf4_i8m1(...) __riscv_vredxor_vs_i8mf4_i8m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i8mf4_i8m1_m(...) __riscv_vredxor_vs_i8mf4_i8m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_i8mf8_i8m1(...) __riscv_vredxor_vs_i8mf8_i8m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_i8mf8_i8m1_m(...) __riscv_vredxor_vs_i8mf8_i8m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u16m1_u16m1(...) __riscv_vredxor_vs_u16m1_u16m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u16m1_u16m1_m(...) __riscv_vredxor_vs_u16m1_u16m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u16m2_u16m1(...) __riscv_vredxor_vs_u16m2_u16m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u16m2_u16m1_m(...) __riscv_vredxor_vs_u16m2_u16m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u16m4_u16m1(...) __riscv_vredxor_vs_u16m4_u16m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u16m4_u16m1_m(...) __riscv_vredxor_vs_u16m4_u16m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u16m8_u16m1(...) __riscv_vredxor_vs_u16m8_u16m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u16m8_u16m1_m(...) __riscv_vredxor_vs_u16m8_u16m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u16mf2_u16m1(...) __riscv_vredxor_vs_u16mf2_u16m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u16mf2_u16m1_m(...) __riscv_vredxor_vs_u16mf2_u16m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u16mf4_u16m1(...) __riscv_vredxor_vs_u16mf4_u16m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u16mf4_u16m1_m(...) __riscv_vredxor_vs_u16mf4_u16m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u32m1_u32m1(...) __riscv_vredxor_vs_u32m1_u32m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u32m1_u32m1_m(...) __riscv_vredxor_vs_u32m1_u32m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u32m2_u32m1(...) __riscv_vredxor_vs_u32m2_u32m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u32m2_u32m1_m(...) __riscv_vredxor_vs_u32m2_u32m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u32m4_u32m1(...) __riscv_vredxor_vs_u32m4_u32m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u32m4_u32m1_m(...) __riscv_vredxor_vs_u32m4_u32m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u32m8_u32m1(...) __riscv_vredxor_vs_u32m8_u32m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u32m8_u32m1_m(...) __riscv_vredxor_vs_u32m8_u32m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u32mf2_u32m1(...) __riscv_vredxor_vs_u32mf2_u32m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u32mf2_u32m1_m(...) __riscv_vredxor_vs_u32mf2_u32m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u64m1_u64m1(...) __riscv_vredxor_vs_u64m1_u64m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u64m1_u64m1_m(...) __riscv_vredxor_vs_u64m1_u64m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u64m2_u64m1(...) __riscv_vredxor_vs_u64m2_u64m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u64m2_u64m1_m(...) __riscv_vredxor_vs_u64m2_u64m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u64m4_u64m1(...) __riscv_vredxor_vs_u64m4_u64m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u64m4_u64m1_m(...) __riscv_vredxor_vs_u64m4_u64m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u64m8_u64m1(...) __riscv_vredxor_vs_u64m8_u64m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u64m8_u64m1_m(...) __riscv_vredxor_vs_u64m8_u64m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u8m1_u8m1(...) __riscv_vredxor_vs_u8m1_u8m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u8m1_u8m1_m(...) __riscv_vredxor_vs_u8m1_u8m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u8m2_u8m1(...) __riscv_vredxor_vs_u8m2_u8m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u8m2_u8m1_m(...) __riscv_vredxor_vs_u8m2_u8m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u8m4_u8m1(...) __riscv_vredxor_vs_u8m4_u8m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u8m4_u8m1_m(...) __riscv_vredxor_vs_u8m4_u8m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u8m8_u8m1(...) __riscv_vredxor_vs_u8m8_u8m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u8m8_u8m1_m(...) __riscv_vredxor_vs_u8m8_u8m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u8mf2_u8m1(...) __riscv_vredxor_vs_u8mf2_u8m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u8mf2_u8m1_m(...) __riscv_vredxor_vs_u8mf2_u8m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u8mf4_u8m1(...) __riscv_vredxor_vs_u8mf4_u8m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u8mf4_u8m1_m(...) __riscv_vredxor_vs_u8mf4_u8m1_tum(__VA_ARGS__) |
| #define | vredxor_vs_u8mf8_u8m1(...) __riscv_vredxor_vs_u8mf8_u8m1_tu(__VA_ARGS__) |
| #define | vredxor_vs_u8mf8_u8m1_m(...) __riscv_vredxor_vs_u8mf8_u8m1_tum(__VA_ARGS__) |
| #define | vreinterpret_v_f16m1_i16m1(...) __riscv_vreinterpret_v_f16m1_i16m1(__VA_ARGS__) |
| #define | vreinterpret_v_f16m1_u16m1(...) __riscv_vreinterpret_v_f16m1_u16m1(__VA_ARGS__) |
| #define | vreinterpret_v_f16m2_i16m2(...) __riscv_vreinterpret_v_f16m2_i16m2(__VA_ARGS__) |
| #define | vreinterpret_v_f16m2_u16m2(...) __riscv_vreinterpret_v_f16m2_u16m2(__VA_ARGS__) |
| #define | vreinterpret_v_f16m4_i16m4(...) __riscv_vreinterpret_v_f16m4_i16m4(__VA_ARGS__) |
| #define | vreinterpret_v_f16m4_u16m4(...) __riscv_vreinterpret_v_f16m4_u16m4(__VA_ARGS__) |
| #define | vreinterpret_v_f16m8_i16m8(...) __riscv_vreinterpret_v_f16m8_i16m8(__VA_ARGS__) |
| #define | vreinterpret_v_f16m8_u16m8(...) __riscv_vreinterpret_v_f16m8_u16m8(__VA_ARGS__) |
| #define | vreinterpret_v_f16mf2_i16mf2(...) __riscv_vreinterpret_v_f16mf2_i16mf2(__VA_ARGS__) |
| #define | vreinterpret_v_f16mf2_u16mf2(...) __riscv_vreinterpret_v_f16mf2_u16mf2(__VA_ARGS__) |
| #define | vreinterpret_v_f16mf4_i16mf4(...) __riscv_vreinterpret_v_f16mf4_i16mf4(__VA_ARGS__) |
| #define | vreinterpret_v_f16mf4_u16mf4(...) __riscv_vreinterpret_v_f16mf4_u16mf4(__VA_ARGS__) |
| #define | vreinterpret_v_f32m1_i32m1(...) __riscv_vreinterpret_v_f32m1_i32m1(__VA_ARGS__) |
| #define | vreinterpret_v_f32m1_u32m1(...) __riscv_vreinterpret_v_f32m1_u32m1(__VA_ARGS__) |
| #define | vreinterpret_v_f32m2_i32m2(...) __riscv_vreinterpret_v_f32m2_i32m2(__VA_ARGS__) |
| #define | vreinterpret_v_f32m2_u32m2(...) __riscv_vreinterpret_v_f32m2_u32m2(__VA_ARGS__) |
| #define | vreinterpret_v_f32m4_i32m4(...) __riscv_vreinterpret_v_f32m4_i32m4(__VA_ARGS__) |
| #define | vreinterpret_v_f32m4_u32m4(...) __riscv_vreinterpret_v_f32m4_u32m4(__VA_ARGS__) |
| #define | vreinterpret_v_f32m8_i32m8(...) __riscv_vreinterpret_v_f32m8_i32m8(__VA_ARGS__) |
| #define | vreinterpret_v_f32m8_u32m8(...) __riscv_vreinterpret_v_f32m8_u32m8(__VA_ARGS__) |
| #define | vreinterpret_v_f32mf2_i32mf2(...) __riscv_vreinterpret_v_f32mf2_i32mf2(__VA_ARGS__) |
| #define | vreinterpret_v_f32mf2_u32mf2(...) __riscv_vreinterpret_v_f32mf2_u32mf2(__VA_ARGS__) |
| #define | vreinterpret_v_f64m1_i64m1(...) __riscv_vreinterpret_v_f64m1_i64m1(__VA_ARGS__) |
| #define | vreinterpret_v_f64m1_u64m1(...) __riscv_vreinterpret_v_f64m1_u64m1(__VA_ARGS__) |
| #define | vreinterpret_v_f64m2_i64m2(...) __riscv_vreinterpret_v_f64m2_i64m2(__VA_ARGS__) |
| #define | vreinterpret_v_f64m2_u64m2(...) __riscv_vreinterpret_v_f64m2_u64m2(__VA_ARGS__) |
| #define | vreinterpret_v_f64m4_i64m4(...) __riscv_vreinterpret_v_f64m4_i64m4(__VA_ARGS__) |
| #define | vreinterpret_v_f64m4_u64m4(...) __riscv_vreinterpret_v_f64m4_u64m4(__VA_ARGS__) |
| #define | vreinterpret_v_f64m8_i64m8(...) __riscv_vreinterpret_v_f64m8_i64m8(__VA_ARGS__) |
| #define | vreinterpret_v_f64m8_u64m8(...) __riscv_vreinterpret_v_f64m8_u64m8(__VA_ARGS__) |
| #define | vreinterpret_v_i16m1_f16m1(...) __riscv_vreinterpret_v_i16m1_f16m1(__VA_ARGS__) |
| #define | vreinterpret_v_i16m1_i32m1(...) __riscv_vreinterpret_v_i16m1_i32m1(__VA_ARGS__) |
| #define | vreinterpret_v_i16m1_i64m1(...) __riscv_vreinterpret_v_i16m1_i64m1(__VA_ARGS__) |
| #define | vreinterpret_v_i16m1_i8m1(...) __riscv_vreinterpret_v_i16m1_i8m1(__VA_ARGS__) |
| #define | vreinterpret_v_i16m1_u16m1(...) __riscv_vreinterpret_v_i16m1_u16m1(__VA_ARGS__) |
| #define | vreinterpret_v_i16m2_f16m2(...) __riscv_vreinterpret_v_i16m2_f16m2(__VA_ARGS__) |
| #define | vreinterpret_v_i16m2_i32m2(...) __riscv_vreinterpret_v_i16m2_i32m2(__VA_ARGS__) |
| #define | vreinterpret_v_i16m2_i64m2(...) __riscv_vreinterpret_v_i16m2_i64m2(__VA_ARGS__) |
| #define | vreinterpret_v_i16m2_i8m2(...) __riscv_vreinterpret_v_i16m2_i8m2(__VA_ARGS__) |
| #define | vreinterpret_v_i16m2_u16m2(...) __riscv_vreinterpret_v_i16m2_u16m2(__VA_ARGS__) |
| #define | vreinterpret_v_i16m4_f16m4(...) __riscv_vreinterpret_v_i16m4_f16m4(__VA_ARGS__) |
| #define | vreinterpret_v_i16m4_i32m4(...) __riscv_vreinterpret_v_i16m4_i32m4(__VA_ARGS__) |
| #define | vreinterpret_v_i16m4_i64m4(...) __riscv_vreinterpret_v_i16m4_i64m4(__VA_ARGS__) |
| #define | vreinterpret_v_i16m4_i8m4(...) __riscv_vreinterpret_v_i16m4_i8m4(__VA_ARGS__) |
| #define | vreinterpret_v_i16m4_u16m4(...) __riscv_vreinterpret_v_i16m4_u16m4(__VA_ARGS__) |
| #define | vreinterpret_v_i16m8_f16m8(...) __riscv_vreinterpret_v_i16m8_f16m8(__VA_ARGS__) |
| #define | vreinterpret_v_i16m8_i32m8(...) __riscv_vreinterpret_v_i16m8_i32m8(__VA_ARGS__) |
| #define | vreinterpret_v_i16m8_i64m8(...) __riscv_vreinterpret_v_i16m8_i64m8(__VA_ARGS__) |
| #define | vreinterpret_v_i16m8_i8m8(...) __riscv_vreinterpret_v_i16m8_i8m8(__VA_ARGS__) |
| #define | vreinterpret_v_i16m8_u16m8(...) __riscv_vreinterpret_v_i16m8_u16m8(__VA_ARGS__) |
| #define | vreinterpret_v_i16mf2_f16mf2(...) __riscv_vreinterpret_v_i16mf2_f16mf2(__VA_ARGS__) |
| #define | vreinterpret_v_i16mf2_i32mf2(...) __riscv_vreinterpret_v_i16mf2_i32mf2(__VA_ARGS__) |
| #define | vreinterpret_v_i16mf2_i8mf2(...) __riscv_vreinterpret_v_i16mf2_i8mf2(__VA_ARGS__) |
| #define | vreinterpret_v_i16mf2_u16mf2(...) __riscv_vreinterpret_v_i16mf2_u16mf2(__VA_ARGS__) |
| #define | vreinterpret_v_i16mf4_f16mf4(...) __riscv_vreinterpret_v_i16mf4_f16mf4(__VA_ARGS__) |
| #define | vreinterpret_v_i16mf4_i8mf4(...) __riscv_vreinterpret_v_i16mf4_i8mf4(__VA_ARGS__) |
| #define | vreinterpret_v_i16mf4_u16mf4(...) __riscv_vreinterpret_v_i16mf4_u16mf4(__VA_ARGS__) |
| #define | vreinterpret_v_i32m1_f32m1(...) __riscv_vreinterpret_v_i32m1_f32m1(__VA_ARGS__) |
| #define | vreinterpret_v_i32m1_i16m1(...) __riscv_vreinterpret_v_i32m1_i16m1(__VA_ARGS__) |
| #define | vreinterpret_v_i32m1_i64m1(...) __riscv_vreinterpret_v_i32m1_i64m1(__VA_ARGS__) |
| #define | vreinterpret_v_i32m1_i8m1(...) __riscv_vreinterpret_v_i32m1_i8m1(__VA_ARGS__) |
| #define | vreinterpret_v_i32m1_u32m1(...) __riscv_vreinterpret_v_i32m1_u32m1(__VA_ARGS__) |
| #define | vreinterpret_v_i32m2_f32m2(...) __riscv_vreinterpret_v_i32m2_f32m2(__VA_ARGS__) |
| #define | vreinterpret_v_i32m2_i16m2(...) __riscv_vreinterpret_v_i32m2_i16m2(__VA_ARGS__) |
| #define | vreinterpret_v_i32m2_i64m2(...) __riscv_vreinterpret_v_i32m2_i64m2(__VA_ARGS__) |
| #define | vreinterpret_v_i32m2_i8m2(...) __riscv_vreinterpret_v_i32m2_i8m2(__VA_ARGS__) |
| #define | vreinterpret_v_i32m2_u32m2(...) __riscv_vreinterpret_v_i32m2_u32m2(__VA_ARGS__) |
| #define | vreinterpret_v_i32m4_f32m4(...) __riscv_vreinterpret_v_i32m4_f32m4(__VA_ARGS__) |
| #define | vreinterpret_v_i32m4_i16m4(...) __riscv_vreinterpret_v_i32m4_i16m4(__VA_ARGS__) |
| #define | vreinterpret_v_i32m4_i64m4(...) __riscv_vreinterpret_v_i32m4_i64m4(__VA_ARGS__) |
| #define | vreinterpret_v_i32m4_i8m4(...) __riscv_vreinterpret_v_i32m4_i8m4(__VA_ARGS__) |
| #define | vreinterpret_v_i32m4_u32m4(...) __riscv_vreinterpret_v_i32m4_u32m4(__VA_ARGS__) |
| #define | vreinterpret_v_i32m8_f32m8(...) __riscv_vreinterpret_v_i32m8_f32m8(__VA_ARGS__) |
| #define | vreinterpret_v_i32m8_i16m8(...) __riscv_vreinterpret_v_i32m8_i16m8(__VA_ARGS__) |
| #define | vreinterpret_v_i32m8_i64m8(...) __riscv_vreinterpret_v_i32m8_i64m8(__VA_ARGS__) |
| #define | vreinterpret_v_i32m8_i8m8(...) __riscv_vreinterpret_v_i32m8_i8m8(__VA_ARGS__) |
| #define | vreinterpret_v_i32m8_u32m8(...) __riscv_vreinterpret_v_i32m8_u32m8(__VA_ARGS__) |
| #define | vreinterpret_v_i32mf2_f32mf2(...) __riscv_vreinterpret_v_i32mf2_f32mf2(__VA_ARGS__) |
| #define | vreinterpret_v_i32mf2_i16mf2(...) __riscv_vreinterpret_v_i32mf2_i16mf2(__VA_ARGS__) |
| #define | vreinterpret_v_i32mf2_i8mf2(...) __riscv_vreinterpret_v_i32mf2_i8mf2(__VA_ARGS__) |
| #define | vreinterpret_v_i32mf2_u32mf2(...) __riscv_vreinterpret_v_i32mf2_u32mf2(__VA_ARGS__) |
| #define | vreinterpret_v_i64m1_f64m1(...) __riscv_vreinterpret_v_i64m1_f64m1(__VA_ARGS__) |
| #define | vreinterpret_v_i64m1_i16m1(...) __riscv_vreinterpret_v_i64m1_i16m1(__VA_ARGS__) |
| #define | vreinterpret_v_i64m1_i32m1(...) __riscv_vreinterpret_v_i64m1_i32m1(__VA_ARGS__) |
| #define | vreinterpret_v_i64m1_i8m1(...) __riscv_vreinterpret_v_i64m1_i8m1(__VA_ARGS__) |
| #define | vreinterpret_v_i64m1_u64m1(...) __riscv_vreinterpret_v_i64m1_u64m1(__VA_ARGS__) |
| #define | vreinterpret_v_i64m2_f64m2(...) __riscv_vreinterpret_v_i64m2_f64m2(__VA_ARGS__) |
| #define | vreinterpret_v_i64m2_i16m2(...) __riscv_vreinterpret_v_i64m2_i16m2(__VA_ARGS__) |
| #define | vreinterpret_v_i64m2_i32m2(...) __riscv_vreinterpret_v_i64m2_i32m2(__VA_ARGS__) |
| #define | vreinterpret_v_i64m2_i8m2(...) __riscv_vreinterpret_v_i64m2_i8m2(__VA_ARGS__) |
| #define | vreinterpret_v_i64m2_u64m2(...) __riscv_vreinterpret_v_i64m2_u64m2(__VA_ARGS__) |
| #define | vreinterpret_v_i64m4_f64m4(...) __riscv_vreinterpret_v_i64m4_f64m4(__VA_ARGS__) |
| #define | vreinterpret_v_i64m4_i16m4(...) __riscv_vreinterpret_v_i64m4_i16m4(__VA_ARGS__) |
| #define | vreinterpret_v_i64m4_i32m4(...) __riscv_vreinterpret_v_i64m4_i32m4(__VA_ARGS__) |
| #define | vreinterpret_v_i64m4_i8m4(...) __riscv_vreinterpret_v_i64m4_i8m4(__VA_ARGS__) |
| #define | vreinterpret_v_i64m4_u64m4(...) __riscv_vreinterpret_v_i64m4_u64m4(__VA_ARGS__) |
| #define | vreinterpret_v_i64m8_f64m8(...) __riscv_vreinterpret_v_i64m8_f64m8(__VA_ARGS__) |
| #define | vreinterpret_v_i64m8_i16m8(...) __riscv_vreinterpret_v_i64m8_i16m8(__VA_ARGS__) |
| #define | vreinterpret_v_i64m8_i32m8(...) __riscv_vreinterpret_v_i64m8_i32m8(__VA_ARGS__) |
| #define | vreinterpret_v_i64m8_i8m8(...) __riscv_vreinterpret_v_i64m8_i8m8(__VA_ARGS__) |
| #define | vreinterpret_v_i64m8_u64m8(...) __riscv_vreinterpret_v_i64m8_u64m8(__VA_ARGS__) |
| #define | vreinterpret_v_i8m1_i16m1(...) __riscv_vreinterpret_v_i8m1_i16m1(__VA_ARGS__) |
| #define | vreinterpret_v_i8m1_i32m1(...) __riscv_vreinterpret_v_i8m1_i32m1(__VA_ARGS__) |
| #define | vreinterpret_v_i8m1_i64m1(...) __riscv_vreinterpret_v_i8m1_i64m1(__VA_ARGS__) |
| #define | vreinterpret_v_i8m1_u8m1(...) __riscv_vreinterpret_v_i8m1_u8m1(__VA_ARGS__) |
| #define | vreinterpret_v_i8m2_i16m2(...) __riscv_vreinterpret_v_i8m2_i16m2(__VA_ARGS__) |
| #define | vreinterpret_v_i8m2_i32m2(...) __riscv_vreinterpret_v_i8m2_i32m2(__VA_ARGS__) |
| #define | vreinterpret_v_i8m2_i64m2(...) __riscv_vreinterpret_v_i8m2_i64m2(__VA_ARGS__) |
| #define | vreinterpret_v_i8m2_u8m2(...) __riscv_vreinterpret_v_i8m2_u8m2(__VA_ARGS__) |
| #define | vreinterpret_v_i8m4_i16m4(...) __riscv_vreinterpret_v_i8m4_i16m4(__VA_ARGS__) |
| #define | vreinterpret_v_i8m4_i32m4(...) __riscv_vreinterpret_v_i8m4_i32m4(__VA_ARGS__) |
| #define | vreinterpret_v_i8m4_i64m4(...) __riscv_vreinterpret_v_i8m4_i64m4(__VA_ARGS__) |
| #define | vreinterpret_v_i8m4_u8m4(...) __riscv_vreinterpret_v_i8m4_u8m4(__VA_ARGS__) |
| #define | vreinterpret_v_i8m8_i16m8(...) __riscv_vreinterpret_v_i8m8_i16m8(__VA_ARGS__) |
| #define | vreinterpret_v_i8m8_i32m8(...) __riscv_vreinterpret_v_i8m8_i32m8(__VA_ARGS__) |
| #define | vreinterpret_v_i8m8_i64m8(...) __riscv_vreinterpret_v_i8m8_i64m8(__VA_ARGS__) |
| #define | vreinterpret_v_i8m8_u8m8(...) __riscv_vreinterpret_v_i8m8_u8m8(__VA_ARGS__) |
| #define | vreinterpret_v_i8mf2_i16mf2(...) __riscv_vreinterpret_v_i8mf2_i16mf2(__VA_ARGS__) |
| #define | vreinterpret_v_i8mf2_i32mf2(...) __riscv_vreinterpret_v_i8mf2_i32mf2(__VA_ARGS__) |
| #define | vreinterpret_v_i8mf2_u8mf2(...) __riscv_vreinterpret_v_i8mf2_u8mf2(__VA_ARGS__) |
| #define | vreinterpret_v_i8mf4_i16mf4(...) __riscv_vreinterpret_v_i8mf4_i16mf4(__VA_ARGS__) |
| #define | vreinterpret_v_i8mf4_u8mf4(...) __riscv_vreinterpret_v_i8mf4_u8mf4(__VA_ARGS__) |
| #define | vreinterpret_v_i8mf8_u8mf8(...) __riscv_vreinterpret_v_i8mf8_u8mf8(__VA_ARGS__) |
| #define | vreinterpret_v_u16m1_f16m1(...) __riscv_vreinterpret_v_u16m1_f16m1(__VA_ARGS__) |
| #define | vreinterpret_v_u16m1_i16m1(...) __riscv_vreinterpret_v_u16m1_i16m1(__VA_ARGS__) |
| #define | vreinterpret_v_u16m1_u32m1(...) __riscv_vreinterpret_v_u16m1_u32m1(__VA_ARGS__) |
| #define | vreinterpret_v_u16m1_u64m1(...) __riscv_vreinterpret_v_u16m1_u64m1(__VA_ARGS__) |
| #define | vreinterpret_v_u16m1_u8m1(...) __riscv_vreinterpret_v_u16m1_u8m1(__VA_ARGS__) |
| #define | vreinterpret_v_u16m2_f16m2(...) __riscv_vreinterpret_v_u16m2_f16m2(__VA_ARGS__) |
| #define | vreinterpret_v_u16m2_i16m2(...) __riscv_vreinterpret_v_u16m2_i16m2(__VA_ARGS__) |
| #define | vreinterpret_v_u16m2_u32m2(...) __riscv_vreinterpret_v_u16m2_u32m2(__VA_ARGS__) |
| #define | vreinterpret_v_u16m2_u64m2(...) __riscv_vreinterpret_v_u16m2_u64m2(__VA_ARGS__) |
| #define | vreinterpret_v_u16m2_u8m2(...) __riscv_vreinterpret_v_u16m2_u8m2(__VA_ARGS__) |
| #define | vreinterpret_v_u16m4_f16m4(...) __riscv_vreinterpret_v_u16m4_f16m4(__VA_ARGS__) |
| #define | vreinterpret_v_u16m4_i16m4(...) __riscv_vreinterpret_v_u16m4_i16m4(__VA_ARGS__) |
| #define | vreinterpret_v_u16m4_u32m4(...) __riscv_vreinterpret_v_u16m4_u32m4(__VA_ARGS__) |
| #define | vreinterpret_v_u16m4_u64m4(...) __riscv_vreinterpret_v_u16m4_u64m4(__VA_ARGS__) |
| #define | vreinterpret_v_u16m4_u8m4(...) __riscv_vreinterpret_v_u16m4_u8m4(__VA_ARGS__) |
| #define | vreinterpret_v_u16m8_f16m8(...) __riscv_vreinterpret_v_u16m8_f16m8(__VA_ARGS__) |
| #define | vreinterpret_v_u16m8_i16m8(...) __riscv_vreinterpret_v_u16m8_i16m8(__VA_ARGS__) |
| #define | vreinterpret_v_u16m8_u32m8(...) __riscv_vreinterpret_v_u16m8_u32m8(__VA_ARGS__) |
| #define | vreinterpret_v_u16m8_u64m8(...) __riscv_vreinterpret_v_u16m8_u64m8(__VA_ARGS__) |
| #define | vreinterpret_v_u16m8_u8m8(...) __riscv_vreinterpret_v_u16m8_u8m8(__VA_ARGS__) |
| #define | vreinterpret_v_u16mf2_f16mf2(...) __riscv_vreinterpret_v_u16mf2_f16mf2(__VA_ARGS__) |
| #define | vreinterpret_v_u16mf2_i16mf2(...) __riscv_vreinterpret_v_u16mf2_i16mf2(__VA_ARGS__) |
| #define | vreinterpret_v_u16mf2_u32mf2(...) __riscv_vreinterpret_v_u16mf2_u32mf2(__VA_ARGS__) |
| #define | vreinterpret_v_u16mf2_u8mf2(...) __riscv_vreinterpret_v_u16mf2_u8mf2(__VA_ARGS__) |
| #define | vreinterpret_v_u16mf4_f16mf4(...) __riscv_vreinterpret_v_u16mf4_f16mf4(__VA_ARGS__) |
| #define | vreinterpret_v_u16mf4_i16mf4(...) __riscv_vreinterpret_v_u16mf4_i16mf4(__VA_ARGS__) |
| #define | vreinterpret_v_u16mf4_u8mf4(...) __riscv_vreinterpret_v_u16mf4_u8mf4(__VA_ARGS__) |
| #define | vreinterpret_v_u32m1_f32m1(...) __riscv_vreinterpret_v_u32m1_f32m1(__VA_ARGS__) |
| #define | vreinterpret_v_u32m1_i32m1(...) __riscv_vreinterpret_v_u32m1_i32m1(__VA_ARGS__) |
| #define | vreinterpret_v_u32m1_u16m1(...) __riscv_vreinterpret_v_u32m1_u16m1(__VA_ARGS__) |
| #define | vreinterpret_v_u32m1_u64m1(...) __riscv_vreinterpret_v_u32m1_u64m1(__VA_ARGS__) |
| #define | vreinterpret_v_u32m1_u8m1(...) __riscv_vreinterpret_v_u32m1_u8m1(__VA_ARGS__) |
| #define | vreinterpret_v_u32m2_f32m2(...) __riscv_vreinterpret_v_u32m2_f32m2(__VA_ARGS__) |
| #define | vreinterpret_v_u32m2_i32m2(...) __riscv_vreinterpret_v_u32m2_i32m2(__VA_ARGS__) |
| #define | vreinterpret_v_u32m2_u16m2(...) __riscv_vreinterpret_v_u32m2_u16m2(__VA_ARGS__) |
| #define | vreinterpret_v_u32m2_u64m2(...) __riscv_vreinterpret_v_u32m2_u64m2(__VA_ARGS__) |
| #define | vreinterpret_v_u32m2_u8m2(...) __riscv_vreinterpret_v_u32m2_u8m2(__VA_ARGS__) |
| #define | vreinterpret_v_u32m4_f32m4(...) __riscv_vreinterpret_v_u32m4_f32m4(__VA_ARGS__) |
| #define | vreinterpret_v_u32m4_i32m4(...) __riscv_vreinterpret_v_u32m4_i32m4(__VA_ARGS__) |
| #define | vreinterpret_v_u32m4_u16m4(...) __riscv_vreinterpret_v_u32m4_u16m4(__VA_ARGS__) |
| #define | vreinterpret_v_u32m4_u64m4(...) __riscv_vreinterpret_v_u32m4_u64m4(__VA_ARGS__) |
| #define | vreinterpret_v_u32m4_u8m4(...) __riscv_vreinterpret_v_u32m4_u8m4(__VA_ARGS__) |
| #define | vreinterpret_v_u32m8_f32m8(...) __riscv_vreinterpret_v_u32m8_f32m8(__VA_ARGS__) |
| #define | vreinterpret_v_u32m8_i32m8(...) __riscv_vreinterpret_v_u32m8_i32m8(__VA_ARGS__) |
| #define | vreinterpret_v_u32m8_u16m8(...) __riscv_vreinterpret_v_u32m8_u16m8(__VA_ARGS__) |
| #define | vreinterpret_v_u32m8_u64m8(...) __riscv_vreinterpret_v_u32m8_u64m8(__VA_ARGS__) |
| #define | vreinterpret_v_u32m8_u8m8(...) __riscv_vreinterpret_v_u32m8_u8m8(__VA_ARGS__) |
| #define | vreinterpret_v_u32mf2_f32mf2(...) __riscv_vreinterpret_v_u32mf2_f32mf2(__VA_ARGS__) |
| #define | vreinterpret_v_u32mf2_i32mf2(...) __riscv_vreinterpret_v_u32mf2_i32mf2(__VA_ARGS__) |
| #define | vreinterpret_v_u32mf2_u16mf2(...) __riscv_vreinterpret_v_u32mf2_u16mf2(__VA_ARGS__) |
| #define | vreinterpret_v_u32mf2_u8mf2(...) __riscv_vreinterpret_v_u32mf2_u8mf2(__VA_ARGS__) |
| #define | vreinterpret_v_u64m1_f64m1(...) __riscv_vreinterpret_v_u64m1_f64m1(__VA_ARGS__) |
| #define | vreinterpret_v_u64m1_i64m1(...) __riscv_vreinterpret_v_u64m1_i64m1(__VA_ARGS__) |
| #define | vreinterpret_v_u64m1_u16m1(...) __riscv_vreinterpret_v_u64m1_u16m1(__VA_ARGS__) |
| #define | vreinterpret_v_u64m1_u32m1(...) __riscv_vreinterpret_v_u64m1_u32m1(__VA_ARGS__) |
| #define | vreinterpret_v_u64m1_u8m1(...) __riscv_vreinterpret_v_u64m1_u8m1(__VA_ARGS__) |
| #define | vreinterpret_v_u64m2_f64m2(...) __riscv_vreinterpret_v_u64m2_f64m2(__VA_ARGS__) |
| #define | vreinterpret_v_u64m2_i64m2(...) __riscv_vreinterpret_v_u64m2_i64m2(__VA_ARGS__) |
| #define | vreinterpret_v_u64m2_u16m2(...) __riscv_vreinterpret_v_u64m2_u16m2(__VA_ARGS__) |
| #define | vreinterpret_v_u64m2_u32m2(...) __riscv_vreinterpret_v_u64m2_u32m2(__VA_ARGS__) |
| #define | vreinterpret_v_u64m2_u8m2(...) __riscv_vreinterpret_v_u64m2_u8m2(__VA_ARGS__) |
| #define | vreinterpret_v_u64m4_f64m4(...) __riscv_vreinterpret_v_u64m4_f64m4(__VA_ARGS__) |
| #define | vreinterpret_v_u64m4_i64m4(...) __riscv_vreinterpret_v_u64m4_i64m4(__VA_ARGS__) |
| #define | vreinterpret_v_u64m4_u16m4(...) __riscv_vreinterpret_v_u64m4_u16m4(__VA_ARGS__) |
| #define | vreinterpret_v_u64m4_u32m4(...) __riscv_vreinterpret_v_u64m4_u32m4(__VA_ARGS__) |
| #define | vreinterpret_v_u64m4_u8m4(...) __riscv_vreinterpret_v_u64m4_u8m4(__VA_ARGS__) |
| #define | vreinterpret_v_u64m8_f64m8(...) __riscv_vreinterpret_v_u64m8_f64m8(__VA_ARGS__) |
| #define | vreinterpret_v_u64m8_i64m8(...) __riscv_vreinterpret_v_u64m8_i64m8(__VA_ARGS__) |
| #define | vreinterpret_v_u64m8_u16m8(...) __riscv_vreinterpret_v_u64m8_u16m8(__VA_ARGS__) |
| #define | vreinterpret_v_u64m8_u32m8(...) __riscv_vreinterpret_v_u64m8_u32m8(__VA_ARGS__) |
| #define | vreinterpret_v_u64m8_u8m8(...) __riscv_vreinterpret_v_u64m8_u8m8(__VA_ARGS__) |
| #define | vreinterpret_v_u8m1_i8m1(...) __riscv_vreinterpret_v_u8m1_i8m1(__VA_ARGS__) |
| #define | vreinterpret_v_u8m1_u16m1(...) __riscv_vreinterpret_v_u8m1_u16m1(__VA_ARGS__) |
| #define | vreinterpret_v_u8m1_u32m1(...) __riscv_vreinterpret_v_u8m1_u32m1(__VA_ARGS__) |
| #define | vreinterpret_v_u8m1_u64m1(...) __riscv_vreinterpret_v_u8m1_u64m1(__VA_ARGS__) |
| #define | vreinterpret_v_u8m2_i8m2(...) __riscv_vreinterpret_v_u8m2_i8m2(__VA_ARGS__) |
| #define | vreinterpret_v_u8m2_u16m2(...) __riscv_vreinterpret_v_u8m2_u16m2(__VA_ARGS__) |
| #define | vreinterpret_v_u8m2_u32m2(...) __riscv_vreinterpret_v_u8m2_u32m2(__VA_ARGS__) |
| #define | vreinterpret_v_u8m2_u64m2(...) __riscv_vreinterpret_v_u8m2_u64m2(__VA_ARGS__) |
| #define | vreinterpret_v_u8m4_i8m4(...) __riscv_vreinterpret_v_u8m4_i8m4(__VA_ARGS__) |
| #define | vreinterpret_v_u8m4_u16m4(...) __riscv_vreinterpret_v_u8m4_u16m4(__VA_ARGS__) |
| #define | vreinterpret_v_u8m4_u32m4(...) __riscv_vreinterpret_v_u8m4_u32m4(__VA_ARGS__) |
| #define | vreinterpret_v_u8m4_u64m4(...) __riscv_vreinterpret_v_u8m4_u64m4(__VA_ARGS__) |
| #define | vreinterpret_v_u8m8_i8m8(...) __riscv_vreinterpret_v_u8m8_i8m8(__VA_ARGS__) |
| #define | vreinterpret_v_u8m8_u16m8(...) __riscv_vreinterpret_v_u8m8_u16m8(__VA_ARGS__) |
| #define | vreinterpret_v_u8m8_u32m8(...) __riscv_vreinterpret_v_u8m8_u32m8(__VA_ARGS__) |
| #define | vreinterpret_v_u8m8_u64m8(...) __riscv_vreinterpret_v_u8m8_u64m8(__VA_ARGS__) |
| #define | vreinterpret_v_u8mf2_i8mf2(...) __riscv_vreinterpret_v_u8mf2_i8mf2(__VA_ARGS__) |
| #define | vreinterpret_v_u8mf2_u16mf2(...) __riscv_vreinterpret_v_u8mf2_u16mf2(__VA_ARGS__) |
| #define | vreinterpret_v_u8mf2_u32mf2(...) __riscv_vreinterpret_v_u8mf2_u32mf2(__VA_ARGS__) |
| #define | vreinterpret_v_u8mf4_i8mf4(...) __riscv_vreinterpret_v_u8mf4_i8mf4(__VA_ARGS__) |
| #define | vreinterpret_v_u8mf4_u16mf4(...) __riscv_vreinterpret_v_u8mf4_u16mf4(__VA_ARGS__) |
| #define | vreinterpret_v_u8mf8_i8mf8(...) __riscv_vreinterpret_v_u8mf8_i8mf8(__VA_ARGS__) |
| #define | vrem_vv_i16m1(...) __riscv_vrem_vv_i16m1(__VA_ARGS__) |
| #define | vrem_vv_i16m1_m(...) __riscv_vrem_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vrem_vv_i16m2(...) __riscv_vrem_vv_i16m2(__VA_ARGS__) |
| #define | vrem_vv_i16m2_m(...) __riscv_vrem_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vrem_vv_i16m4(...) __riscv_vrem_vv_i16m4(__VA_ARGS__) |
| #define | vrem_vv_i16m4_m(...) __riscv_vrem_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vrem_vv_i16m8(...) __riscv_vrem_vv_i16m8(__VA_ARGS__) |
| #define | vrem_vv_i16m8_m(...) __riscv_vrem_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vrem_vv_i16mf2(...) __riscv_vrem_vv_i16mf2(__VA_ARGS__) |
| #define | vrem_vv_i16mf2_m(...) __riscv_vrem_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vrem_vv_i16mf4(...) __riscv_vrem_vv_i16mf4(__VA_ARGS__) |
| #define | vrem_vv_i16mf4_m(...) __riscv_vrem_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vrem_vv_i32m1(...) __riscv_vrem_vv_i32m1(__VA_ARGS__) |
| #define | vrem_vv_i32m1_m(...) __riscv_vrem_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vrem_vv_i32m2(...) __riscv_vrem_vv_i32m2(__VA_ARGS__) |
| #define | vrem_vv_i32m2_m(...) __riscv_vrem_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vrem_vv_i32m4(...) __riscv_vrem_vv_i32m4(__VA_ARGS__) |
| #define | vrem_vv_i32m4_m(...) __riscv_vrem_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vrem_vv_i32m8(...) __riscv_vrem_vv_i32m8(__VA_ARGS__) |
| #define | vrem_vv_i32m8_m(...) __riscv_vrem_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vrem_vv_i32mf2(...) __riscv_vrem_vv_i32mf2(__VA_ARGS__) |
| #define | vrem_vv_i32mf2_m(...) __riscv_vrem_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vrem_vv_i64m1(...) __riscv_vrem_vv_i64m1(__VA_ARGS__) |
| #define | vrem_vv_i64m1_m(...) __riscv_vrem_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vrem_vv_i64m2(...) __riscv_vrem_vv_i64m2(__VA_ARGS__) |
| #define | vrem_vv_i64m2_m(...) __riscv_vrem_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vrem_vv_i64m4(...) __riscv_vrem_vv_i64m4(__VA_ARGS__) |
| #define | vrem_vv_i64m4_m(...) __riscv_vrem_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vrem_vv_i64m8(...) __riscv_vrem_vv_i64m8(__VA_ARGS__) |
| #define | vrem_vv_i64m8_m(...) __riscv_vrem_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vrem_vv_i8m1(...) __riscv_vrem_vv_i8m1(__VA_ARGS__) |
| #define | vrem_vv_i8m1_m(...) __riscv_vrem_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vrem_vv_i8m2(...) __riscv_vrem_vv_i8m2(__VA_ARGS__) |
| #define | vrem_vv_i8m2_m(...) __riscv_vrem_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vrem_vv_i8m4(...) __riscv_vrem_vv_i8m4(__VA_ARGS__) |
| #define | vrem_vv_i8m4_m(...) __riscv_vrem_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vrem_vv_i8m8(...) __riscv_vrem_vv_i8m8(__VA_ARGS__) |
| #define | vrem_vv_i8m8_m(...) __riscv_vrem_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vrem_vv_i8mf2(...) __riscv_vrem_vv_i8mf2(__VA_ARGS__) |
| #define | vrem_vv_i8mf2_m(...) __riscv_vrem_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vrem_vv_i8mf4(...) __riscv_vrem_vv_i8mf4(__VA_ARGS__) |
| #define | vrem_vv_i8mf4_m(...) __riscv_vrem_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vrem_vv_i8mf8(...) __riscv_vrem_vv_i8mf8(__VA_ARGS__) |
| #define | vrem_vv_i8mf8_m(...) __riscv_vrem_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vrem_vx_i16m1(...) __riscv_vrem_vx_i16m1(__VA_ARGS__) |
| #define | vrem_vx_i16m1_m(...) __riscv_vrem_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vrem_vx_i16m2(...) __riscv_vrem_vx_i16m2(__VA_ARGS__) |
| #define | vrem_vx_i16m2_m(...) __riscv_vrem_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vrem_vx_i16m4(...) __riscv_vrem_vx_i16m4(__VA_ARGS__) |
| #define | vrem_vx_i16m4_m(...) __riscv_vrem_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vrem_vx_i16m8(...) __riscv_vrem_vx_i16m8(__VA_ARGS__) |
| #define | vrem_vx_i16m8_m(...) __riscv_vrem_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vrem_vx_i16mf2(...) __riscv_vrem_vx_i16mf2(__VA_ARGS__) |
| #define | vrem_vx_i16mf2_m(...) __riscv_vrem_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vrem_vx_i16mf4(...) __riscv_vrem_vx_i16mf4(__VA_ARGS__) |
| #define | vrem_vx_i16mf4_m(...) __riscv_vrem_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vrem_vx_i32m1(...) __riscv_vrem_vx_i32m1(__VA_ARGS__) |
| #define | vrem_vx_i32m1_m(...) __riscv_vrem_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vrem_vx_i32m2(...) __riscv_vrem_vx_i32m2(__VA_ARGS__) |
| #define | vrem_vx_i32m2_m(...) __riscv_vrem_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vrem_vx_i32m4(...) __riscv_vrem_vx_i32m4(__VA_ARGS__) |
| #define | vrem_vx_i32m4_m(...) __riscv_vrem_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vrem_vx_i32m8(...) __riscv_vrem_vx_i32m8(__VA_ARGS__) |
| #define | vrem_vx_i32m8_m(...) __riscv_vrem_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vrem_vx_i32mf2(...) __riscv_vrem_vx_i32mf2(__VA_ARGS__) |
| #define | vrem_vx_i32mf2_m(...) __riscv_vrem_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vrem_vx_i64m1(...) __riscv_vrem_vx_i64m1(__VA_ARGS__) |
| #define | vrem_vx_i64m1_m(...) __riscv_vrem_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vrem_vx_i64m2(...) __riscv_vrem_vx_i64m2(__VA_ARGS__) |
| #define | vrem_vx_i64m2_m(...) __riscv_vrem_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vrem_vx_i64m4(...) __riscv_vrem_vx_i64m4(__VA_ARGS__) |
| #define | vrem_vx_i64m4_m(...) __riscv_vrem_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vrem_vx_i64m8(...) __riscv_vrem_vx_i64m8(__VA_ARGS__) |
| #define | vrem_vx_i64m8_m(...) __riscv_vrem_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vrem_vx_i8m1(...) __riscv_vrem_vx_i8m1(__VA_ARGS__) |
| #define | vrem_vx_i8m1_m(...) __riscv_vrem_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vrem_vx_i8m2(...) __riscv_vrem_vx_i8m2(__VA_ARGS__) |
| #define | vrem_vx_i8m2_m(...) __riscv_vrem_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vrem_vx_i8m4(...) __riscv_vrem_vx_i8m4(__VA_ARGS__) |
| #define | vrem_vx_i8m4_m(...) __riscv_vrem_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vrem_vx_i8m8(...) __riscv_vrem_vx_i8m8(__VA_ARGS__) |
| #define | vrem_vx_i8m8_m(...) __riscv_vrem_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vrem_vx_i8mf2(...) __riscv_vrem_vx_i8mf2(__VA_ARGS__) |
| #define | vrem_vx_i8mf2_m(...) __riscv_vrem_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vrem_vx_i8mf4(...) __riscv_vrem_vx_i8mf4(__VA_ARGS__) |
| #define | vrem_vx_i8mf4_m(...) __riscv_vrem_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vrem_vx_i8mf8(...) __riscv_vrem_vx_i8mf8(__VA_ARGS__) |
| #define | vrem_vx_i8mf8_m(...) __riscv_vrem_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vremu_vv_u16m1(...) __riscv_vremu_vv_u16m1(__VA_ARGS__) |
| #define | vremu_vv_u16m1_m(...) __riscv_vremu_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vremu_vv_u16m2(...) __riscv_vremu_vv_u16m2(__VA_ARGS__) |
| #define | vremu_vv_u16m2_m(...) __riscv_vremu_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vremu_vv_u16m4(...) __riscv_vremu_vv_u16m4(__VA_ARGS__) |
| #define | vremu_vv_u16m4_m(...) __riscv_vremu_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vremu_vv_u16m8(...) __riscv_vremu_vv_u16m8(__VA_ARGS__) |
| #define | vremu_vv_u16m8_m(...) __riscv_vremu_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vremu_vv_u16mf2(...) __riscv_vremu_vv_u16mf2(__VA_ARGS__) |
| #define | vremu_vv_u16mf2_m(...) __riscv_vremu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vremu_vv_u16mf4(...) __riscv_vremu_vv_u16mf4(__VA_ARGS__) |
| #define | vremu_vv_u16mf4_m(...) __riscv_vremu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vremu_vv_u32m1(...) __riscv_vremu_vv_u32m1(__VA_ARGS__) |
| #define | vremu_vv_u32m1_m(...) __riscv_vremu_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vremu_vv_u32m2(...) __riscv_vremu_vv_u32m2(__VA_ARGS__) |
| #define | vremu_vv_u32m2_m(...) __riscv_vremu_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vremu_vv_u32m4(...) __riscv_vremu_vv_u32m4(__VA_ARGS__) |
| #define | vremu_vv_u32m4_m(...) __riscv_vremu_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vremu_vv_u32m8(...) __riscv_vremu_vv_u32m8(__VA_ARGS__) |
| #define | vremu_vv_u32m8_m(...) __riscv_vremu_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vremu_vv_u32mf2(...) __riscv_vremu_vv_u32mf2(__VA_ARGS__) |
| #define | vremu_vv_u32mf2_m(...) __riscv_vremu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vremu_vv_u64m1(...) __riscv_vremu_vv_u64m1(__VA_ARGS__) |
| #define | vremu_vv_u64m1_m(...) __riscv_vremu_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vremu_vv_u64m2(...) __riscv_vremu_vv_u64m2(__VA_ARGS__) |
| #define | vremu_vv_u64m2_m(...) __riscv_vremu_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vremu_vv_u64m4(...) __riscv_vremu_vv_u64m4(__VA_ARGS__) |
| #define | vremu_vv_u64m4_m(...) __riscv_vremu_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vremu_vv_u64m8(...) __riscv_vremu_vv_u64m8(__VA_ARGS__) |
| #define | vremu_vv_u64m8_m(...) __riscv_vremu_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vremu_vv_u8m1(...) __riscv_vremu_vv_u8m1(__VA_ARGS__) |
| #define | vremu_vv_u8m1_m(...) __riscv_vremu_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vremu_vv_u8m2(...) __riscv_vremu_vv_u8m2(__VA_ARGS__) |
| #define | vremu_vv_u8m2_m(...) __riscv_vremu_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vremu_vv_u8m4(...) __riscv_vremu_vv_u8m4(__VA_ARGS__) |
| #define | vremu_vv_u8m4_m(...) __riscv_vremu_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vremu_vv_u8m8(...) __riscv_vremu_vv_u8m8(__VA_ARGS__) |
| #define | vremu_vv_u8m8_m(...) __riscv_vremu_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vremu_vv_u8mf2(...) __riscv_vremu_vv_u8mf2(__VA_ARGS__) |
| #define | vremu_vv_u8mf2_m(...) __riscv_vremu_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vremu_vv_u8mf4(...) __riscv_vremu_vv_u8mf4(__VA_ARGS__) |
| #define | vremu_vv_u8mf4_m(...) __riscv_vremu_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vremu_vv_u8mf8(...) __riscv_vremu_vv_u8mf8(__VA_ARGS__) |
| #define | vremu_vv_u8mf8_m(...) __riscv_vremu_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vremu_vx_u16m1(...) __riscv_vremu_vx_u16m1(__VA_ARGS__) |
| #define | vremu_vx_u16m1_m(...) __riscv_vremu_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vremu_vx_u16m2(...) __riscv_vremu_vx_u16m2(__VA_ARGS__) |
| #define | vremu_vx_u16m2_m(...) __riscv_vremu_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vremu_vx_u16m4(...) __riscv_vremu_vx_u16m4(__VA_ARGS__) |
| #define | vremu_vx_u16m4_m(...) __riscv_vremu_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vremu_vx_u16m8(...) __riscv_vremu_vx_u16m8(__VA_ARGS__) |
| #define | vremu_vx_u16m8_m(...) __riscv_vremu_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vremu_vx_u16mf2(...) __riscv_vremu_vx_u16mf2(__VA_ARGS__) |
| #define | vremu_vx_u16mf2_m(...) __riscv_vremu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vremu_vx_u16mf4(...) __riscv_vremu_vx_u16mf4(__VA_ARGS__) |
| #define | vremu_vx_u16mf4_m(...) __riscv_vremu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vremu_vx_u32m1(...) __riscv_vremu_vx_u32m1(__VA_ARGS__) |
| #define | vremu_vx_u32m1_m(...) __riscv_vremu_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vremu_vx_u32m2(...) __riscv_vremu_vx_u32m2(__VA_ARGS__) |
| #define | vremu_vx_u32m2_m(...) __riscv_vremu_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vremu_vx_u32m4(...) __riscv_vremu_vx_u32m4(__VA_ARGS__) |
| #define | vremu_vx_u32m4_m(...) __riscv_vremu_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vremu_vx_u32m8(...) __riscv_vremu_vx_u32m8(__VA_ARGS__) |
| #define | vremu_vx_u32m8_m(...) __riscv_vremu_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vremu_vx_u32mf2(...) __riscv_vremu_vx_u32mf2(__VA_ARGS__) |
| #define | vremu_vx_u32mf2_m(...) __riscv_vremu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vremu_vx_u64m1(...) __riscv_vremu_vx_u64m1(__VA_ARGS__) |
| #define | vremu_vx_u64m1_m(...) __riscv_vremu_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vremu_vx_u64m2(...) __riscv_vremu_vx_u64m2(__VA_ARGS__) |
| #define | vremu_vx_u64m2_m(...) __riscv_vremu_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vremu_vx_u64m4(...) __riscv_vremu_vx_u64m4(__VA_ARGS__) |
| #define | vremu_vx_u64m4_m(...) __riscv_vremu_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vremu_vx_u64m8(...) __riscv_vremu_vx_u64m8(__VA_ARGS__) |
| #define | vremu_vx_u64m8_m(...) __riscv_vremu_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vremu_vx_u8m1(...) __riscv_vremu_vx_u8m1(__VA_ARGS__) |
| #define | vremu_vx_u8m1_m(...) __riscv_vremu_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vremu_vx_u8m2(...) __riscv_vremu_vx_u8m2(__VA_ARGS__) |
| #define | vremu_vx_u8m2_m(...) __riscv_vremu_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vremu_vx_u8m4(...) __riscv_vremu_vx_u8m4(__VA_ARGS__) |
| #define | vremu_vx_u8m4_m(...) __riscv_vremu_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vremu_vx_u8m8(...) __riscv_vremu_vx_u8m8(__VA_ARGS__) |
| #define | vremu_vx_u8m8_m(...) __riscv_vremu_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vremu_vx_u8mf2(...) __riscv_vremu_vx_u8mf2(__VA_ARGS__) |
| #define | vremu_vx_u8mf2_m(...) __riscv_vremu_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vremu_vx_u8mf4(...) __riscv_vremu_vx_u8mf4(__VA_ARGS__) |
| #define | vremu_vx_u8mf4_m(...) __riscv_vremu_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vremu_vx_u8mf8(...) __riscv_vremu_vx_u8mf8(__VA_ARGS__) |
| #define | vremu_vx_u8mf8_m(...) __riscv_vremu_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vrgather_vv_f16m1(...) __riscv_vrgather_vv_f16m1(__VA_ARGS__) |
| #define | vrgather_vv_f16m1_m(...) __riscv_vrgather_vv_f16m1_tumu(__VA_ARGS__) |
| #define | vrgather_vv_f16m2(...) __riscv_vrgather_vv_f16m2(__VA_ARGS__) |
| #define | vrgather_vv_f16m2_m(...) __riscv_vrgather_vv_f16m2_tumu(__VA_ARGS__) |
| #define | vrgather_vv_f16m4(...) __riscv_vrgather_vv_f16m4(__VA_ARGS__) |
| #define | vrgather_vv_f16m4_m(...) __riscv_vrgather_vv_f16m4_tumu(__VA_ARGS__) |
| #define | vrgather_vv_f16m8(...) __riscv_vrgather_vv_f16m8(__VA_ARGS__) |
| #define | vrgather_vv_f16m8_m(...) __riscv_vrgather_vv_f16m8_tumu(__VA_ARGS__) |
| #define | vrgather_vv_f16mf2(...) __riscv_vrgather_vv_f16mf2(__VA_ARGS__) |
| #define | vrgather_vv_f16mf2_m(...) __riscv_vrgather_vv_f16mf2_tumu(__VA_ARGS__) |
| #define | vrgather_vv_f16mf4(...) __riscv_vrgather_vv_f16mf4(__VA_ARGS__) |
| #define | vrgather_vv_f16mf4_m(...) __riscv_vrgather_vv_f16mf4_tumu(__VA_ARGS__) |
| #define | vrgather_vv_f32m1(...) __riscv_vrgather_vv_f32m1(__VA_ARGS__) |
| #define | vrgather_vv_f32m1_m(...) __riscv_vrgather_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vrgather_vv_f32m2(...) __riscv_vrgather_vv_f32m2(__VA_ARGS__) |
| #define | vrgather_vv_f32m2_m(...) __riscv_vrgather_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vrgather_vv_f32m4(...) __riscv_vrgather_vv_f32m4(__VA_ARGS__) |
| #define | vrgather_vv_f32m4_m(...) __riscv_vrgather_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vrgather_vv_f32m8(...) __riscv_vrgather_vv_f32m8(__VA_ARGS__) |
| #define | vrgather_vv_f32m8_m(...) __riscv_vrgather_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vrgather_vv_f32mf2(...) __riscv_vrgather_vv_f32mf2(__VA_ARGS__) |
| #define | vrgather_vv_f32mf2_m(...) __riscv_vrgather_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vrgather_vv_f64m1(...) __riscv_vrgather_vv_f64m1(__VA_ARGS__) |
| #define | vrgather_vv_f64m1_m(...) __riscv_vrgather_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vrgather_vv_f64m2(...) __riscv_vrgather_vv_f64m2(__VA_ARGS__) |
| #define | vrgather_vv_f64m2_m(...) __riscv_vrgather_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vrgather_vv_f64m4(...) __riscv_vrgather_vv_f64m4(__VA_ARGS__) |
| #define | vrgather_vv_f64m4_m(...) __riscv_vrgather_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vrgather_vv_f64m8(...) __riscv_vrgather_vv_f64m8(__VA_ARGS__) |
| #define | vrgather_vv_f64m8_m(...) __riscv_vrgather_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i16m1(...) __riscv_vrgather_vv_i16m1(__VA_ARGS__) |
| #define | vrgather_vv_i16m1_m(...) __riscv_vrgather_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i16m2(...) __riscv_vrgather_vv_i16m2(__VA_ARGS__) |
| #define | vrgather_vv_i16m2_m(...) __riscv_vrgather_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i16m4(...) __riscv_vrgather_vv_i16m4(__VA_ARGS__) |
| #define | vrgather_vv_i16m4_m(...) __riscv_vrgather_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i16m8(...) __riscv_vrgather_vv_i16m8(__VA_ARGS__) |
| #define | vrgather_vv_i16m8_m(...) __riscv_vrgather_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i16mf2(...) __riscv_vrgather_vv_i16mf2(__VA_ARGS__) |
| #define | vrgather_vv_i16mf2_m(...) __riscv_vrgather_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i16mf4(...) __riscv_vrgather_vv_i16mf4(__VA_ARGS__) |
| #define | vrgather_vv_i16mf4_m(...) __riscv_vrgather_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i32m1(...) __riscv_vrgather_vv_i32m1(__VA_ARGS__) |
| #define | vrgather_vv_i32m1_m(...) __riscv_vrgather_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i32m2(...) __riscv_vrgather_vv_i32m2(__VA_ARGS__) |
| #define | vrgather_vv_i32m2_m(...) __riscv_vrgather_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i32m4(...) __riscv_vrgather_vv_i32m4(__VA_ARGS__) |
| #define | vrgather_vv_i32m4_m(...) __riscv_vrgather_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i32m8(...) __riscv_vrgather_vv_i32m8(__VA_ARGS__) |
| #define | vrgather_vv_i32m8_m(...) __riscv_vrgather_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i32mf2(...) __riscv_vrgather_vv_i32mf2(__VA_ARGS__) |
| #define | vrgather_vv_i32mf2_m(...) __riscv_vrgather_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i64m1(...) __riscv_vrgather_vv_i64m1(__VA_ARGS__) |
| #define | vrgather_vv_i64m1_m(...) __riscv_vrgather_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i64m2(...) __riscv_vrgather_vv_i64m2(__VA_ARGS__) |
| #define | vrgather_vv_i64m2_m(...) __riscv_vrgather_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i64m4(...) __riscv_vrgather_vv_i64m4(__VA_ARGS__) |
| #define | vrgather_vv_i64m4_m(...) __riscv_vrgather_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i64m8(...) __riscv_vrgather_vv_i64m8(__VA_ARGS__) |
| #define | vrgather_vv_i64m8_m(...) __riscv_vrgather_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i8m1(...) __riscv_vrgather_vv_i8m1(__VA_ARGS__) |
| #define | vrgather_vv_i8m1_m(...) __riscv_vrgather_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i8m2(...) __riscv_vrgather_vv_i8m2(__VA_ARGS__) |
| #define | vrgather_vv_i8m2_m(...) __riscv_vrgather_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i8m4(...) __riscv_vrgather_vv_i8m4(__VA_ARGS__) |
| #define | vrgather_vv_i8m4_m(...) __riscv_vrgather_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i8m8(...) __riscv_vrgather_vv_i8m8(__VA_ARGS__) |
| #define | vrgather_vv_i8m8_m(...) __riscv_vrgather_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i8mf2(...) __riscv_vrgather_vv_i8mf2(__VA_ARGS__) |
| #define | vrgather_vv_i8mf2_m(...) __riscv_vrgather_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i8mf4(...) __riscv_vrgather_vv_i8mf4(__VA_ARGS__) |
| #define | vrgather_vv_i8mf4_m(...) __riscv_vrgather_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vrgather_vv_i8mf8(...) __riscv_vrgather_vv_i8mf8(__VA_ARGS__) |
| #define | vrgather_vv_i8mf8_m(...) __riscv_vrgather_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u16m1(...) __riscv_vrgather_vv_u16m1(__VA_ARGS__) |
| #define | vrgather_vv_u16m1_m(...) __riscv_vrgather_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u16m2(...) __riscv_vrgather_vv_u16m2(__VA_ARGS__) |
| #define | vrgather_vv_u16m2_m(...) __riscv_vrgather_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u16m4(...) __riscv_vrgather_vv_u16m4(__VA_ARGS__) |
| #define | vrgather_vv_u16m4_m(...) __riscv_vrgather_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u16m8(...) __riscv_vrgather_vv_u16m8(__VA_ARGS__) |
| #define | vrgather_vv_u16m8_m(...) __riscv_vrgather_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u16mf2(...) __riscv_vrgather_vv_u16mf2(__VA_ARGS__) |
| #define | vrgather_vv_u16mf2_m(...) __riscv_vrgather_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u16mf4(...) __riscv_vrgather_vv_u16mf4(__VA_ARGS__) |
| #define | vrgather_vv_u16mf4_m(...) __riscv_vrgather_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u32m1(...) __riscv_vrgather_vv_u32m1(__VA_ARGS__) |
| #define | vrgather_vv_u32m1_m(...) __riscv_vrgather_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u32m2(...) __riscv_vrgather_vv_u32m2(__VA_ARGS__) |
| #define | vrgather_vv_u32m2_m(...) __riscv_vrgather_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u32m4(...) __riscv_vrgather_vv_u32m4(__VA_ARGS__) |
| #define | vrgather_vv_u32m4_m(...) __riscv_vrgather_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u32m8(...) __riscv_vrgather_vv_u32m8(__VA_ARGS__) |
| #define | vrgather_vv_u32m8_m(...) __riscv_vrgather_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u32mf2(...) __riscv_vrgather_vv_u32mf2(__VA_ARGS__) |
| #define | vrgather_vv_u32mf2_m(...) __riscv_vrgather_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u64m1(...) __riscv_vrgather_vv_u64m1(__VA_ARGS__) |
| #define | vrgather_vv_u64m1_m(...) __riscv_vrgather_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u64m2(...) __riscv_vrgather_vv_u64m2(__VA_ARGS__) |
| #define | vrgather_vv_u64m2_m(...) __riscv_vrgather_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u64m4(...) __riscv_vrgather_vv_u64m4(__VA_ARGS__) |
| #define | vrgather_vv_u64m4_m(...) __riscv_vrgather_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u64m8(...) __riscv_vrgather_vv_u64m8(__VA_ARGS__) |
| #define | vrgather_vv_u64m8_m(...) __riscv_vrgather_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u8m1(...) __riscv_vrgather_vv_u8m1(__VA_ARGS__) |
| #define | vrgather_vv_u8m1_m(...) __riscv_vrgather_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u8m2(...) __riscv_vrgather_vv_u8m2(__VA_ARGS__) |
| #define | vrgather_vv_u8m2_m(...) __riscv_vrgather_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u8m4(...) __riscv_vrgather_vv_u8m4(__VA_ARGS__) |
| #define | vrgather_vv_u8m4_m(...) __riscv_vrgather_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u8m8(...) __riscv_vrgather_vv_u8m8(__VA_ARGS__) |
| #define | vrgather_vv_u8m8_m(...) __riscv_vrgather_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u8mf2(...) __riscv_vrgather_vv_u8mf2(__VA_ARGS__) |
| #define | vrgather_vv_u8mf2_m(...) __riscv_vrgather_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u8mf4(...) __riscv_vrgather_vv_u8mf4(__VA_ARGS__) |
| #define | vrgather_vv_u8mf4_m(...) __riscv_vrgather_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vrgather_vv_u8mf8(...) __riscv_vrgather_vv_u8mf8(__VA_ARGS__) |
| #define | vrgather_vv_u8mf8_m(...) __riscv_vrgather_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vrgather_vx_f16m1(...) __riscv_vrgather_vx_f16m1(__VA_ARGS__) |
| #define | vrgather_vx_f16m1_m(...) __riscv_vrgather_vx_f16m1_tumu(__VA_ARGS__) |
| #define | vrgather_vx_f16m2(...) __riscv_vrgather_vx_f16m2(__VA_ARGS__) |
| #define | vrgather_vx_f16m2_m(...) __riscv_vrgather_vx_f16m2_tumu(__VA_ARGS__) |
| #define | vrgather_vx_f16m4(...) __riscv_vrgather_vx_f16m4(__VA_ARGS__) |
| #define | vrgather_vx_f16m4_m(...) __riscv_vrgather_vx_f16m4_tumu(__VA_ARGS__) |
| #define | vrgather_vx_f16m8(...) __riscv_vrgather_vx_f16m8(__VA_ARGS__) |
| #define | vrgather_vx_f16m8_m(...) __riscv_vrgather_vx_f16m8_tumu(__VA_ARGS__) |
| #define | vrgather_vx_f16mf2(...) __riscv_vrgather_vx_f16mf2(__VA_ARGS__) |
| #define | vrgather_vx_f16mf2_m(...) __riscv_vrgather_vx_f16mf2_tumu(__VA_ARGS__) |
| #define | vrgather_vx_f16mf4(...) __riscv_vrgather_vx_f16mf4(__VA_ARGS__) |
| #define | vrgather_vx_f16mf4_m(...) __riscv_vrgather_vx_f16mf4_tumu(__VA_ARGS__) |
| #define | vrgather_vx_f32m1(...) __riscv_vrgather_vx_f32m1(__VA_ARGS__) |
| #define | vrgather_vx_f32m1_m(...) __riscv_vrgather_vx_f32m1_tumu(__VA_ARGS__) |
| #define | vrgather_vx_f32m2(...) __riscv_vrgather_vx_f32m2(__VA_ARGS__) |
| #define | vrgather_vx_f32m2_m(...) __riscv_vrgather_vx_f32m2_tumu(__VA_ARGS__) |
| #define | vrgather_vx_f32m4(...) __riscv_vrgather_vx_f32m4(__VA_ARGS__) |
| #define | vrgather_vx_f32m4_m(...) __riscv_vrgather_vx_f32m4_tumu(__VA_ARGS__) |
| #define | vrgather_vx_f32m8(...) __riscv_vrgather_vx_f32m8(__VA_ARGS__) |
| #define | vrgather_vx_f32m8_m(...) __riscv_vrgather_vx_f32m8_tumu(__VA_ARGS__) |
| #define | vrgather_vx_f32mf2(...) __riscv_vrgather_vx_f32mf2(__VA_ARGS__) |
| #define | vrgather_vx_f32mf2_m(...) __riscv_vrgather_vx_f32mf2_tumu(__VA_ARGS__) |
| #define | vrgather_vx_f64m1(...) __riscv_vrgather_vx_f64m1(__VA_ARGS__) |
| #define | vrgather_vx_f64m1_m(...) __riscv_vrgather_vx_f64m1_tumu(__VA_ARGS__) |
| #define | vrgather_vx_f64m2(...) __riscv_vrgather_vx_f64m2(__VA_ARGS__) |
| #define | vrgather_vx_f64m2_m(...) __riscv_vrgather_vx_f64m2_tumu(__VA_ARGS__) |
| #define | vrgather_vx_f64m4(...) __riscv_vrgather_vx_f64m4(__VA_ARGS__) |
| #define | vrgather_vx_f64m4_m(...) __riscv_vrgather_vx_f64m4_tumu(__VA_ARGS__) |
| #define | vrgather_vx_f64m8(...) __riscv_vrgather_vx_f64m8(__VA_ARGS__) |
| #define | vrgather_vx_f64m8_m(...) __riscv_vrgather_vx_f64m8_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i16m1(...) __riscv_vrgather_vx_i16m1(__VA_ARGS__) |
| #define | vrgather_vx_i16m1_m(...) __riscv_vrgather_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i16m2(...) __riscv_vrgather_vx_i16m2(__VA_ARGS__) |
| #define | vrgather_vx_i16m2_m(...) __riscv_vrgather_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i16m4(...) __riscv_vrgather_vx_i16m4(__VA_ARGS__) |
| #define | vrgather_vx_i16m4_m(...) __riscv_vrgather_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i16m8(...) __riscv_vrgather_vx_i16m8(__VA_ARGS__) |
| #define | vrgather_vx_i16m8_m(...) __riscv_vrgather_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i16mf2(...) __riscv_vrgather_vx_i16mf2(__VA_ARGS__) |
| #define | vrgather_vx_i16mf2_m(...) __riscv_vrgather_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i16mf4(...) __riscv_vrgather_vx_i16mf4(__VA_ARGS__) |
| #define | vrgather_vx_i16mf4_m(...) __riscv_vrgather_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i32m1(...) __riscv_vrgather_vx_i32m1(__VA_ARGS__) |
| #define | vrgather_vx_i32m1_m(...) __riscv_vrgather_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i32m2(...) __riscv_vrgather_vx_i32m2(__VA_ARGS__) |
| #define | vrgather_vx_i32m2_m(...) __riscv_vrgather_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i32m4(...) __riscv_vrgather_vx_i32m4(__VA_ARGS__) |
| #define | vrgather_vx_i32m4_m(...) __riscv_vrgather_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i32m8(...) __riscv_vrgather_vx_i32m8(__VA_ARGS__) |
| #define | vrgather_vx_i32m8_m(...) __riscv_vrgather_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i32mf2(...) __riscv_vrgather_vx_i32mf2(__VA_ARGS__) |
| #define | vrgather_vx_i32mf2_m(...) __riscv_vrgather_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i64m1(...) __riscv_vrgather_vx_i64m1(__VA_ARGS__) |
| #define | vrgather_vx_i64m1_m(...) __riscv_vrgather_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i64m2(...) __riscv_vrgather_vx_i64m2(__VA_ARGS__) |
| #define | vrgather_vx_i64m2_m(...) __riscv_vrgather_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i64m4(...) __riscv_vrgather_vx_i64m4(__VA_ARGS__) |
| #define | vrgather_vx_i64m4_m(...) __riscv_vrgather_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i64m8(...) __riscv_vrgather_vx_i64m8(__VA_ARGS__) |
| #define | vrgather_vx_i64m8_m(...) __riscv_vrgather_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i8m1(...) __riscv_vrgather_vx_i8m1(__VA_ARGS__) |
| #define | vrgather_vx_i8m1_m(...) __riscv_vrgather_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i8m2(...) __riscv_vrgather_vx_i8m2(__VA_ARGS__) |
| #define | vrgather_vx_i8m2_m(...) __riscv_vrgather_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i8m4(...) __riscv_vrgather_vx_i8m4(__VA_ARGS__) |
| #define | vrgather_vx_i8m4_m(...) __riscv_vrgather_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i8m8(...) __riscv_vrgather_vx_i8m8(__VA_ARGS__) |
| #define | vrgather_vx_i8m8_m(...) __riscv_vrgather_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i8mf2(...) __riscv_vrgather_vx_i8mf2(__VA_ARGS__) |
| #define | vrgather_vx_i8mf2_m(...) __riscv_vrgather_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i8mf4(...) __riscv_vrgather_vx_i8mf4(__VA_ARGS__) |
| #define | vrgather_vx_i8mf4_m(...) __riscv_vrgather_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vrgather_vx_i8mf8(...) __riscv_vrgather_vx_i8mf8(__VA_ARGS__) |
| #define | vrgather_vx_i8mf8_m(...) __riscv_vrgather_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u16m1(...) __riscv_vrgather_vx_u16m1(__VA_ARGS__) |
| #define | vrgather_vx_u16m1_m(...) __riscv_vrgather_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u16m2(...) __riscv_vrgather_vx_u16m2(__VA_ARGS__) |
| #define | vrgather_vx_u16m2_m(...) __riscv_vrgather_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u16m4(...) __riscv_vrgather_vx_u16m4(__VA_ARGS__) |
| #define | vrgather_vx_u16m4_m(...) __riscv_vrgather_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u16m8(...) __riscv_vrgather_vx_u16m8(__VA_ARGS__) |
| #define | vrgather_vx_u16m8_m(...) __riscv_vrgather_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u16mf2(...) __riscv_vrgather_vx_u16mf2(__VA_ARGS__) |
| #define | vrgather_vx_u16mf2_m(...) __riscv_vrgather_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u16mf4(...) __riscv_vrgather_vx_u16mf4(__VA_ARGS__) |
| #define | vrgather_vx_u16mf4_m(...) __riscv_vrgather_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u32m1(...) __riscv_vrgather_vx_u32m1(__VA_ARGS__) |
| #define | vrgather_vx_u32m1_m(...) __riscv_vrgather_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u32m2(...) __riscv_vrgather_vx_u32m2(__VA_ARGS__) |
| #define | vrgather_vx_u32m2_m(...) __riscv_vrgather_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u32m4(...) __riscv_vrgather_vx_u32m4(__VA_ARGS__) |
| #define | vrgather_vx_u32m4_m(...) __riscv_vrgather_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u32m8(...) __riscv_vrgather_vx_u32m8(__VA_ARGS__) |
| #define | vrgather_vx_u32m8_m(...) __riscv_vrgather_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u32mf2(...) __riscv_vrgather_vx_u32mf2(__VA_ARGS__) |
| #define | vrgather_vx_u32mf2_m(...) __riscv_vrgather_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u64m1(...) __riscv_vrgather_vx_u64m1(__VA_ARGS__) |
| #define | vrgather_vx_u64m1_m(...) __riscv_vrgather_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u64m2(...) __riscv_vrgather_vx_u64m2(__VA_ARGS__) |
| #define | vrgather_vx_u64m2_m(...) __riscv_vrgather_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u64m4(...) __riscv_vrgather_vx_u64m4(__VA_ARGS__) |
| #define | vrgather_vx_u64m4_m(...) __riscv_vrgather_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u64m8(...) __riscv_vrgather_vx_u64m8(__VA_ARGS__) |
| #define | vrgather_vx_u64m8_m(...) __riscv_vrgather_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u8m1(...) __riscv_vrgather_vx_u8m1(__VA_ARGS__) |
| #define | vrgather_vx_u8m1_m(...) __riscv_vrgather_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u8m2(...) __riscv_vrgather_vx_u8m2(__VA_ARGS__) |
| #define | vrgather_vx_u8m2_m(...) __riscv_vrgather_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u8m4(...) __riscv_vrgather_vx_u8m4(__VA_ARGS__) |
| #define | vrgather_vx_u8m4_m(...) __riscv_vrgather_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u8m8(...) __riscv_vrgather_vx_u8m8(__VA_ARGS__) |
| #define | vrgather_vx_u8m8_m(...) __riscv_vrgather_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u8mf2(...) __riscv_vrgather_vx_u8mf2(__VA_ARGS__) |
| #define | vrgather_vx_u8mf2_m(...) __riscv_vrgather_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u8mf4(...) __riscv_vrgather_vx_u8mf4(__VA_ARGS__) |
| #define | vrgather_vx_u8mf4_m(...) __riscv_vrgather_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vrgather_vx_u8mf8(...) __riscv_vrgather_vx_u8mf8(__VA_ARGS__) |
| #define | vrgather_vx_u8mf8_m(...) __riscv_vrgather_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_f16m1(...) __riscv_vrgatherei16_vv_f16m1(__VA_ARGS__) |
| #define | vrgatherei16_vv_f16m1_m(...) __riscv_vrgatherei16_vv_f16m1_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_f16m2(...) __riscv_vrgatherei16_vv_f16m2(__VA_ARGS__) |
| #define | vrgatherei16_vv_f16m2_m(...) __riscv_vrgatherei16_vv_f16m2_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_f16m4(...) __riscv_vrgatherei16_vv_f16m4(__VA_ARGS__) |
| #define | vrgatherei16_vv_f16m4_m(...) __riscv_vrgatherei16_vv_f16m4_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_f16m8(...) __riscv_vrgatherei16_vv_f16m8(__VA_ARGS__) |
| #define | vrgatherei16_vv_f16m8_m(...) __riscv_vrgatherei16_vv_f16m8_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_f16mf2(...) __riscv_vrgatherei16_vv_f16mf2(__VA_ARGS__) |
| #define | vrgatherei16_vv_f16mf2_m(...) __riscv_vrgatherei16_vv_f16mf2_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_f16mf4(...) __riscv_vrgatherei16_vv_f16mf4(__VA_ARGS__) |
| #define | vrgatherei16_vv_f16mf4_m(...) __riscv_vrgatherei16_vv_f16mf4_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_f32m1(...) __riscv_vrgatherei16_vv_f32m1(__VA_ARGS__) |
| #define | vrgatherei16_vv_f32m1_m(...) __riscv_vrgatherei16_vv_f32m1_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_f32m2(...) __riscv_vrgatherei16_vv_f32m2(__VA_ARGS__) |
| #define | vrgatherei16_vv_f32m2_m(...) __riscv_vrgatherei16_vv_f32m2_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_f32m4(...) __riscv_vrgatherei16_vv_f32m4(__VA_ARGS__) |
| #define | vrgatherei16_vv_f32m4_m(...) __riscv_vrgatherei16_vv_f32m4_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_f32m8(...) __riscv_vrgatherei16_vv_f32m8(__VA_ARGS__) |
| #define | vrgatherei16_vv_f32m8_m(...) __riscv_vrgatherei16_vv_f32m8_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_f32mf2(...) __riscv_vrgatherei16_vv_f32mf2(__VA_ARGS__) |
| #define | vrgatherei16_vv_f32mf2_m(...) __riscv_vrgatherei16_vv_f32mf2_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_f64m1(...) __riscv_vrgatherei16_vv_f64m1(__VA_ARGS__) |
| #define | vrgatherei16_vv_f64m1_m(...) __riscv_vrgatherei16_vv_f64m1_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_f64m2(...) __riscv_vrgatherei16_vv_f64m2(__VA_ARGS__) |
| #define | vrgatherei16_vv_f64m2_m(...) __riscv_vrgatherei16_vv_f64m2_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_f64m4(...) __riscv_vrgatherei16_vv_f64m4(__VA_ARGS__) |
| #define | vrgatherei16_vv_f64m4_m(...) __riscv_vrgatherei16_vv_f64m4_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_f64m8(...) __riscv_vrgatherei16_vv_f64m8(__VA_ARGS__) |
| #define | vrgatherei16_vv_f64m8_m(...) __riscv_vrgatherei16_vv_f64m8_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_i16m1(...) __riscv_vrgatherei16_vv_i16m1(__VA_ARGS__) |
| #define | vrgatherei16_vv_i16m1_m(...) __riscv_vrgatherei16_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_i16m2(...) __riscv_vrgatherei16_vv_i16m2(__VA_ARGS__) |
| #define | vrgatherei16_vv_i16m2_m(...) __riscv_vrgatherei16_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_i16m4(...) __riscv_vrgatherei16_vv_i16m4(__VA_ARGS__) |
| #define | vrgatherei16_vv_i16m4_m(...) __riscv_vrgatherei16_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_i16m8(...) __riscv_vrgatherei16_vv_i16m8(__VA_ARGS__) |
| #define | vrgatherei16_vv_i16m8_m(...) __riscv_vrgatherei16_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_i16mf2(...) __riscv_vrgatherei16_vv_i16mf2(__VA_ARGS__) |
| #define | vrgatherei16_vv_i16mf2_m(...) __riscv_vrgatherei16_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_i16mf4(...) __riscv_vrgatherei16_vv_i16mf4(__VA_ARGS__) |
| #define | vrgatherei16_vv_i16mf4_m(...) __riscv_vrgatherei16_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_i32m1(...) __riscv_vrgatherei16_vv_i32m1(__VA_ARGS__) |
| #define | vrgatherei16_vv_i32m1_m(...) __riscv_vrgatherei16_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_i32m2(...) __riscv_vrgatherei16_vv_i32m2(__VA_ARGS__) |
| #define | vrgatherei16_vv_i32m2_m(...) __riscv_vrgatherei16_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_i32m4(...) __riscv_vrgatherei16_vv_i32m4(__VA_ARGS__) |
| #define | vrgatherei16_vv_i32m4_m(...) __riscv_vrgatherei16_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_i32m8(...) __riscv_vrgatherei16_vv_i32m8(__VA_ARGS__) |
| #define | vrgatherei16_vv_i32m8_m(...) __riscv_vrgatherei16_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_i32mf2(...) __riscv_vrgatherei16_vv_i32mf2(__VA_ARGS__) |
| #define | vrgatherei16_vv_i32mf2_m(...) __riscv_vrgatherei16_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_i64m1(...) __riscv_vrgatherei16_vv_i64m1(__VA_ARGS__) |
| #define | vrgatherei16_vv_i64m1_m(...) __riscv_vrgatherei16_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_i64m2(...) __riscv_vrgatherei16_vv_i64m2(__VA_ARGS__) |
| #define | vrgatherei16_vv_i64m2_m(...) __riscv_vrgatherei16_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_i64m4(...) __riscv_vrgatherei16_vv_i64m4(__VA_ARGS__) |
| #define | vrgatherei16_vv_i64m4_m(...) __riscv_vrgatherei16_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_i64m8(...) __riscv_vrgatherei16_vv_i64m8(__VA_ARGS__) |
| #define | vrgatherei16_vv_i64m8_m(...) __riscv_vrgatherei16_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_i8m1(...) __riscv_vrgatherei16_vv_i8m1(__VA_ARGS__) |
| #define | vrgatherei16_vv_i8m1_m(...) __riscv_vrgatherei16_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_i8m2(...) __riscv_vrgatherei16_vv_i8m2(__VA_ARGS__) |
| #define | vrgatherei16_vv_i8m2_m(...) __riscv_vrgatherei16_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_i8m4(...) __riscv_vrgatherei16_vv_i8m4(__VA_ARGS__) |
| #define | vrgatherei16_vv_i8m4_m(...) __riscv_vrgatherei16_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_i8mf2(...) __riscv_vrgatherei16_vv_i8mf2(__VA_ARGS__) |
| #define | vrgatherei16_vv_i8mf2_m(...) __riscv_vrgatherei16_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_i8mf4(...) __riscv_vrgatherei16_vv_i8mf4(__VA_ARGS__) |
| #define | vrgatherei16_vv_i8mf4_m(...) __riscv_vrgatherei16_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_i8mf8(...) __riscv_vrgatherei16_vv_i8mf8(__VA_ARGS__) |
| #define | vrgatherei16_vv_i8mf8_m(...) __riscv_vrgatherei16_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_u16m1(...) __riscv_vrgatherei16_vv_u16m1(__VA_ARGS__) |
| #define | vrgatherei16_vv_u16m1_m(...) __riscv_vrgatherei16_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_u16m2(...) __riscv_vrgatherei16_vv_u16m2(__VA_ARGS__) |
| #define | vrgatherei16_vv_u16m2_m(...) __riscv_vrgatherei16_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_u16m4(...) __riscv_vrgatherei16_vv_u16m4(__VA_ARGS__) |
| #define | vrgatherei16_vv_u16m4_m(...) __riscv_vrgatherei16_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_u16m8(...) __riscv_vrgatherei16_vv_u16m8(__VA_ARGS__) |
| #define | vrgatherei16_vv_u16m8_m(...) __riscv_vrgatherei16_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_u16mf2(...) __riscv_vrgatherei16_vv_u16mf2(__VA_ARGS__) |
| #define | vrgatherei16_vv_u16mf2_m(...) __riscv_vrgatherei16_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_u16mf4(...) __riscv_vrgatherei16_vv_u16mf4(__VA_ARGS__) |
| #define | vrgatherei16_vv_u16mf4_m(...) __riscv_vrgatherei16_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_u32m1(...) __riscv_vrgatherei16_vv_u32m1(__VA_ARGS__) |
| #define | vrgatherei16_vv_u32m1_m(...) __riscv_vrgatherei16_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_u32m2(...) __riscv_vrgatherei16_vv_u32m2(__VA_ARGS__) |
| #define | vrgatherei16_vv_u32m2_m(...) __riscv_vrgatherei16_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_u32m4(...) __riscv_vrgatherei16_vv_u32m4(__VA_ARGS__) |
| #define | vrgatherei16_vv_u32m4_m(...) __riscv_vrgatherei16_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_u32m8(...) __riscv_vrgatherei16_vv_u32m8(__VA_ARGS__) |
| #define | vrgatherei16_vv_u32m8_m(...) __riscv_vrgatherei16_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_u32mf2(...) __riscv_vrgatherei16_vv_u32mf2(__VA_ARGS__) |
| #define | vrgatherei16_vv_u32mf2_m(...) __riscv_vrgatherei16_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_u64m1(...) __riscv_vrgatherei16_vv_u64m1(__VA_ARGS__) |
| #define | vrgatherei16_vv_u64m1_m(...) __riscv_vrgatherei16_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_u64m2(...) __riscv_vrgatherei16_vv_u64m2(__VA_ARGS__) |
| #define | vrgatherei16_vv_u64m2_m(...) __riscv_vrgatherei16_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_u64m4(...) __riscv_vrgatherei16_vv_u64m4(__VA_ARGS__) |
| #define | vrgatherei16_vv_u64m4_m(...) __riscv_vrgatherei16_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_u64m8(...) __riscv_vrgatherei16_vv_u64m8(__VA_ARGS__) |
| #define | vrgatherei16_vv_u64m8_m(...) __riscv_vrgatherei16_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_u8m1(...) __riscv_vrgatherei16_vv_u8m1(__VA_ARGS__) |
| #define | vrgatherei16_vv_u8m1_m(...) __riscv_vrgatherei16_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_u8m2(...) __riscv_vrgatherei16_vv_u8m2(__VA_ARGS__) |
| #define | vrgatherei16_vv_u8m2_m(...) __riscv_vrgatherei16_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_u8m4(...) __riscv_vrgatherei16_vv_u8m4(__VA_ARGS__) |
| #define | vrgatherei16_vv_u8m4_m(...) __riscv_vrgatherei16_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_u8mf2(...) __riscv_vrgatherei16_vv_u8mf2(__VA_ARGS__) |
| #define | vrgatherei16_vv_u8mf2_m(...) __riscv_vrgatherei16_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_u8mf4(...) __riscv_vrgatherei16_vv_u8mf4(__VA_ARGS__) |
| #define | vrgatherei16_vv_u8mf4_m(...) __riscv_vrgatherei16_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vrgatherei16_vv_u8mf8(...) __riscv_vrgatherei16_vv_u8mf8(__VA_ARGS__) |
| #define | vrgatherei16_vv_u8mf8_m(...) __riscv_vrgatherei16_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i16m1(...) __riscv_vrsub_vx_i16m1(__VA_ARGS__) |
| #define | vrsub_vx_i16m1_m(...) __riscv_vrsub_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i16m2(...) __riscv_vrsub_vx_i16m2(__VA_ARGS__) |
| #define | vrsub_vx_i16m2_m(...) __riscv_vrsub_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i16m4(...) __riscv_vrsub_vx_i16m4(__VA_ARGS__) |
| #define | vrsub_vx_i16m4_m(...) __riscv_vrsub_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i16m8(...) __riscv_vrsub_vx_i16m8(__VA_ARGS__) |
| #define | vrsub_vx_i16m8_m(...) __riscv_vrsub_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i16mf2(...) __riscv_vrsub_vx_i16mf2(__VA_ARGS__) |
| #define | vrsub_vx_i16mf2_m(...) __riscv_vrsub_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i16mf4(...) __riscv_vrsub_vx_i16mf4(__VA_ARGS__) |
| #define | vrsub_vx_i16mf4_m(...) __riscv_vrsub_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i32m1(...) __riscv_vrsub_vx_i32m1(__VA_ARGS__) |
| #define | vrsub_vx_i32m1_m(...) __riscv_vrsub_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i32m2(...) __riscv_vrsub_vx_i32m2(__VA_ARGS__) |
| #define | vrsub_vx_i32m2_m(...) __riscv_vrsub_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i32m4(...) __riscv_vrsub_vx_i32m4(__VA_ARGS__) |
| #define | vrsub_vx_i32m4_m(...) __riscv_vrsub_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i32m8(...) __riscv_vrsub_vx_i32m8(__VA_ARGS__) |
| #define | vrsub_vx_i32m8_m(...) __riscv_vrsub_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i32mf2(...) __riscv_vrsub_vx_i32mf2(__VA_ARGS__) |
| #define | vrsub_vx_i32mf2_m(...) __riscv_vrsub_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i64m1(...) __riscv_vrsub_vx_i64m1(__VA_ARGS__) |
| #define | vrsub_vx_i64m1_m(...) __riscv_vrsub_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i64m2(...) __riscv_vrsub_vx_i64m2(__VA_ARGS__) |
| #define | vrsub_vx_i64m2_m(...) __riscv_vrsub_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i64m4(...) __riscv_vrsub_vx_i64m4(__VA_ARGS__) |
| #define | vrsub_vx_i64m4_m(...) __riscv_vrsub_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i64m8(...) __riscv_vrsub_vx_i64m8(__VA_ARGS__) |
| #define | vrsub_vx_i64m8_m(...) __riscv_vrsub_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i8m1(...) __riscv_vrsub_vx_i8m1(__VA_ARGS__) |
| #define | vrsub_vx_i8m1_m(...) __riscv_vrsub_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i8m2(...) __riscv_vrsub_vx_i8m2(__VA_ARGS__) |
| #define | vrsub_vx_i8m2_m(...) __riscv_vrsub_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i8m4(...) __riscv_vrsub_vx_i8m4(__VA_ARGS__) |
| #define | vrsub_vx_i8m4_m(...) __riscv_vrsub_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i8m8(...) __riscv_vrsub_vx_i8m8(__VA_ARGS__) |
| #define | vrsub_vx_i8m8_m(...) __riscv_vrsub_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i8mf2(...) __riscv_vrsub_vx_i8mf2(__VA_ARGS__) |
| #define | vrsub_vx_i8mf2_m(...) __riscv_vrsub_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i8mf4(...) __riscv_vrsub_vx_i8mf4(__VA_ARGS__) |
| #define | vrsub_vx_i8mf4_m(...) __riscv_vrsub_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vrsub_vx_i8mf8(...) __riscv_vrsub_vx_i8mf8(__VA_ARGS__) |
| #define | vrsub_vx_i8mf8_m(...) __riscv_vrsub_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u16m1(...) __riscv_vrsub_vx_u16m1(__VA_ARGS__) |
| #define | vrsub_vx_u16m1_m(...) __riscv_vrsub_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u16m2(...) __riscv_vrsub_vx_u16m2(__VA_ARGS__) |
| #define | vrsub_vx_u16m2_m(...) __riscv_vrsub_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u16m4(...) __riscv_vrsub_vx_u16m4(__VA_ARGS__) |
| #define | vrsub_vx_u16m4_m(...) __riscv_vrsub_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u16m8(...) __riscv_vrsub_vx_u16m8(__VA_ARGS__) |
| #define | vrsub_vx_u16m8_m(...) __riscv_vrsub_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u16mf2(...) __riscv_vrsub_vx_u16mf2(__VA_ARGS__) |
| #define | vrsub_vx_u16mf2_m(...) __riscv_vrsub_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u16mf4(...) __riscv_vrsub_vx_u16mf4(__VA_ARGS__) |
| #define | vrsub_vx_u16mf4_m(...) __riscv_vrsub_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u32m1(...) __riscv_vrsub_vx_u32m1(__VA_ARGS__) |
| #define | vrsub_vx_u32m1_m(...) __riscv_vrsub_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u32m2(...) __riscv_vrsub_vx_u32m2(__VA_ARGS__) |
| #define | vrsub_vx_u32m2_m(...) __riscv_vrsub_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u32m4(...) __riscv_vrsub_vx_u32m4(__VA_ARGS__) |
| #define | vrsub_vx_u32m4_m(...) __riscv_vrsub_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u32m8(...) __riscv_vrsub_vx_u32m8(__VA_ARGS__) |
| #define | vrsub_vx_u32m8_m(...) __riscv_vrsub_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u32mf2(...) __riscv_vrsub_vx_u32mf2(__VA_ARGS__) |
| #define | vrsub_vx_u32mf2_m(...) __riscv_vrsub_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u64m1(...) __riscv_vrsub_vx_u64m1(__VA_ARGS__) |
| #define | vrsub_vx_u64m1_m(...) __riscv_vrsub_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u64m2(...) __riscv_vrsub_vx_u64m2(__VA_ARGS__) |
| #define | vrsub_vx_u64m2_m(...) __riscv_vrsub_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u64m4(...) __riscv_vrsub_vx_u64m4(__VA_ARGS__) |
| #define | vrsub_vx_u64m4_m(...) __riscv_vrsub_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u64m8(...) __riscv_vrsub_vx_u64m8(__VA_ARGS__) |
| #define | vrsub_vx_u64m8_m(...) __riscv_vrsub_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u8m1(...) __riscv_vrsub_vx_u8m1(__VA_ARGS__) |
| #define | vrsub_vx_u8m1_m(...) __riscv_vrsub_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u8m2(...) __riscv_vrsub_vx_u8m2(__VA_ARGS__) |
| #define | vrsub_vx_u8m2_m(...) __riscv_vrsub_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u8m4(...) __riscv_vrsub_vx_u8m4(__VA_ARGS__) |
| #define | vrsub_vx_u8m4_m(...) __riscv_vrsub_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u8m8(...) __riscv_vrsub_vx_u8m8(__VA_ARGS__) |
| #define | vrsub_vx_u8m8_m(...) __riscv_vrsub_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u8mf2(...) __riscv_vrsub_vx_u8mf2(__VA_ARGS__) |
| #define | vrsub_vx_u8mf2_m(...) __riscv_vrsub_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u8mf4(...) __riscv_vrsub_vx_u8mf4(__VA_ARGS__) |
| #define | vrsub_vx_u8mf4_m(...) __riscv_vrsub_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vrsub_vx_u8mf8(...) __riscv_vrsub_vx_u8mf8(__VA_ARGS__) |
| #define | vrsub_vx_u8mf8_m(...) __riscv_vrsub_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i16m1(...) __riscv_vsadd_vv_i16m1(__VA_ARGS__) |
| #define | vsadd_vv_i16m1_m(...) __riscv_vsadd_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i16m2(...) __riscv_vsadd_vv_i16m2(__VA_ARGS__) |
| #define | vsadd_vv_i16m2_m(...) __riscv_vsadd_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i16m4(...) __riscv_vsadd_vv_i16m4(__VA_ARGS__) |
| #define | vsadd_vv_i16m4_m(...) __riscv_vsadd_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i16m8(...) __riscv_vsadd_vv_i16m8(__VA_ARGS__) |
| #define | vsadd_vv_i16m8_m(...) __riscv_vsadd_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i16mf2(...) __riscv_vsadd_vv_i16mf2(__VA_ARGS__) |
| #define | vsadd_vv_i16mf2_m(...) __riscv_vsadd_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i16mf4(...) __riscv_vsadd_vv_i16mf4(__VA_ARGS__) |
| #define | vsadd_vv_i16mf4_m(...) __riscv_vsadd_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i32m1(...) __riscv_vsadd_vv_i32m1(__VA_ARGS__) |
| #define | vsadd_vv_i32m1_m(...) __riscv_vsadd_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i32m2(...) __riscv_vsadd_vv_i32m2(__VA_ARGS__) |
| #define | vsadd_vv_i32m2_m(...) __riscv_vsadd_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i32m4(...) __riscv_vsadd_vv_i32m4(__VA_ARGS__) |
| #define | vsadd_vv_i32m4_m(...) __riscv_vsadd_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i32m8(...) __riscv_vsadd_vv_i32m8(__VA_ARGS__) |
| #define | vsadd_vv_i32m8_m(...) __riscv_vsadd_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i32mf2(...) __riscv_vsadd_vv_i32mf2(__VA_ARGS__) |
| #define | vsadd_vv_i32mf2_m(...) __riscv_vsadd_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i64m1(...) __riscv_vsadd_vv_i64m1(__VA_ARGS__) |
| #define | vsadd_vv_i64m1_m(...) __riscv_vsadd_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i64m2(...) __riscv_vsadd_vv_i64m2(__VA_ARGS__) |
| #define | vsadd_vv_i64m2_m(...) __riscv_vsadd_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i64m4(...) __riscv_vsadd_vv_i64m4(__VA_ARGS__) |
| #define | vsadd_vv_i64m4_m(...) __riscv_vsadd_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i64m8(...) __riscv_vsadd_vv_i64m8(__VA_ARGS__) |
| #define | vsadd_vv_i64m8_m(...) __riscv_vsadd_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i8m1(...) __riscv_vsadd_vv_i8m1(__VA_ARGS__) |
| #define | vsadd_vv_i8m1_m(...) __riscv_vsadd_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i8m2(...) __riscv_vsadd_vv_i8m2(__VA_ARGS__) |
| #define | vsadd_vv_i8m2_m(...) __riscv_vsadd_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i8m4(...) __riscv_vsadd_vv_i8m4(__VA_ARGS__) |
| #define | vsadd_vv_i8m4_m(...) __riscv_vsadd_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i8m8(...) __riscv_vsadd_vv_i8m8(__VA_ARGS__) |
| #define | vsadd_vv_i8m8_m(...) __riscv_vsadd_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i8mf2(...) __riscv_vsadd_vv_i8mf2(__VA_ARGS__) |
| #define | vsadd_vv_i8mf2_m(...) __riscv_vsadd_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i8mf4(...) __riscv_vsadd_vv_i8mf4(__VA_ARGS__) |
| #define | vsadd_vv_i8mf4_m(...) __riscv_vsadd_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vsadd_vv_i8mf8(...) __riscv_vsadd_vv_i8mf8(__VA_ARGS__) |
| #define | vsadd_vv_i8mf8_m(...) __riscv_vsadd_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i16m1(...) __riscv_vsadd_vx_i16m1(__VA_ARGS__) |
| #define | vsadd_vx_i16m1_m(...) __riscv_vsadd_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i16m2(...) __riscv_vsadd_vx_i16m2(__VA_ARGS__) |
| #define | vsadd_vx_i16m2_m(...) __riscv_vsadd_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i16m4(...) __riscv_vsadd_vx_i16m4(__VA_ARGS__) |
| #define | vsadd_vx_i16m4_m(...) __riscv_vsadd_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i16m8(...) __riscv_vsadd_vx_i16m8(__VA_ARGS__) |
| #define | vsadd_vx_i16m8_m(...) __riscv_vsadd_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i16mf2(...) __riscv_vsadd_vx_i16mf2(__VA_ARGS__) |
| #define | vsadd_vx_i16mf2_m(...) __riscv_vsadd_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i16mf4(...) __riscv_vsadd_vx_i16mf4(__VA_ARGS__) |
| #define | vsadd_vx_i16mf4_m(...) __riscv_vsadd_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i32m1(...) __riscv_vsadd_vx_i32m1(__VA_ARGS__) |
| #define | vsadd_vx_i32m1_m(...) __riscv_vsadd_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i32m2(...) __riscv_vsadd_vx_i32m2(__VA_ARGS__) |
| #define | vsadd_vx_i32m2_m(...) __riscv_vsadd_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i32m4(...) __riscv_vsadd_vx_i32m4(__VA_ARGS__) |
| #define | vsadd_vx_i32m4_m(...) __riscv_vsadd_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i32m8(...) __riscv_vsadd_vx_i32m8(__VA_ARGS__) |
| #define | vsadd_vx_i32m8_m(...) __riscv_vsadd_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i32mf2(...) __riscv_vsadd_vx_i32mf2(__VA_ARGS__) |
| #define | vsadd_vx_i32mf2_m(...) __riscv_vsadd_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i64m1(...) __riscv_vsadd_vx_i64m1(__VA_ARGS__) |
| #define | vsadd_vx_i64m1_m(...) __riscv_vsadd_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i64m2(...) __riscv_vsadd_vx_i64m2(__VA_ARGS__) |
| #define | vsadd_vx_i64m2_m(...) __riscv_vsadd_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i64m4(...) __riscv_vsadd_vx_i64m4(__VA_ARGS__) |
| #define | vsadd_vx_i64m4_m(...) __riscv_vsadd_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i64m8(...) __riscv_vsadd_vx_i64m8(__VA_ARGS__) |
| #define | vsadd_vx_i64m8_m(...) __riscv_vsadd_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i8m1(...) __riscv_vsadd_vx_i8m1(__VA_ARGS__) |
| #define | vsadd_vx_i8m1_m(...) __riscv_vsadd_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i8m2(...) __riscv_vsadd_vx_i8m2(__VA_ARGS__) |
| #define | vsadd_vx_i8m2_m(...) __riscv_vsadd_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i8m4(...) __riscv_vsadd_vx_i8m4(__VA_ARGS__) |
| #define | vsadd_vx_i8m4_m(...) __riscv_vsadd_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i8m8(...) __riscv_vsadd_vx_i8m8(__VA_ARGS__) |
| #define | vsadd_vx_i8m8_m(...) __riscv_vsadd_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i8mf2(...) __riscv_vsadd_vx_i8mf2(__VA_ARGS__) |
| #define | vsadd_vx_i8mf2_m(...) __riscv_vsadd_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i8mf4(...) __riscv_vsadd_vx_i8mf4(__VA_ARGS__) |
| #define | vsadd_vx_i8mf4_m(...) __riscv_vsadd_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vsadd_vx_i8mf8(...) __riscv_vsadd_vx_i8mf8(__VA_ARGS__) |
| #define | vsadd_vx_i8mf8_m(...) __riscv_vsadd_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u16m1(...) __riscv_vsaddu_vv_u16m1(__VA_ARGS__) |
| #define | vsaddu_vv_u16m1_m(...) __riscv_vsaddu_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u16m2(...) __riscv_vsaddu_vv_u16m2(__VA_ARGS__) |
| #define | vsaddu_vv_u16m2_m(...) __riscv_vsaddu_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u16m4(...) __riscv_vsaddu_vv_u16m4(__VA_ARGS__) |
| #define | vsaddu_vv_u16m4_m(...) __riscv_vsaddu_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u16m8(...) __riscv_vsaddu_vv_u16m8(__VA_ARGS__) |
| #define | vsaddu_vv_u16m8_m(...) __riscv_vsaddu_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u16mf2(...) __riscv_vsaddu_vv_u16mf2(__VA_ARGS__) |
| #define | vsaddu_vv_u16mf2_m(...) __riscv_vsaddu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u16mf4(...) __riscv_vsaddu_vv_u16mf4(__VA_ARGS__) |
| #define | vsaddu_vv_u16mf4_m(...) __riscv_vsaddu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u32m1(...) __riscv_vsaddu_vv_u32m1(__VA_ARGS__) |
| #define | vsaddu_vv_u32m1_m(...) __riscv_vsaddu_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u32m2(...) __riscv_vsaddu_vv_u32m2(__VA_ARGS__) |
| #define | vsaddu_vv_u32m2_m(...) __riscv_vsaddu_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u32m4(...) __riscv_vsaddu_vv_u32m4(__VA_ARGS__) |
| #define | vsaddu_vv_u32m4_m(...) __riscv_vsaddu_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u32m8(...) __riscv_vsaddu_vv_u32m8(__VA_ARGS__) |
| #define | vsaddu_vv_u32m8_m(...) __riscv_vsaddu_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u32mf2(...) __riscv_vsaddu_vv_u32mf2(__VA_ARGS__) |
| #define | vsaddu_vv_u32mf2_m(...) __riscv_vsaddu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u64m1(...) __riscv_vsaddu_vv_u64m1(__VA_ARGS__) |
| #define | vsaddu_vv_u64m1_m(...) __riscv_vsaddu_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u64m2(...) __riscv_vsaddu_vv_u64m2(__VA_ARGS__) |
| #define | vsaddu_vv_u64m2_m(...) __riscv_vsaddu_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u64m4(...) __riscv_vsaddu_vv_u64m4(__VA_ARGS__) |
| #define | vsaddu_vv_u64m4_m(...) __riscv_vsaddu_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u64m8(...) __riscv_vsaddu_vv_u64m8(__VA_ARGS__) |
| #define | vsaddu_vv_u64m8_m(...) __riscv_vsaddu_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u8m1(...) __riscv_vsaddu_vv_u8m1(__VA_ARGS__) |
| #define | vsaddu_vv_u8m1_m(...) __riscv_vsaddu_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u8m2(...) __riscv_vsaddu_vv_u8m2(__VA_ARGS__) |
| #define | vsaddu_vv_u8m2_m(...) __riscv_vsaddu_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u8m4(...) __riscv_vsaddu_vv_u8m4(__VA_ARGS__) |
| #define | vsaddu_vv_u8m4_m(...) __riscv_vsaddu_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u8m8(...) __riscv_vsaddu_vv_u8m8(__VA_ARGS__) |
| #define | vsaddu_vv_u8m8_m(...) __riscv_vsaddu_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u8mf2(...) __riscv_vsaddu_vv_u8mf2(__VA_ARGS__) |
| #define | vsaddu_vv_u8mf2_m(...) __riscv_vsaddu_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u8mf4(...) __riscv_vsaddu_vv_u8mf4(__VA_ARGS__) |
| #define | vsaddu_vv_u8mf4_m(...) __riscv_vsaddu_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vsaddu_vv_u8mf8(...) __riscv_vsaddu_vv_u8mf8(__VA_ARGS__) |
| #define | vsaddu_vv_u8mf8_m(...) __riscv_vsaddu_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u16m1(...) __riscv_vsaddu_vx_u16m1(__VA_ARGS__) |
| #define | vsaddu_vx_u16m1_m(...) __riscv_vsaddu_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u16m2(...) __riscv_vsaddu_vx_u16m2(__VA_ARGS__) |
| #define | vsaddu_vx_u16m2_m(...) __riscv_vsaddu_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u16m4(...) __riscv_vsaddu_vx_u16m4(__VA_ARGS__) |
| #define | vsaddu_vx_u16m4_m(...) __riscv_vsaddu_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u16m8(...) __riscv_vsaddu_vx_u16m8(__VA_ARGS__) |
| #define | vsaddu_vx_u16m8_m(...) __riscv_vsaddu_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u16mf2(...) __riscv_vsaddu_vx_u16mf2(__VA_ARGS__) |
| #define | vsaddu_vx_u16mf2_m(...) __riscv_vsaddu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u16mf4(...) __riscv_vsaddu_vx_u16mf4(__VA_ARGS__) |
| #define | vsaddu_vx_u16mf4_m(...) __riscv_vsaddu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u32m1(...) __riscv_vsaddu_vx_u32m1(__VA_ARGS__) |
| #define | vsaddu_vx_u32m1_m(...) __riscv_vsaddu_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u32m2(...) __riscv_vsaddu_vx_u32m2(__VA_ARGS__) |
| #define | vsaddu_vx_u32m2_m(...) __riscv_vsaddu_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u32m4(...) __riscv_vsaddu_vx_u32m4(__VA_ARGS__) |
| #define | vsaddu_vx_u32m4_m(...) __riscv_vsaddu_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u32m8(...) __riscv_vsaddu_vx_u32m8(__VA_ARGS__) |
| #define | vsaddu_vx_u32m8_m(...) __riscv_vsaddu_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u32mf2(...) __riscv_vsaddu_vx_u32mf2(__VA_ARGS__) |
| #define | vsaddu_vx_u32mf2_m(...) __riscv_vsaddu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u64m1(...) __riscv_vsaddu_vx_u64m1(__VA_ARGS__) |
| #define | vsaddu_vx_u64m1_m(...) __riscv_vsaddu_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u64m2(...) __riscv_vsaddu_vx_u64m2(__VA_ARGS__) |
| #define | vsaddu_vx_u64m2_m(...) __riscv_vsaddu_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u64m4(...) __riscv_vsaddu_vx_u64m4(__VA_ARGS__) |
| #define | vsaddu_vx_u64m4_m(...) __riscv_vsaddu_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u64m8(...) __riscv_vsaddu_vx_u64m8(__VA_ARGS__) |
| #define | vsaddu_vx_u64m8_m(...) __riscv_vsaddu_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u8m1(...) __riscv_vsaddu_vx_u8m1(__VA_ARGS__) |
| #define | vsaddu_vx_u8m1_m(...) __riscv_vsaddu_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u8m2(...) __riscv_vsaddu_vx_u8m2(__VA_ARGS__) |
| #define | vsaddu_vx_u8m2_m(...) __riscv_vsaddu_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u8m4(...) __riscv_vsaddu_vx_u8m4(__VA_ARGS__) |
| #define | vsaddu_vx_u8m4_m(...) __riscv_vsaddu_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u8m8(...) __riscv_vsaddu_vx_u8m8(__VA_ARGS__) |
| #define | vsaddu_vx_u8m8_m(...) __riscv_vsaddu_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u8mf2(...) __riscv_vsaddu_vx_u8mf2(__VA_ARGS__) |
| #define | vsaddu_vx_u8mf2_m(...) __riscv_vsaddu_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u8mf4(...) __riscv_vsaddu_vx_u8mf4(__VA_ARGS__) |
| #define | vsaddu_vx_u8mf4_m(...) __riscv_vsaddu_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vsaddu_vx_u8mf8(...) __riscv_vsaddu_vx_u8mf8(__VA_ARGS__) |
| #define | vsaddu_vx_u8mf8_m(...) __riscv_vsaddu_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vsbc_vvm_i16m1(...) __riscv_vsbc_vvm_i16m1(__VA_ARGS__) |
| #define | vsbc_vvm_i16m2(...) __riscv_vsbc_vvm_i16m2(__VA_ARGS__) |
| #define | vsbc_vvm_i16m4(...) __riscv_vsbc_vvm_i16m4(__VA_ARGS__) |
| #define | vsbc_vvm_i16m8(...) __riscv_vsbc_vvm_i16m8(__VA_ARGS__) |
| #define | vsbc_vvm_i16mf2(...) __riscv_vsbc_vvm_i16mf2(__VA_ARGS__) |
| #define | vsbc_vvm_i16mf4(...) __riscv_vsbc_vvm_i16mf4(__VA_ARGS__) |
| #define | vsbc_vvm_i32m1(...) __riscv_vsbc_vvm_i32m1(__VA_ARGS__) |
| #define | vsbc_vvm_i32m2(...) __riscv_vsbc_vvm_i32m2(__VA_ARGS__) |
| #define | vsbc_vvm_i32m4(...) __riscv_vsbc_vvm_i32m4(__VA_ARGS__) |
| #define | vsbc_vvm_i32m8(...) __riscv_vsbc_vvm_i32m8(__VA_ARGS__) |
| #define | vsbc_vvm_i32mf2(...) __riscv_vsbc_vvm_i32mf2(__VA_ARGS__) |
| #define | vsbc_vvm_i64m1(...) __riscv_vsbc_vvm_i64m1(__VA_ARGS__) |
| #define | vsbc_vvm_i64m2(...) __riscv_vsbc_vvm_i64m2(__VA_ARGS__) |
| #define | vsbc_vvm_i64m4(...) __riscv_vsbc_vvm_i64m4(__VA_ARGS__) |
| #define | vsbc_vvm_i64m8(...) __riscv_vsbc_vvm_i64m8(__VA_ARGS__) |
| #define | vsbc_vvm_i8m1(...) __riscv_vsbc_vvm_i8m1(__VA_ARGS__) |
| #define | vsbc_vvm_i8m2(...) __riscv_vsbc_vvm_i8m2(__VA_ARGS__) |
| #define | vsbc_vvm_i8m4(...) __riscv_vsbc_vvm_i8m4(__VA_ARGS__) |
| #define | vsbc_vvm_i8m8(...) __riscv_vsbc_vvm_i8m8(__VA_ARGS__) |
| #define | vsbc_vvm_i8mf2(...) __riscv_vsbc_vvm_i8mf2(__VA_ARGS__) |
| #define | vsbc_vvm_i8mf4(...) __riscv_vsbc_vvm_i8mf4(__VA_ARGS__) |
| #define | vsbc_vvm_i8mf8(...) __riscv_vsbc_vvm_i8mf8(__VA_ARGS__) |
| #define | vsbc_vvm_u16m1(...) __riscv_vsbc_vvm_u16m1(__VA_ARGS__) |
| #define | vsbc_vvm_u16m2(...) __riscv_vsbc_vvm_u16m2(__VA_ARGS__) |
| #define | vsbc_vvm_u16m4(...) __riscv_vsbc_vvm_u16m4(__VA_ARGS__) |
| #define | vsbc_vvm_u16m8(...) __riscv_vsbc_vvm_u16m8(__VA_ARGS__) |
| #define | vsbc_vvm_u16mf2(...) __riscv_vsbc_vvm_u16mf2(__VA_ARGS__) |
| #define | vsbc_vvm_u16mf4(...) __riscv_vsbc_vvm_u16mf4(__VA_ARGS__) |
| #define | vsbc_vvm_u32m1(...) __riscv_vsbc_vvm_u32m1(__VA_ARGS__) |
| #define | vsbc_vvm_u32m2(...) __riscv_vsbc_vvm_u32m2(__VA_ARGS__) |
| #define | vsbc_vvm_u32m4(...) __riscv_vsbc_vvm_u32m4(__VA_ARGS__) |
| #define | vsbc_vvm_u32m8(...) __riscv_vsbc_vvm_u32m8(__VA_ARGS__) |
| #define | vsbc_vvm_u32mf2(...) __riscv_vsbc_vvm_u32mf2(__VA_ARGS__) |
| #define | vsbc_vvm_u64m1(...) __riscv_vsbc_vvm_u64m1(__VA_ARGS__) |
| #define | vsbc_vvm_u64m2(...) __riscv_vsbc_vvm_u64m2(__VA_ARGS__) |
| #define | vsbc_vvm_u64m4(...) __riscv_vsbc_vvm_u64m4(__VA_ARGS__) |
| #define | vsbc_vvm_u64m8(...) __riscv_vsbc_vvm_u64m8(__VA_ARGS__) |
| #define | vsbc_vvm_u8m1(...) __riscv_vsbc_vvm_u8m1(__VA_ARGS__) |
| #define | vsbc_vvm_u8m2(...) __riscv_vsbc_vvm_u8m2(__VA_ARGS__) |
| #define | vsbc_vvm_u8m4(...) __riscv_vsbc_vvm_u8m4(__VA_ARGS__) |
| #define | vsbc_vvm_u8m8(...) __riscv_vsbc_vvm_u8m8(__VA_ARGS__) |
| #define | vsbc_vvm_u8mf2(...) __riscv_vsbc_vvm_u8mf2(__VA_ARGS__) |
| #define | vsbc_vvm_u8mf4(...) __riscv_vsbc_vvm_u8mf4(__VA_ARGS__) |
| #define | vsbc_vvm_u8mf8(...) __riscv_vsbc_vvm_u8mf8(__VA_ARGS__) |
| #define | vsbc_vxm_i16m1(...) __riscv_vsbc_vxm_i16m1(__VA_ARGS__) |
| #define | vsbc_vxm_i16m2(...) __riscv_vsbc_vxm_i16m2(__VA_ARGS__) |
| #define | vsbc_vxm_i16m4(...) __riscv_vsbc_vxm_i16m4(__VA_ARGS__) |
| #define | vsbc_vxm_i16m8(...) __riscv_vsbc_vxm_i16m8(__VA_ARGS__) |
| #define | vsbc_vxm_i16mf2(...) __riscv_vsbc_vxm_i16mf2(__VA_ARGS__) |
| #define | vsbc_vxm_i16mf4(...) __riscv_vsbc_vxm_i16mf4(__VA_ARGS__) |
| #define | vsbc_vxm_i32m1(...) __riscv_vsbc_vxm_i32m1(__VA_ARGS__) |
| #define | vsbc_vxm_i32m2(...) __riscv_vsbc_vxm_i32m2(__VA_ARGS__) |
| #define | vsbc_vxm_i32m4(...) __riscv_vsbc_vxm_i32m4(__VA_ARGS__) |
| #define | vsbc_vxm_i32m8(...) __riscv_vsbc_vxm_i32m8(__VA_ARGS__) |
| #define | vsbc_vxm_i32mf2(...) __riscv_vsbc_vxm_i32mf2(__VA_ARGS__) |
| #define | vsbc_vxm_i64m1(...) __riscv_vsbc_vxm_i64m1(__VA_ARGS__) |
| #define | vsbc_vxm_i64m2(...) __riscv_vsbc_vxm_i64m2(__VA_ARGS__) |
| #define | vsbc_vxm_i64m4(...) __riscv_vsbc_vxm_i64m4(__VA_ARGS__) |
| #define | vsbc_vxm_i64m8(...) __riscv_vsbc_vxm_i64m8(__VA_ARGS__) |
| #define | vsbc_vxm_i8m1(...) __riscv_vsbc_vxm_i8m1(__VA_ARGS__) |
| #define | vsbc_vxm_i8m2(...) __riscv_vsbc_vxm_i8m2(__VA_ARGS__) |
| #define | vsbc_vxm_i8m4(...) __riscv_vsbc_vxm_i8m4(__VA_ARGS__) |
| #define | vsbc_vxm_i8m8(...) __riscv_vsbc_vxm_i8m8(__VA_ARGS__) |
| #define | vsbc_vxm_i8mf2(...) __riscv_vsbc_vxm_i8mf2(__VA_ARGS__) |
| #define | vsbc_vxm_i8mf4(...) __riscv_vsbc_vxm_i8mf4(__VA_ARGS__) |
| #define | vsbc_vxm_i8mf8(...) __riscv_vsbc_vxm_i8mf8(__VA_ARGS__) |
| #define | vsbc_vxm_u16m1(...) __riscv_vsbc_vxm_u16m1(__VA_ARGS__) |
| #define | vsbc_vxm_u16m2(...) __riscv_vsbc_vxm_u16m2(__VA_ARGS__) |
| #define | vsbc_vxm_u16m4(...) __riscv_vsbc_vxm_u16m4(__VA_ARGS__) |
| #define | vsbc_vxm_u16m8(...) __riscv_vsbc_vxm_u16m8(__VA_ARGS__) |
| #define | vsbc_vxm_u16mf2(...) __riscv_vsbc_vxm_u16mf2(__VA_ARGS__) |
| #define | vsbc_vxm_u16mf4(...) __riscv_vsbc_vxm_u16mf4(__VA_ARGS__) |
| #define | vsbc_vxm_u32m1(...) __riscv_vsbc_vxm_u32m1(__VA_ARGS__) |
| #define | vsbc_vxm_u32m2(...) __riscv_vsbc_vxm_u32m2(__VA_ARGS__) |
| #define | vsbc_vxm_u32m4(...) __riscv_vsbc_vxm_u32m4(__VA_ARGS__) |
| #define | vsbc_vxm_u32m8(...) __riscv_vsbc_vxm_u32m8(__VA_ARGS__) |
| #define | vsbc_vxm_u32mf2(...) __riscv_vsbc_vxm_u32mf2(__VA_ARGS__) |
| #define | vsbc_vxm_u64m1(...) __riscv_vsbc_vxm_u64m1(__VA_ARGS__) |
| #define | vsbc_vxm_u64m2(...) __riscv_vsbc_vxm_u64m2(__VA_ARGS__) |
| #define | vsbc_vxm_u64m4(...) __riscv_vsbc_vxm_u64m4(__VA_ARGS__) |
| #define | vsbc_vxm_u64m8(...) __riscv_vsbc_vxm_u64m8(__VA_ARGS__) |
| #define | vsbc_vxm_u8m1(...) __riscv_vsbc_vxm_u8m1(__VA_ARGS__) |
| #define | vsbc_vxm_u8m2(...) __riscv_vsbc_vxm_u8m2(__VA_ARGS__) |
| #define | vsbc_vxm_u8m4(...) __riscv_vsbc_vxm_u8m4(__VA_ARGS__) |
| #define | vsbc_vxm_u8m8(...) __riscv_vsbc_vxm_u8m8(__VA_ARGS__) |
| #define | vsbc_vxm_u8mf2(...) __riscv_vsbc_vxm_u8mf2(__VA_ARGS__) |
| #define | vsbc_vxm_u8mf4(...) __riscv_vsbc_vxm_u8mf4(__VA_ARGS__) |
| #define | vsbc_vxm_u8mf8(...) __riscv_vsbc_vxm_u8mf8(__VA_ARGS__) |
| #define | vse16_v_f16m1(...) __riscv_vse16_v_f16m1(__VA_ARGS__) |
| #define | vse16_v_f16m1_m(...) __riscv_vse16_v_f16m1_m(__VA_ARGS__) |
| #define | vse16_v_f16m2(...) __riscv_vse16_v_f16m2(__VA_ARGS__) |
| #define | vse16_v_f16m2_m(...) __riscv_vse16_v_f16m2_m(__VA_ARGS__) |
| #define | vse16_v_f16m4(...) __riscv_vse16_v_f16m4(__VA_ARGS__) |
| #define | vse16_v_f16m4_m(...) __riscv_vse16_v_f16m4_m(__VA_ARGS__) |
| #define | vse16_v_f16m8(...) __riscv_vse16_v_f16m8(__VA_ARGS__) |
| #define | vse16_v_f16m8_m(...) __riscv_vse16_v_f16m8_m(__VA_ARGS__) |
| #define | vse16_v_f16mf2(...) __riscv_vse16_v_f16mf2(__VA_ARGS__) |
| #define | vse16_v_f16mf2_m(...) __riscv_vse16_v_f16mf2_m(__VA_ARGS__) |
| #define | vse16_v_f16mf4(...) __riscv_vse16_v_f16mf4(__VA_ARGS__) |
| #define | vse16_v_f16mf4_m(...) __riscv_vse16_v_f16mf4_m(__VA_ARGS__) |
| #define | vse16_v_i16m1(...) __riscv_vse16_v_i16m1(__VA_ARGS__) |
| #define | vse16_v_i16m1_m(...) __riscv_vse16_v_i16m1_m(__VA_ARGS__) |
| #define | vse16_v_i16m2(...) __riscv_vse16_v_i16m2(__VA_ARGS__) |
| #define | vse16_v_i16m2_m(...) __riscv_vse16_v_i16m2_m(__VA_ARGS__) |
| #define | vse16_v_i16m4(...) __riscv_vse16_v_i16m4(__VA_ARGS__) |
| #define | vse16_v_i16m4_m(...) __riscv_vse16_v_i16m4_m(__VA_ARGS__) |
| #define | vse16_v_i16m8(...) __riscv_vse16_v_i16m8(__VA_ARGS__) |
| #define | vse16_v_i16m8_m(...) __riscv_vse16_v_i16m8_m(__VA_ARGS__) |
| #define | vse16_v_i16mf2(...) __riscv_vse16_v_i16mf2(__VA_ARGS__) |
| #define | vse16_v_i16mf2_m(...) __riscv_vse16_v_i16mf2_m(__VA_ARGS__) |
| #define | vse16_v_i16mf4(...) __riscv_vse16_v_i16mf4(__VA_ARGS__) |
| #define | vse16_v_i16mf4_m(...) __riscv_vse16_v_i16mf4_m(__VA_ARGS__) |
| #define | vse16_v_u16m1(...) __riscv_vse16_v_u16m1(__VA_ARGS__) |
| #define | vse16_v_u16m1_m(...) __riscv_vse16_v_u16m1_m(__VA_ARGS__) |
| #define | vse16_v_u16m2(...) __riscv_vse16_v_u16m2(__VA_ARGS__) |
| #define | vse16_v_u16m2_m(...) __riscv_vse16_v_u16m2_m(__VA_ARGS__) |
| #define | vse16_v_u16m4(...) __riscv_vse16_v_u16m4(__VA_ARGS__) |
| #define | vse16_v_u16m4_m(...) __riscv_vse16_v_u16m4_m(__VA_ARGS__) |
| #define | vse16_v_u16m8(...) __riscv_vse16_v_u16m8(__VA_ARGS__) |
| #define | vse16_v_u16m8_m(...) __riscv_vse16_v_u16m8_m(__VA_ARGS__) |
| #define | vse16_v_u16mf2(...) __riscv_vse16_v_u16mf2(__VA_ARGS__) |
| #define | vse16_v_u16mf2_m(...) __riscv_vse16_v_u16mf2_m(__VA_ARGS__) |
| #define | vse16_v_u16mf4(...) __riscv_vse16_v_u16mf4(__VA_ARGS__) |
| #define | vse16_v_u16mf4_m(...) __riscv_vse16_v_u16mf4_m(__VA_ARGS__) |
| #define | vse32_v_f32m1(...) __riscv_vse32_v_f32m1(__VA_ARGS__) |
| #define | vse32_v_f32m1_m(...) __riscv_vse32_v_f32m1_m(__VA_ARGS__) |
| #define | vse32_v_f32m2(...) __riscv_vse32_v_f32m2(__VA_ARGS__) |
| #define | vse32_v_f32m2_m(...) __riscv_vse32_v_f32m2_m(__VA_ARGS__) |
| #define | vse32_v_f32m4(...) __riscv_vse32_v_f32m4(__VA_ARGS__) |
| #define | vse32_v_f32m4_m(...) __riscv_vse32_v_f32m4_m(__VA_ARGS__) |
| #define | vse32_v_f32m8(...) __riscv_vse32_v_f32m8(__VA_ARGS__) |
| #define | vse32_v_f32m8_m(...) __riscv_vse32_v_f32m8_m(__VA_ARGS__) |
| #define | vse32_v_f32mf2(...) __riscv_vse32_v_f32mf2(__VA_ARGS__) |
| #define | vse32_v_f32mf2_m(...) __riscv_vse32_v_f32mf2_m(__VA_ARGS__) |
| #define | vse32_v_i32m1(...) __riscv_vse32_v_i32m1(__VA_ARGS__) |
| #define | vse32_v_i32m1_m(...) __riscv_vse32_v_i32m1_m(__VA_ARGS__) |
| #define | vse32_v_i32m2(...) __riscv_vse32_v_i32m2(__VA_ARGS__) |
| #define | vse32_v_i32m2_m(...) __riscv_vse32_v_i32m2_m(__VA_ARGS__) |
| #define | vse32_v_i32m4(...) __riscv_vse32_v_i32m4(__VA_ARGS__) |
| #define | vse32_v_i32m4_m(...) __riscv_vse32_v_i32m4_m(__VA_ARGS__) |
| #define | vse32_v_i32m8(...) __riscv_vse32_v_i32m8(__VA_ARGS__) |
| #define | vse32_v_i32m8_m(...) __riscv_vse32_v_i32m8_m(__VA_ARGS__) |
| #define | vse32_v_i32mf2(...) __riscv_vse32_v_i32mf2(__VA_ARGS__) |
| #define | vse32_v_i32mf2_m(...) __riscv_vse32_v_i32mf2_m(__VA_ARGS__) |
| #define | vse32_v_u32m1(...) __riscv_vse32_v_u32m1(__VA_ARGS__) |
| #define | vse32_v_u32m1_m(...) __riscv_vse32_v_u32m1_m(__VA_ARGS__) |
| #define | vse32_v_u32m2(...) __riscv_vse32_v_u32m2(__VA_ARGS__) |
| #define | vse32_v_u32m2_m(...) __riscv_vse32_v_u32m2_m(__VA_ARGS__) |
| #define | vse32_v_u32m4(...) __riscv_vse32_v_u32m4(__VA_ARGS__) |
| #define | vse32_v_u32m4_m(...) __riscv_vse32_v_u32m4_m(__VA_ARGS__) |
| #define | vse32_v_u32m8(...) __riscv_vse32_v_u32m8(__VA_ARGS__) |
| #define | vse32_v_u32m8_m(...) __riscv_vse32_v_u32m8_m(__VA_ARGS__) |
| #define | vse32_v_u32mf2(...) __riscv_vse32_v_u32mf2(__VA_ARGS__) |
| #define | vse32_v_u32mf2_m(...) __riscv_vse32_v_u32mf2_m(__VA_ARGS__) |
| #define | vse64_v_f64m1(...) __riscv_vse64_v_f64m1(__VA_ARGS__) |
| #define | vse64_v_f64m1_m(...) __riscv_vse64_v_f64m1_m(__VA_ARGS__) |
| #define | vse64_v_f64m2(...) __riscv_vse64_v_f64m2(__VA_ARGS__) |
| #define | vse64_v_f64m2_m(...) __riscv_vse64_v_f64m2_m(__VA_ARGS__) |
| #define | vse64_v_f64m4(...) __riscv_vse64_v_f64m4(__VA_ARGS__) |
| #define | vse64_v_f64m4_m(...) __riscv_vse64_v_f64m4_m(__VA_ARGS__) |
| #define | vse64_v_f64m8(...) __riscv_vse64_v_f64m8(__VA_ARGS__) |
| #define | vse64_v_f64m8_m(...) __riscv_vse64_v_f64m8_m(__VA_ARGS__) |
| #define | vse64_v_i64m1(...) __riscv_vse64_v_i64m1(__VA_ARGS__) |
| #define | vse64_v_i64m1_m(...) __riscv_vse64_v_i64m1_m(__VA_ARGS__) |
| #define | vse64_v_i64m2(...) __riscv_vse64_v_i64m2(__VA_ARGS__) |
| #define | vse64_v_i64m2_m(...) __riscv_vse64_v_i64m2_m(__VA_ARGS__) |
| #define | vse64_v_i64m4(...) __riscv_vse64_v_i64m4(__VA_ARGS__) |
| #define | vse64_v_i64m4_m(...) __riscv_vse64_v_i64m4_m(__VA_ARGS__) |
| #define | vse64_v_i64m8(...) __riscv_vse64_v_i64m8(__VA_ARGS__) |
| #define | vse64_v_i64m8_m(...) __riscv_vse64_v_i64m8_m(__VA_ARGS__) |
| #define | vse64_v_u64m1(...) __riscv_vse64_v_u64m1(__VA_ARGS__) |
| #define | vse64_v_u64m1_m(...) __riscv_vse64_v_u64m1_m(__VA_ARGS__) |
| #define | vse64_v_u64m2(...) __riscv_vse64_v_u64m2(__VA_ARGS__) |
| #define | vse64_v_u64m2_m(...) __riscv_vse64_v_u64m2_m(__VA_ARGS__) |
| #define | vse64_v_u64m4(...) __riscv_vse64_v_u64m4(__VA_ARGS__) |
| #define | vse64_v_u64m4_m(...) __riscv_vse64_v_u64m4_m(__VA_ARGS__) |
| #define | vse64_v_u64m8(...) __riscv_vse64_v_u64m8(__VA_ARGS__) |
| #define | vse64_v_u64m8_m(...) __riscv_vse64_v_u64m8_m(__VA_ARGS__) |
| #define | vse8_v_i8m1(...) __riscv_vse8_v_i8m1(__VA_ARGS__) |
| #define | vse8_v_i8m1_m(...) __riscv_vse8_v_i8m1_m(__VA_ARGS__) |
| #define | vse8_v_i8m2(...) __riscv_vse8_v_i8m2(__VA_ARGS__) |
| #define | vse8_v_i8m2_m(...) __riscv_vse8_v_i8m2_m(__VA_ARGS__) |
| #define | vse8_v_i8m4(...) __riscv_vse8_v_i8m4(__VA_ARGS__) |
| #define | vse8_v_i8m4_m(...) __riscv_vse8_v_i8m4_m(__VA_ARGS__) |
| #define | vse8_v_i8m8(...) __riscv_vse8_v_i8m8(__VA_ARGS__) |
| #define | vse8_v_i8m8_m(...) __riscv_vse8_v_i8m8_m(__VA_ARGS__) |
| #define | vse8_v_i8mf2(...) __riscv_vse8_v_i8mf2(__VA_ARGS__) |
| #define | vse8_v_i8mf2_m(...) __riscv_vse8_v_i8mf2_m(__VA_ARGS__) |
| #define | vse8_v_i8mf4(...) __riscv_vse8_v_i8mf4(__VA_ARGS__) |
| #define | vse8_v_i8mf4_m(...) __riscv_vse8_v_i8mf4_m(__VA_ARGS__) |
| #define | vse8_v_i8mf8(...) __riscv_vse8_v_i8mf8(__VA_ARGS__) |
| #define | vse8_v_i8mf8_m(...) __riscv_vse8_v_i8mf8_m(__VA_ARGS__) |
| #define | vse8_v_u8m1(...) __riscv_vse8_v_u8m1(__VA_ARGS__) |
| #define | vse8_v_u8m1_m(...) __riscv_vse8_v_u8m1_m(__VA_ARGS__) |
| #define | vse8_v_u8m2(...) __riscv_vse8_v_u8m2(__VA_ARGS__) |
| #define | vse8_v_u8m2_m(...) __riscv_vse8_v_u8m2_m(__VA_ARGS__) |
| #define | vse8_v_u8m4(...) __riscv_vse8_v_u8m4(__VA_ARGS__) |
| #define | vse8_v_u8m4_m(...) __riscv_vse8_v_u8m4_m(__VA_ARGS__) |
| #define | vse8_v_u8m8(...) __riscv_vse8_v_u8m8(__VA_ARGS__) |
| #define | vse8_v_u8m8_m(...) __riscv_vse8_v_u8m8_m(__VA_ARGS__) |
| #define | vse8_v_u8mf2(...) __riscv_vse8_v_u8mf2(__VA_ARGS__) |
| #define | vse8_v_u8mf2_m(...) __riscv_vse8_v_u8mf2_m(__VA_ARGS__) |
| #define | vse8_v_u8mf4(...) __riscv_vse8_v_u8mf4(__VA_ARGS__) |
| #define | vse8_v_u8mf4_m(...) __riscv_vse8_v_u8mf4_m(__VA_ARGS__) |
| #define | vse8_v_u8mf8(...) __riscv_vse8_v_u8mf8(__VA_ARGS__) |
| #define | vse8_v_u8mf8_m(...) __riscv_vse8_v_u8mf8_m(__VA_ARGS__) |
| #define | vset_v_f16m1_f16m2(...) __riscv_vset_v_f16m1_f16m2(__VA_ARGS__) |
| #define | vset_v_f16m1_f16m4(...) __riscv_vset_v_f16m1_f16m4(__VA_ARGS__) |
| #define | vset_v_f16m1_f16m8(...) __riscv_vset_v_f16m1_f16m8(__VA_ARGS__) |
| #define | vset_v_f16m2_f16m4(...) __riscv_vset_v_f16m2_f16m4(__VA_ARGS__) |
| #define | vset_v_f16m2_f16m8(...) __riscv_vset_v_f16m2_f16m8(__VA_ARGS__) |
| #define | vset_v_f16m4_f16m8(...) __riscv_vset_v_f16m4_f16m8(__VA_ARGS__) |
| #define | vset_v_f32m1_f32m2(...) __riscv_vset_v_f32m1_f32m2(__VA_ARGS__) |
| #define | vset_v_f32m1_f32m4(...) __riscv_vset_v_f32m1_f32m4(__VA_ARGS__) |
| #define | vset_v_f32m1_f32m8(...) __riscv_vset_v_f32m1_f32m8(__VA_ARGS__) |
| #define | vset_v_f32m2_f32m4(...) __riscv_vset_v_f32m2_f32m4(__VA_ARGS__) |
| #define | vset_v_f32m2_f32m8(...) __riscv_vset_v_f32m2_f32m8(__VA_ARGS__) |
| #define | vset_v_f32m4_f32m8(...) __riscv_vset_v_f32m4_f32m8(__VA_ARGS__) |
| #define | vset_v_f64m1_f64m2(...) __riscv_vset_v_f64m1_f64m2(__VA_ARGS__) |
| #define | vset_v_f64m1_f64m4(...) __riscv_vset_v_f64m1_f64m4(__VA_ARGS__) |
| #define | vset_v_f64m1_f64m8(...) __riscv_vset_v_f64m1_f64m8(__VA_ARGS__) |
| #define | vset_v_f64m2_f64m4(...) __riscv_vset_v_f64m2_f64m4(__VA_ARGS__) |
| #define | vset_v_f64m2_f64m8(...) __riscv_vset_v_f64m2_f64m8(__VA_ARGS__) |
| #define | vset_v_f64m4_f64m8(...) __riscv_vset_v_f64m4_f64m8(__VA_ARGS__) |
| #define | vset_v_i16m1_i16m2(...) __riscv_vset_v_i16m1_i16m2(__VA_ARGS__) |
| #define | vset_v_i16m1_i16m4(...) __riscv_vset_v_i16m1_i16m4(__VA_ARGS__) |
| #define | vset_v_i16m1_i16m8(...) __riscv_vset_v_i16m1_i16m8(__VA_ARGS__) |
| #define | vset_v_i16m2_i16m4(...) __riscv_vset_v_i16m2_i16m4(__VA_ARGS__) |
| #define | vset_v_i16m2_i16m8(...) __riscv_vset_v_i16m2_i16m8(__VA_ARGS__) |
| #define | vset_v_i16m4_i16m8(...) __riscv_vset_v_i16m4_i16m8(__VA_ARGS__) |
| #define | vset_v_i32m1_i32m2(...) __riscv_vset_v_i32m1_i32m2(__VA_ARGS__) |
| #define | vset_v_i32m1_i32m4(...) __riscv_vset_v_i32m1_i32m4(__VA_ARGS__) |
| #define | vset_v_i32m1_i32m8(...) __riscv_vset_v_i32m1_i32m8(__VA_ARGS__) |
| #define | vset_v_i32m2_i32m4(...) __riscv_vset_v_i32m2_i32m4(__VA_ARGS__) |
| #define | vset_v_i32m2_i32m8(...) __riscv_vset_v_i32m2_i32m8(__VA_ARGS__) |
| #define | vset_v_i32m4_i32m8(...) __riscv_vset_v_i32m4_i32m8(__VA_ARGS__) |
| #define | vset_v_i64m1_i64m2(...) __riscv_vset_v_i64m1_i64m2(__VA_ARGS__) |
| #define | vset_v_i64m1_i64m4(...) __riscv_vset_v_i64m1_i64m4(__VA_ARGS__) |
| #define | vset_v_i64m1_i64m8(...) __riscv_vset_v_i64m1_i64m8(__VA_ARGS__) |
| #define | vset_v_i64m2_i64m4(...) __riscv_vset_v_i64m2_i64m4(__VA_ARGS__) |
| #define | vset_v_i64m2_i64m8(...) __riscv_vset_v_i64m2_i64m8(__VA_ARGS__) |
| #define | vset_v_i64m4_i64m8(...) __riscv_vset_v_i64m4_i64m8(__VA_ARGS__) |
| #define | vset_v_i8m1_i8m2(...) __riscv_vset_v_i8m1_i8m2(__VA_ARGS__) |
| #define | vset_v_i8m1_i8m4(...) __riscv_vset_v_i8m1_i8m4(__VA_ARGS__) |
| #define | vset_v_i8m1_i8m8(...) __riscv_vset_v_i8m1_i8m8(__VA_ARGS__) |
| #define | vset_v_i8m2_i8m4(...) __riscv_vset_v_i8m2_i8m4(__VA_ARGS__) |
| #define | vset_v_i8m2_i8m8(...) __riscv_vset_v_i8m2_i8m8(__VA_ARGS__) |
| #define | vset_v_i8m4_i8m8(...) __riscv_vset_v_i8m4_i8m8(__VA_ARGS__) |
| #define | vset_v_u16m1_u16m2(...) __riscv_vset_v_u16m1_u16m2(__VA_ARGS__) |
| #define | vset_v_u16m1_u16m4(...) __riscv_vset_v_u16m1_u16m4(__VA_ARGS__) |
| #define | vset_v_u16m1_u16m8(...) __riscv_vset_v_u16m1_u16m8(__VA_ARGS__) |
| #define | vset_v_u16m2_u16m4(...) __riscv_vset_v_u16m2_u16m4(__VA_ARGS__) |
| #define | vset_v_u16m2_u16m8(...) __riscv_vset_v_u16m2_u16m8(__VA_ARGS__) |
| #define | vset_v_u16m4_u16m8(...) __riscv_vset_v_u16m4_u16m8(__VA_ARGS__) |
| #define | vset_v_u32m1_u32m2(...) __riscv_vset_v_u32m1_u32m2(__VA_ARGS__) |
| #define | vset_v_u32m1_u32m4(...) __riscv_vset_v_u32m1_u32m4(__VA_ARGS__) |
| #define | vset_v_u32m1_u32m8(...) __riscv_vset_v_u32m1_u32m8(__VA_ARGS__) |
| #define | vset_v_u32m2_u32m4(...) __riscv_vset_v_u32m2_u32m4(__VA_ARGS__) |
| #define | vset_v_u32m2_u32m8(...) __riscv_vset_v_u32m2_u32m8(__VA_ARGS__) |
| #define | vset_v_u32m4_u32m8(...) __riscv_vset_v_u32m4_u32m8(__VA_ARGS__) |
| #define | vset_v_u64m1_u64m2(...) __riscv_vset_v_u64m1_u64m2(__VA_ARGS__) |
| #define | vset_v_u64m1_u64m4(...) __riscv_vset_v_u64m1_u64m4(__VA_ARGS__) |
| #define | vset_v_u64m1_u64m8(...) __riscv_vset_v_u64m1_u64m8(__VA_ARGS__) |
| #define | vset_v_u64m2_u64m4(...) __riscv_vset_v_u64m2_u64m4(__VA_ARGS__) |
| #define | vset_v_u64m2_u64m8(...) __riscv_vset_v_u64m2_u64m8(__VA_ARGS__) |
| #define | vset_v_u64m4_u64m8(...) __riscv_vset_v_u64m4_u64m8(__VA_ARGS__) |
| #define | vset_v_u8m1_u8m2(...) __riscv_vset_v_u8m1_u8m2(__VA_ARGS__) |
| #define | vset_v_u8m1_u8m4(...) __riscv_vset_v_u8m1_u8m4(__VA_ARGS__) |
| #define | vset_v_u8m1_u8m8(...) __riscv_vset_v_u8m1_u8m8(__VA_ARGS__) |
| #define | vset_v_u8m2_u8m4(...) __riscv_vset_v_u8m2_u8m4(__VA_ARGS__) |
| #define | vset_v_u8m2_u8m8(...) __riscv_vset_v_u8m2_u8m8(__VA_ARGS__) |
| #define | vset_v_u8m4_u8m8(...) __riscv_vset_v_u8m4_u8m8(__VA_ARGS__) |
| #define | vsetvl_e16m1(...) __riscv_vsetvl_e16m1(__VA_ARGS__) |
| #define | vsetvl_e16m2(...) __riscv_vsetvl_e16m2(__VA_ARGS__) |
| #define | vsetvl_e16m4(...) __riscv_vsetvl_e16m4(__VA_ARGS__) |
| #define | vsetvl_e16m8(...) __riscv_vsetvl_e16m8(__VA_ARGS__) |
| #define | vsetvl_e16mf2(...) __riscv_vsetvl_e16mf2(__VA_ARGS__) |
| #define | vsetvl_e16mf4(...) __riscv_vsetvl_e16mf4(__VA_ARGS__) |
| #define | vsetvl_e32m1(...) __riscv_vsetvl_e32m1(__VA_ARGS__) |
| #define | vsetvl_e32m2(...) __riscv_vsetvl_e32m2(__VA_ARGS__) |
| #define | vsetvl_e32m4(...) __riscv_vsetvl_e32m4(__VA_ARGS__) |
| #define | vsetvl_e32m8(...) __riscv_vsetvl_e32m8(__VA_ARGS__) |
| #define | vsetvl_e32mf2(...) __riscv_vsetvl_e32mf2(__VA_ARGS__) |
| #define | vsetvl_e64m1(...) __riscv_vsetvl_e64m1(__VA_ARGS__) |
| #define | vsetvl_e64m2(...) __riscv_vsetvl_e64m2(__VA_ARGS__) |
| #define | vsetvl_e64m4(...) __riscv_vsetvl_e64m4(__VA_ARGS__) |
| #define | vsetvl_e64m8(...) __riscv_vsetvl_e64m8(__VA_ARGS__) |
| #define | vsetvl_e8m1(...) __riscv_vsetvl_e8m1(__VA_ARGS__) |
| #define | vsetvl_e8m2(...) __riscv_vsetvl_e8m2(__VA_ARGS__) |
| #define | vsetvl_e8m4(...) __riscv_vsetvl_e8m4(__VA_ARGS__) |
| #define | vsetvl_e8m8(...) __riscv_vsetvl_e8m8(__VA_ARGS__) |
| #define | vsetvl_e8mf2(...) __riscv_vsetvl_e8mf2(__VA_ARGS__) |
| #define | vsetvl_e8mf4(...) __riscv_vsetvl_e8mf4(__VA_ARGS__) |
| #define | vsetvl_e8mf8(...) __riscv_vsetvl_e8mf8(__VA_ARGS__) |
| #define | vsetvlmax_e16m1(...) __riscv_vsetvlmax_e16m1(__VA_ARGS__) |
| #define | vsetvlmax_e16m2(...) __riscv_vsetvlmax_e16m2(__VA_ARGS__) |
| #define | vsetvlmax_e16m4(...) __riscv_vsetvlmax_e16m4(__VA_ARGS__) |
| #define | vsetvlmax_e16m8(...) __riscv_vsetvlmax_e16m8(__VA_ARGS__) |
| #define | vsetvlmax_e16mf2(...) __riscv_vsetvlmax_e16mf2(__VA_ARGS__) |
| #define | vsetvlmax_e16mf4(...) __riscv_vsetvlmax_e16mf4(__VA_ARGS__) |
| #define | vsetvlmax_e32m1(...) __riscv_vsetvlmax_e32m1(__VA_ARGS__) |
| #define | vsetvlmax_e32m2(...) __riscv_vsetvlmax_e32m2(__VA_ARGS__) |
| #define | vsetvlmax_e32m4(...) __riscv_vsetvlmax_e32m4(__VA_ARGS__) |
| #define | vsetvlmax_e32m8(...) __riscv_vsetvlmax_e32m8(__VA_ARGS__) |
| #define | vsetvlmax_e32mf2(...) __riscv_vsetvlmax_e32mf2(__VA_ARGS__) |
| #define | vsetvlmax_e64m1(...) __riscv_vsetvlmax_e64m1(__VA_ARGS__) |
| #define | vsetvlmax_e64m2(...) __riscv_vsetvlmax_e64m2(__VA_ARGS__) |
| #define | vsetvlmax_e64m4(...) __riscv_vsetvlmax_e64m4(__VA_ARGS__) |
| #define | vsetvlmax_e64m8(...) __riscv_vsetvlmax_e64m8(__VA_ARGS__) |
| #define | vsetvlmax_e8m1(...) __riscv_vsetvlmax_e8m1(__VA_ARGS__) |
| #define | vsetvlmax_e8m2(...) __riscv_vsetvlmax_e8m2(__VA_ARGS__) |
| #define | vsetvlmax_e8m4(...) __riscv_vsetvlmax_e8m4(__VA_ARGS__) |
| #define | vsetvlmax_e8m8(...) __riscv_vsetvlmax_e8m8(__VA_ARGS__) |
| #define | vsetvlmax_e8mf2(...) __riscv_vsetvlmax_e8mf2(__VA_ARGS__) |
| #define | vsetvlmax_e8mf4(...) __riscv_vsetvlmax_e8mf4(__VA_ARGS__) |
| #define | vsetvlmax_e8mf8(...) __riscv_vsetvlmax_e8mf8(__VA_ARGS__) |
| #define | vsext_vf2_i16m1(...) __riscv_vsext_vf2_i16m1(__VA_ARGS__) |
| #define | vsext_vf2_i16m1_m(...) __riscv_vsext_vf2_i16m1_tumu(__VA_ARGS__) |
| #define | vsext_vf2_i16m2(...) __riscv_vsext_vf2_i16m2(__VA_ARGS__) |
| #define | vsext_vf2_i16m2_m(...) __riscv_vsext_vf2_i16m2_tumu(__VA_ARGS__) |
| #define | vsext_vf2_i16m4(...) __riscv_vsext_vf2_i16m4(__VA_ARGS__) |
| #define | vsext_vf2_i16m4_m(...) __riscv_vsext_vf2_i16m4_tumu(__VA_ARGS__) |
| #define | vsext_vf2_i16m8(...) __riscv_vsext_vf2_i16m8(__VA_ARGS__) |
| #define | vsext_vf2_i16m8_m(...) __riscv_vsext_vf2_i16m8_tumu(__VA_ARGS__) |
| #define | vsext_vf2_i16mf2(...) __riscv_vsext_vf2_i16mf2(__VA_ARGS__) |
| #define | vsext_vf2_i16mf2_m(...) __riscv_vsext_vf2_i16mf2_tumu(__VA_ARGS__) |
| #define | vsext_vf2_i16mf4(...) __riscv_vsext_vf2_i16mf4(__VA_ARGS__) |
| #define | vsext_vf2_i16mf4_m(...) __riscv_vsext_vf2_i16mf4_tumu(__VA_ARGS__) |
| #define | vsext_vf2_i32m1(...) __riscv_vsext_vf2_i32m1(__VA_ARGS__) |
| #define | vsext_vf2_i32m1_m(...) __riscv_vsext_vf2_i32m1_tumu(__VA_ARGS__) |
| #define | vsext_vf2_i32m2(...) __riscv_vsext_vf2_i32m2(__VA_ARGS__) |
| #define | vsext_vf2_i32m2_m(...) __riscv_vsext_vf2_i32m2_tumu(__VA_ARGS__) |
| #define | vsext_vf2_i32m4(...) __riscv_vsext_vf2_i32m4(__VA_ARGS__) |
| #define | vsext_vf2_i32m4_m(...) __riscv_vsext_vf2_i32m4_tumu(__VA_ARGS__) |
| #define | vsext_vf2_i32m8(...) __riscv_vsext_vf2_i32m8(__VA_ARGS__) |
| #define | vsext_vf2_i32m8_m(...) __riscv_vsext_vf2_i32m8_tumu(__VA_ARGS__) |
| #define | vsext_vf2_i32mf2(...) __riscv_vsext_vf2_i32mf2(__VA_ARGS__) |
| #define | vsext_vf2_i32mf2_m(...) __riscv_vsext_vf2_i32mf2_tumu(__VA_ARGS__) |
| #define | vsext_vf2_i64m1(...) __riscv_vsext_vf2_i64m1(__VA_ARGS__) |
| #define | vsext_vf2_i64m1_m(...) __riscv_vsext_vf2_i64m1_tumu(__VA_ARGS__) |
| #define | vsext_vf2_i64m2(...) __riscv_vsext_vf2_i64m2(__VA_ARGS__) |
| #define | vsext_vf2_i64m2_m(...) __riscv_vsext_vf2_i64m2_tumu(__VA_ARGS__) |
| #define | vsext_vf2_i64m4(...) __riscv_vsext_vf2_i64m4(__VA_ARGS__) |
| #define | vsext_vf2_i64m4_m(...) __riscv_vsext_vf2_i64m4_tumu(__VA_ARGS__) |
| #define | vsext_vf2_i64m8(...) __riscv_vsext_vf2_i64m8(__VA_ARGS__) |
| #define | vsext_vf2_i64m8_m(...) __riscv_vsext_vf2_i64m8_tumu(__VA_ARGS__) |
| #define | vsext_vf4_i32m1(...) __riscv_vsext_vf4_i32m1(__VA_ARGS__) |
| #define | vsext_vf4_i32m1_m(...) __riscv_vsext_vf4_i32m1_tumu(__VA_ARGS__) |
| #define | vsext_vf4_i32m2(...) __riscv_vsext_vf4_i32m2(__VA_ARGS__) |
| #define | vsext_vf4_i32m2_m(...) __riscv_vsext_vf4_i32m2_tumu(__VA_ARGS__) |
| #define | vsext_vf4_i32m4(...) __riscv_vsext_vf4_i32m4(__VA_ARGS__) |
| #define | vsext_vf4_i32m4_m(...) __riscv_vsext_vf4_i32m4_tumu(__VA_ARGS__) |
| #define | vsext_vf4_i32m8(...) __riscv_vsext_vf4_i32m8(__VA_ARGS__) |
| #define | vsext_vf4_i32m8_m(...) __riscv_vsext_vf4_i32m8_tumu(__VA_ARGS__) |
| #define | vsext_vf4_i32mf2(...) __riscv_vsext_vf4_i32mf2(__VA_ARGS__) |
| #define | vsext_vf4_i32mf2_m(...) __riscv_vsext_vf4_i32mf2_tumu(__VA_ARGS__) |
| #define | vsext_vf4_i64m1(...) __riscv_vsext_vf4_i64m1(__VA_ARGS__) |
| #define | vsext_vf4_i64m1_m(...) __riscv_vsext_vf4_i64m1_tumu(__VA_ARGS__) |
| #define | vsext_vf4_i64m2(...) __riscv_vsext_vf4_i64m2(__VA_ARGS__) |
| #define | vsext_vf4_i64m2_m(...) __riscv_vsext_vf4_i64m2_tumu(__VA_ARGS__) |
| #define | vsext_vf4_i64m4(...) __riscv_vsext_vf4_i64m4(__VA_ARGS__) |
| #define | vsext_vf4_i64m4_m(...) __riscv_vsext_vf4_i64m4_tumu(__VA_ARGS__) |
| #define | vsext_vf4_i64m8(...) __riscv_vsext_vf4_i64m8(__VA_ARGS__) |
| #define | vsext_vf4_i64m8_m(...) __riscv_vsext_vf4_i64m8_tumu(__VA_ARGS__) |
| #define | vsext_vf8_i64m1(...) __riscv_vsext_vf8_i64m1(__VA_ARGS__) |
| #define | vsext_vf8_i64m1_m(...) __riscv_vsext_vf8_i64m1_tumu(__VA_ARGS__) |
| #define | vsext_vf8_i64m2(...) __riscv_vsext_vf8_i64m2(__VA_ARGS__) |
| #define | vsext_vf8_i64m2_m(...) __riscv_vsext_vf8_i64m2_tumu(__VA_ARGS__) |
| #define | vsext_vf8_i64m4(...) __riscv_vsext_vf8_i64m4(__VA_ARGS__) |
| #define | vsext_vf8_i64m4_m(...) __riscv_vsext_vf8_i64m4_tumu(__VA_ARGS__) |
| #define | vsext_vf8_i64m8(...) __riscv_vsext_vf8_i64m8(__VA_ARGS__) |
| #define | vsext_vf8_i64m8_m(...) __riscv_vsext_vf8_i64m8_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i16m1(...) __riscv_vslide1down_vx_i16m1(__VA_ARGS__) |
| #define | vslide1down_vx_i16m1_m(...) __riscv_vslide1down_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i16m2(...) __riscv_vslide1down_vx_i16m2(__VA_ARGS__) |
| #define | vslide1down_vx_i16m2_m(...) __riscv_vslide1down_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i16m4(...) __riscv_vslide1down_vx_i16m4(__VA_ARGS__) |
| #define | vslide1down_vx_i16m4_m(...) __riscv_vslide1down_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i16m8(...) __riscv_vslide1down_vx_i16m8(__VA_ARGS__) |
| #define | vslide1down_vx_i16m8_m(...) __riscv_vslide1down_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i16mf2(...) __riscv_vslide1down_vx_i16mf2(__VA_ARGS__) |
| #define | vslide1down_vx_i16mf2_m(...) __riscv_vslide1down_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i16mf4(...) __riscv_vslide1down_vx_i16mf4(__VA_ARGS__) |
| #define | vslide1down_vx_i16mf4_m(...) __riscv_vslide1down_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i32m1(...) __riscv_vslide1down_vx_i32m1(__VA_ARGS__) |
| #define | vslide1down_vx_i32m1_m(...) __riscv_vslide1down_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i32m2(...) __riscv_vslide1down_vx_i32m2(__VA_ARGS__) |
| #define | vslide1down_vx_i32m2_m(...) __riscv_vslide1down_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i32m4(...) __riscv_vslide1down_vx_i32m4(__VA_ARGS__) |
| #define | vslide1down_vx_i32m4_m(...) __riscv_vslide1down_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i32m8(...) __riscv_vslide1down_vx_i32m8(__VA_ARGS__) |
| #define | vslide1down_vx_i32m8_m(...) __riscv_vslide1down_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i32mf2(...) __riscv_vslide1down_vx_i32mf2(__VA_ARGS__) |
| #define | vslide1down_vx_i32mf2_m(...) __riscv_vslide1down_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i64m1(...) __riscv_vslide1down_vx_i64m1(__VA_ARGS__) |
| #define | vslide1down_vx_i64m1_m(...) __riscv_vslide1down_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i64m2(...) __riscv_vslide1down_vx_i64m2(__VA_ARGS__) |
| #define | vslide1down_vx_i64m2_m(...) __riscv_vslide1down_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i64m4(...) __riscv_vslide1down_vx_i64m4(__VA_ARGS__) |
| #define | vslide1down_vx_i64m4_m(...) __riscv_vslide1down_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i64m8(...) __riscv_vslide1down_vx_i64m8(__VA_ARGS__) |
| #define | vslide1down_vx_i64m8_m(...) __riscv_vslide1down_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i8m1(...) __riscv_vslide1down_vx_i8m1(__VA_ARGS__) |
| #define | vslide1down_vx_i8m1_m(...) __riscv_vslide1down_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i8m2(...) __riscv_vslide1down_vx_i8m2(__VA_ARGS__) |
| #define | vslide1down_vx_i8m2_m(...) __riscv_vslide1down_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i8m4(...) __riscv_vslide1down_vx_i8m4(__VA_ARGS__) |
| #define | vslide1down_vx_i8m4_m(...) __riscv_vslide1down_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i8m8(...) __riscv_vslide1down_vx_i8m8(__VA_ARGS__) |
| #define | vslide1down_vx_i8m8_m(...) __riscv_vslide1down_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i8mf2(...) __riscv_vslide1down_vx_i8mf2(__VA_ARGS__) |
| #define | vslide1down_vx_i8mf2_m(...) __riscv_vslide1down_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i8mf4(...) __riscv_vslide1down_vx_i8mf4(__VA_ARGS__) |
| #define | vslide1down_vx_i8mf4_m(...) __riscv_vslide1down_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_i8mf8(...) __riscv_vslide1down_vx_i8mf8(__VA_ARGS__) |
| #define | vslide1down_vx_i8mf8_m(...) __riscv_vslide1down_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u16m1(...) __riscv_vslide1down_vx_u16m1(__VA_ARGS__) |
| #define | vslide1down_vx_u16m1_m(...) __riscv_vslide1down_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u16m2(...) __riscv_vslide1down_vx_u16m2(__VA_ARGS__) |
| #define | vslide1down_vx_u16m2_m(...) __riscv_vslide1down_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u16m4(...) __riscv_vslide1down_vx_u16m4(__VA_ARGS__) |
| #define | vslide1down_vx_u16m4_m(...) __riscv_vslide1down_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u16m8(...) __riscv_vslide1down_vx_u16m8(__VA_ARGS__) |
| #define | vslide1down_vx_u16m8_m(...) __riscv_vslide1down_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u16mf2(...) __riscv_vslide1down_vx_u16mf2(__VA_ARGS__) |
| #define | vslide1down_vx_u16mf2_m(...) __riscv_vslide1down_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u16mf4(...) __riscv_vslide1down_vx_u16mf4(__VA_ARGS__) |
| #define | vslide1down_vx_u16mf4_m(...) __riscv_vslide1down_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u32m1(...) __riscv_vslide1down_vx_u32m1(__VA_ARGS__) |
| #define | vslide1down_vx_u32m1_m(...) __riscv_vslide1down_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u32m2(...) __riscv_vslide1down_vx_u32m2(__VA_ARGS__) |
| #define | vslide1down_vx_u32m2_m(...) __riscv_vslide1down_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u32m4(...) __riscv_vslide1down_vx_u32m4(__VA_ARGS__) |
| #define | vslide1down_vx_u32m4_m(...) __riscv_vslide1down_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u32m8(...) __riscv_vslide1down_vx_u32m8(__VA_ARGS__) |
| #define | vslide1down_vx_u32m8_m(...) __riscv_vslide1down_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u32mf2(...) __riscv_vslide1down_vx_u32mf2(__VA_ARGS__) |
| #define | vslide1down_vx_u32mf2_m(...) __riscv_vslide1down_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u64m1(...) __riscv_vslide1down_vx_u64m1(__VA_ARGS__) |
| #define | vslide1down_vx_u64m1_m(...) __riscv_vslide1down_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u64m2(...) __riscv_vslide1down_vx_u64m2(__VA_ARGS__) |
| #define | vslide1down_vx_u64m2_m(...) __riscv_vslide1down_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u64m4(...) __riscv_vslide1down_vx_u64m4(__VA_ARGS__) |
| #define | vslide1down_vx_u64m4_m(...) __riscv_vslide1down_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u64m8(...) __riscv_vslide1down_vx_u64m8(__VA_ARGS__) |
| #define | vslide1down_vx_u64m8_m(...) __riscv_vslide1down_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u8m1(...) __riscv_vslide1down_vx_u8m1(__VA_ARGS__) |
| #define | vslide1down_vx_u8m1_m(...) __riscv_vslide1down_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u8m2(...) __riscv_vslide1down_vx_u8m2(__VA_ARGS__) |
| #define | vslide1down_vx_u8m2_m(...) __riscv_vslide1down_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u8m4(...) __riscv_vslide1down_vx_u8m4(__VA_ARGS__) |
| #define | vslide1down_vx_u8m4_m(...) __riscv_vslide1down_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u8m8(...) __riscv_vslide1down_vx_u8m8(__VA_ARGS__) |
| #define | vslide1down_vx_u8m8_m(...) __riscv_vslide1down_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u8mf2(...) __riscv_vslide1down_vx_u8mf2(__VA_ARGS__) |
| #define | vslide1down_vx_u8mf2_m(...) __riscv_vslide1down_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u8mf4(...) __riscv_vslide1down_vx_u8mf4(__VA_ARGS__) |
| #define | vslide1down_vx_u8mf4_m(...) __riscv_vslide1down_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vslide1down_vx_u8mf8(...) __riscv_vslide1down_vx_u8mf8(__VA_ARGS__) |
| #define | vslide1down_vx_u8mf8_m(...) __riscv_vslide1down_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i16m1(...) __riscv_vslide1up_vx_i16m1(__VA_ARGS__) |
| #define | vslide1up_vx_i16m1_m(...) __riscv_vslide1up_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i16m2(...) __riscv_vslide1up_vx_i16m2(__VA_ARGS__) |
| #define | vslide1up_vx_i16m2_m(...) __riscv_vslide1up_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i16m4(...) __riscv_vslide1up_vx_i16m4(__VA_ARGS__) |
| #define | vslide1up_vx_i16m4_m(...) __riscv_vslide1up_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i16m8(...) __riscv_vslide1up_vx_i16m8(__VA_ARGS__) |
| #define | vslide1up_vx_i16m8_m(...) __riscv_vslide1up_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i16mf2(...) __riscv_vslide1up_vx_i16mf2(__VA_ARGS__) |
| #define | vslide1up_vx_i16mf2_m(...) __riscv_vslide1up_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i16mf4(...) __riscv_vslide1up_vx_i16mf4(__VA_ARGS__) |
| #define | vslide1up_vx_i16mf4_m(...) __riscv_vslide1up_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i32m1(...) __riscv_vslide1up_vx_i32m1(__VA_ARGS__) |
| #define | vslide1up_vx_i32m1_m(...) __riscv_vslide1up_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i32m2(...) __riscv_vslide1up_vx_i32m2(__VA_ARGS__) |
| #define | vslide1up_vx_i32m2_m(...) __riscv_vslide1up_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i32m4(...) __riscv_vslide1up_vx_i32m4(__VA_ARGS__) |
| #define | vslide1up_vx_i32m4_m(...) __riscv_vslide1up_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i32m8(...) __riscv_vslide1up_vx_i32m8(__VA_ARGS__) |
| #define | vslide1up_vx_i32m8_m(...) __riscv_vslide1up_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i32mf2(...) __riscv_vslide1up_vx_i32mf2(__VA_ARGS__) |
| #define | vslide1up_vx_i32mf2_m(...) __riscv_vslide1up_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i64m1(...) __riscv_vslide1up_vx_i64m1(__VA_ARGS__) |
| #define | vslide1up_vx_i64m1_m(...) __riscv_vslide1up_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i64m2(...) __riscv_vslide1up_vx_i64m2(__VA_ARGS__) |
| #define | vslide1up_vx_i64m2_m(...) __riscv_vslide1up_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i64m4(...) __riscv_vslide1up_vx_i64m4(__VA_ARGS__) |
| #define | vslide1up_vx_i64m4_m(...) __riscv_vslide1up_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i64m8(...) __riscv_vslide1up_vx_i64m8(__VA_ARGS__) |
| #define | vslide1up_vx_i64m8_m(...) __riscv_vslide1up_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i8m1(...) __riscv_vslide1up_vx_i8m1(__VA_ARGS__) |
| #define | vslide1up_vx_i8m1_m(...) __riscv_vslide1up_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i8m2(...) __riscv_vslide1up_vx_i8m2(__VA_ARGS__) |
| #define | vslide1up_vx_i8m2_m(...) __riscv_vslide1up_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i8m4(...) __riscv_vslide1up_vx_i8m4(__VA_ARGS__) |
| #define | vslide1up_vx_i8m4_m(...) __riscv_vslide1up_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i8m8(...) __riscv_vslide1up_vx_i8m8(__VA_ARGS__) |
| #define | vslide1up_vx_i8m8_m(...) __riscv_vslide1up_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i8mf2(...) __riscv_vslide1up_vx_i8mf2(__VA_ARGS__) |
| #define | vslide1up_vx_i8mf2_m(...) __riscv_vslide1up_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i8mf4(...) __riscv_vslide1up_vx_i8mf4(__VA_ARGS__) |
| #define | vslide1up_vx_i8mf4_m(...) __riscv_vslide1up_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_i8mf8(...) __riscv_vslide1up_vx_i8mf8(__VA_ARGS__) |
| #define | vslide1up_vx_i8mf8_m(...) __riscv_vslide1up_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u16m1(...) __riscv_vslide1up_vx_u16m1(__VA_ARGS__) |
| #define | vslide1up_vx_u16m1_m(...) __riscv_vslide1up_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u16m2(...) __riscv_vslide1up_vx_u16m2(__VA_ARGS__) |
| #define | vslide1up_vx_u16m2_m(...) __riscv_vslide1up_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u16m4(...) __riscv_vslide1up_vx_u16m4(__VA_ARGS__) |
| #define | vslide1up_vx_u16m4_m(...) __riscv_vslide1up_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u16m8(...) __riscv_vslide1up_vx_u16m8(__VA_ARGS__) |
| #define | vslide1up_vx_u16m8_m(...) __riscv_vslide1up_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u16mf2(...) __riscv_vslide1up_vx_u16mf2(__VA_ARGS__) |
| #define | vslide1up_vx_u16mf2_m(...) __riscv_vslide1up_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u16mf4(...) __riscv_vslide1up_vx_u16mf4(__VA_ARGS__) |
| #define | vslide1up_vx_u16mf4_m(...) __riscv_vslide1up_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u32m1(...) __riscv_vslide1up_vx_u32m1(__VA_ARGS__) |
| #define | vslide1up_vx_u32m1_m(...) __riscv_vslide1up_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u32m2(...) __riscv_vslide1up_vx_u32m2(__VA_ARGS__) |
| #define | vslide1up_vx_u32m2_m(...) __riscv_vslide1up_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u32m4(...) __riscv_vslide1up_vx_u32m4(__VA_ARGS__) |
| #define | vslide1up_vx_u32m4_m(...) __riscv_vslide1up_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u32m8(...) __riscv_vslide1up_vx_u32m8(__VA_ARGS__) |
| #define | vslide1up_vx_u32m8_m(...) __riscv_vslide1up_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u32mf2(...) __riscv_vslide1up_vx_u32mf2(__VA_ARGS__) |
| #define | vslide1up_vx_u32mf2_m(...) __riscv_vslide1up_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u64m1(...) __riscv_vslide1up_vx_u64m1(__VA_ARGS__) |
| #define | vslide1up_vx_u64m1_m(...) __riscv_vslide1up_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u64m2(...) __riscv_vslide1up_vx_u64m2(__VA_ARGS__) |
| #define | vslide1up_vx_u64m2_m(...) __riscv_vslide1up_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u64m4(...) __riscv_vslide1up_vx_u64m4(__VA_ARGS__) |
| #define | vslide1up_vx_u64m4_m(...) __riscv_vslide1up_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u64m8(...) __riscv_vslide1up_vx_u64m8(__VA_ARGS__) |
| #define | vslide1up_vx_u64m8_m(...) __riscv_vslide1up_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u8m1(...) __riscv_vslide1up_vx_u8m1(__VA_ARGS__) |
| #define | vslide1up_vx_u8m1_m(...) __riscv_vslide1up_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u8m2(...) __riscv_vslide1up_vx_u8m2(__VA_ARGS__) |
| #define | vslide1up_vx_u8m2_m(...) __riscv_vslide1up_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u8m4(...) __riscv_vslide1up_vx_u8m4(__VA_ARGS__) |
| #define | vslide1up_vx_u8m4_m(...) __riscv_vslide1up_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u8m8(...) __riscv_vslide1up_vx_u8m8(__VA_ARGS__) |
| #define | vslide1up_vx_u8m8_m(...) __riscv_vslide1up_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u8mf2(...) __riscv_vslide1up_vx_u8mf2(__VA_ARGS__) |
| #define | vslide1up_vx_u8mf2_m(...) __riscv_vslide1up_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u8mf4(...) __riscv_vslide1up_vx_u8mf4(__VA_ARGS__) |
| #define | vslide1up_vx_u8mf4_m(...) __riscv_vslide1up_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vslide1up_vx_u8mf8(...) __riscv_vslide1up_vx_u8mf8(__VA_ARGS__) |
| #define | vslide1up_vx_u8mf8_m(...) __riscv_vslide1up_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_f16m1(...) __riscv_vslidedown_vx_f16m1_tu(__VA_ARGS__) |
| #define | vslidedown_vx_f16m1_m(...) __riscv_vslidedown_vx_f16m1_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_f16m2(...) __riscv_vslidedown_vx_f16m2_tu(__VA_ARGS__) |
| #define | vslidedown_vx_f16m2_m(...) __riscv_vslidedown_vx_f16m2_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_f16m4(...) __riscv_vslidedown_vx_f16m4_tu(__VA_ARGS__) |
| #define | vslidedown_vx_f16m4_m(...) __riscv_vslidedown_vx_f16m4_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_f16m8(...) __riscv_vslidedown_vx_f16m8_tu(__VA_ARGS__) |
| #define | vslidedown_vx_f16m8_m(...) __riscv_vslidedown_vx_f16m8_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_f16mf2(...) __riscv_vslidedown_vx_f16mf2_tu(__VA_ARGS__) |
| #define | vslidedown_vx_f16mf2_m(...) __riscv_vslidedown_vx_f16mf2_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_f16mf4(...) __riscv_vslidedown_vx_f16mf4_tu(__VA_ARGS__) |
| #define | vslidedown_vx_f16mf4_m(...) __riscv_vslidedown_vx_f16mf4_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_f32m1(...) __riscv_vslidedown_vx_f32m1_tu(__VA_ARGS__) |
| #define | vslidedown_vx_f32m1_m(...) __riscv_vslidedown_vx_f32m1_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_f32m2(...) __riscv_vslidedown_vx_f32m2_tu(__VA_ARGS__) |
| #define | vslidedown_vx_f32m2_m(...) __riscv_vslidedown_vx_f32m2_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_f32m4(...) __riscv_vslidedown_vx_f32m4_tu(__VA_ARGS__) |
| #define | vslidedown_vx_f32m4_m(...) __riscv_vslidedown_vx_f32m4_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_f32m8(...) __riscv_vslidedown_vx_f32m8_tu(__VA_ARGS__) |
| #define | vslidedown_vx_f32m8_m(...) __riscv_vslidedown_vx_f32m8_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_f32mf2(...) __riscv_vslidedown_vx_f32mf2_tu(__VA_ARGS__) |
| #define | vslidedown_vx_f32mf2_m(...) __riscv_vslidedown_vx_f32mf2_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_f64m1(...) __riscv_vslidedown_vx_f64m1_tu(__VA_ARGS__) |
| #define | vslidedown_vx_f64m1_m(...) __riscv_vslidedown_vx_f64m1_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_f64m2(...) __riscv_vslidedown_vx_f64m2_tu(__VA_ARGS__) |
| #define | vslidedown_vx_f64m2_m(...) __riscv_vslidedown_vx_f64m2_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_f64m4(...) __riscv_vslidedown_vx_f64m4_tu(__VA_ARGS__) |
| #define | vslidedown_vx_f64m4_m(...) __riscv_vslidedown_vx_f64m4_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_f64m8(...) __riscv_vslidedown_vx_f64m8_tu(__VA_ARGS__) |
| #define | vslidedown_vx_f64m8_m(...) __riscv_vslidedown_vx_f64m8_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i16m1(...) __riscv_vslidedown_vx_i16m1_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i16m1_m(...) __riscv_vslidedown_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i16m2(...) __riscv_vslidedown_vx_i16m2_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i16m2_m(...) __riscv_vslidedown_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i16m4(...) __riscv_vslidedown_vx_i16m4_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i16m4_m(...) __riscv_vslidedown_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i16m8(...) __riscv_vslidedown_vx_i16m8_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i16m8_m(...) __riscv_vslidedown_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i16mf2(...) __riscv_vslidedown_vx_i16mf2_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i16mf2_m(...) __riscv_vslidedown_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i16mf4(...) __riscv_vslidedown_vx_i16mf4_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i16mf4_m(...) __riscv_vslidedown_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i32m1(...) __riscv_vslidedown_vx_i32m1_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i32m1_m(...) __riscv_vslidedown_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i32m2(...) __riscv_vslidedown_vx_i32m2_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i32m2_m(...) __riscv_vslidedown_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i32m4(...) __riscv_vslidedown_vx_i32m4_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i32m4_m(...) __riscv_vslidedown_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i32m8(...) __riscv_vslidedown_vx_i32m8_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i32m8_m(...) __riscv_vslidedown_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i32mf2(...) __riscv_vslidedown_vx_i32mf2_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i32mf2_m(...) __riscv_vslidedown_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i64m1(...) __riscv_vslidedown_vx_i64m1_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i64m1_m(...) __riscv_vslidedown_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i64m2(...) __riscv_vslidedown_vx_i64m2_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i64m2_m(...) __riscv_vslidedown_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i64m4(...) __riscv_vslidedown_vx_i64m4_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i64m4_m(...) __riscv_vslidedown_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i64m8(...) __riscv_vslidedown_vx_i64m8_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i64m8_m(...) __riscv_vslidedown_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i8m1(...) __riscv_vslidedown_vx_i8m1_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i8m1_m(...) __riscv_vslidedown_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i8m2(...) __riscv_vslidedown_vx_i8m2_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i8m2_m(...) __riscv_vslidedown_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i8m4(...) __riscv_vslidedown_vx_i8m4_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i8m4_m(...) __riscv_vslidedown_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i8m8(...) __riscv_vslidedown_vx_i8m8_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i8m8_m(...) __riscv_vslidedown_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i8mf2(...) __riscv_vslidedown_vx_i8mf2_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i8mf2_m(...) __riscv_vslidedown_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i8mf4(...) __riscv_vslidedown_vx_i8mf4_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i8mf4_m(...) __riscv_vslidedown_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_i8mf8(...) __riscv_vslidedown_vx_i8mf8_tu(__VA_ARGS__) |
| #define | vslidedown_vx_i8mf8_m(...) __riscv_vslidedown_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u16m1(...) __riscv_vslidedown_vx_u16m1_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u16m1_m(...) __riscv_vslidedown_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u16m2(...) __riscv_vslidedown_vx_u16m2_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u16m2_m(...) __riscv_vslidedown_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u16m4(...) __riscv_vslidedown_vx_u16m4_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u16m4_m(...) __riscv_vslidedown_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u16m8(...) __riscv_vslidedown_vx_u16m8_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u16m8_m(...) __riscv_vslidedown_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u16mf2(...) __riscv_vslidedown_vx_u16mf2_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u16mf2_m(...) __riscv_vslidedown_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u16mf4(...) __riscv_vslidedown_vx_u16mf4_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u16mf4_m(...) __riscv_vslidedown_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u32m1(...) __riscv_vslidedown_vx_u32m1_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u32m1_m(...) __riscv_vslidedown_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u32m2(...) __riscv_vslidedown_vx_u32m2_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u32m2_m(...) __riscv_vslidedown_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u32m4(...) __riscv_vslidedown_vx_u32m4_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u32m4_m(...) __riscv_vslidedown_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u32m8(...) __riscv_vslidedown_vx_u32m8_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u32m8_m(...) __riscv_vslidedown_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u32mf2(...) __riscv_vslidedown_vx_u32mf2_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u32mf2_m(...) __riscv_vslidedown_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u64m1(...) __riscv_vslidedown_vx_u64m1_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u64m1_m(...) __riscv_vslidedown_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u64m2(...) __riscv_vslidedown_vx_u64m2_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u64m2_m(...) __riscv_vslidedown_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u64m4(...) __riscv_vslidedown_vx_u64m4_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u64m4_m(...) __riscv_vslidedown_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u64m8(...) __riscv_vslidedown_vx_u64m8_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u64m8_m(...) __riscv_vslidedown_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u8m1(...) __riscv_vslidedown_vx_u8m1_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u8m1_m(...) __riscv_vslidedown_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u8m2(...) __riscv_vslidedown_vx_u8m2_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u8m2_m(...) __riscv_vslidedown_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u8m4(...) __riscv_vslidedown_vx_u8m4_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u8m4_m(...) __riscv_vslidedown_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u8m8(...) __riscv_vslidedown_vx_u8m8_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u8m8_m(...) __riscv_vslidedown_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u8mf2(...) __riscv_vslidedown_vx_u8mf2_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u8mf2_m(...) __riscv_vslidedown_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u8mf4(...) __riscv_vslidedown_vx_u8mf4_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u8mf4_m(...) __riscv_vslidedown_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vslidedown_vx_u8mf8(...) __riscv_vslidedown_vx_u8mf8_tu(__VA_ARGS__) |
| #define | vslidedown_vx_u8mf8_m(...) __riscv_vslidedown_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vslideup_vx_f16m1(...) __riscv_vslideup_vx_f16m1_tu(__VA_ARGS__) |
| #define | vslideup_vx_f16m1_m(...) __riscv_vslideup_vx_f16m1_tumu(__VA_ARGS__) |
| #define | vslideup_vx_f16m2(...) __riscv_vslideup_vx_f16m2_tu(__VA_ARGS__) |
| #define | vslideup_vx_f16m2_m(...) __riscv_vslideup_vx_f16m2_tumu(__VA_ARGS__) |
| #define | vslideup_vx_f16m4(...) __riscv_vslideup_vx_f16m4_tu(__VA_ARGS__) |
| #define | vslideup_vx_f16m4_m(...) __riscv_vslideup_vx_f16m4_tumu(__VA_ARGS__) |
| #define | vslideup_vx_f16m8(...) __riscv_vslideup_vx_f16m8_tu(__VA_ARGS__) |
| #define | vslideup_vx_f16m8_m(...) __riscv_vslideup_vx_f16m8_tumu(__VA_ARGS__) |
| #define | vslideup_vx_f16mf2(...) __riscv_vslideup_vx_f16mf2_tu(__VA_ARGS__) |
| #define | vslideup_vx_f16mf2_m(...) __riscv_vslideup_vx_f16mf2_tumu(__VA_ARGS__) |
| #define | vslideup_vx_f16mf4(...) __riscv_vslideup_vx_f16mf4_tu(__VA_ARGS__) |
| #define | vslideup_vx_f16mf4_m(...) __riscv_vslideup_vx_f16mf4_tumu(__VA_ARGS__) |
| #define | vslideup_vx_f32m1(...) __riscv_vslideup_vx_f32m1_tu(__VA_ARGS__) |
| #define | vslideup_vx_f32m1_m(...) __riscv_vslideup_vx_f32m1_tumu(__VA_ARGS__) |
| #define | vslideup_vx_f32m2(...) __riscv_vslideup_vx_f32m2_tu(__VA_ARGS__) |
| #define | vslideup_vx_f32m2_m(...) __riscv_vslideup_vx_f32m2_tumu(__VA_ARGS__) |
| #define | vslideup_vx_f32m4(...) __riscv_vslideup_vx_f32m4_tu(__VA_ARGS__) |
| #define | vslideup_vx_f32m4_m(...) __riscv_vslideup_vx_f32m4_tumu(__VA_ARGS__) |
| #define | vslideup_vx_f32m8(...) __riscv_vslideup_vx_f32m8_tu(__VA_ARGS__) |
| #define | vslideup_vx_f32m8_m(...) __riscv_vslideup_vx_f32m8_tumu(__VA_ARGS__) |
| #define | vslideup_vx_f32mf2(...) __riscv_vslideup_vx_f32mf2_tu(__VA_ARGS__) |
| #define | vslideup_vx_f32mf2_m(...) __riscv_vslideup_vx_f32mf2_tumu(__VA_ARGS__) |
| #define | vslideup_vx_f64m1(...) __riscv_vslideup_vx_f64m1_tu(__VA_ARGS__) |
| #define | vslideup_vx_f64m1_m(...) __riscv_vslideup_vx_f64m1_tumu(__VA_ARGS__) |
| #define | vslideup_vx_f64m2(...) __riscv_vslideup_vx_f64m2_tu(__VA_ARGS__) |
| #define | vslideup_vx_f64m2_m(...) __riscv_vslideup_vx_f64m2_tumu(__VA_ARGS__) |
| #define | vslideup_vx_f64m4(...) __riscv_vslideup_vx_f64m4_tu(__VA_ARGS__) |
| #define | vslideup_vx_f64m4_m(...) __riscv_vslideup_vx_f64m4_tumu(__VA_ARGS__) |
| #define | vslideup_vx_f64m8(...) __riscv_vslideup_vx_f64m8_tu(__VA_ARGS__) |
| #define | vslideup_vx_f64m8_m(...) __riscv_vslideup_vx_f64m8_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i16m1(...) __riscv_vslideup_vx_i16m1_tu(__VA_ARGS__) |
| #define | vslideup_vx_i16m1_m(...) __riscv_vslideup_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i16m2(...) __riscv_vslideup_vx_i16m2_tu(__VA_ARGS__) |
| #define | vslideup_vx_i16m2_m(...) __riscv_vslideup_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i16m4(...) __riscv_vslideup_vx_i16m4_tu(__VA_ARGS__) |
| #define | vslideup_vx_i16m4_m(...) __riscv_vslideup_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i16m8(...) __riscv_vslideup_vx_i16m8_tu(__VA_ARGS__) |
| #define | vslideup_vx_i16m8_m(...) __riscv_vslideup_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i16mf2(...) __riscv_vslideup_vx_i16mf2_tu(__VA_ARGS__) |
| #define | vslideup_vx_i16mf2_m(...) __riscv_vslideup_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i16mf4(...) __riscv_vslideup_vx_i16mf4_tu(__VA_ARGS__) |
| #define | vslideup_vx_i16mf4_m(...) __riscv_vslideup_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i32m1(...) __riscv_vslideup_vx_i32m1_tu(__VA_ARGS__) |
| #define | vslideup_vx_i32m1_m(...) __riscv_vslideup_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i32m2(...) __riscv_vslideup_vx_i32m2_tu(__VA_ARGS__) |
| #define | vslideup_vx_i32m2_m(...) __riscv_vslideup_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i32m4(...) __riscv_vslideup_vx_i32m4_tu(__VA_ARGS__) |
| #define | vslideup_vx_i32m4_m(...) __riscv_vslideup_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i32m8(...) __riscv_vslideup_vx_i32m8_tu(__VA_ARGS__) |
| #define | vslideup_vx_i32m8_m(...) __riscv_vslideup_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i32mf2(...) __riscv_vslideup_vx_i32mf2_tu(__VA_ARGS__) |
| #define | vslideup_vx_i32mf2_m(...) __riscv_vslideup_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i64m1(...) __riscv_vslideup_vx_i64m1_tu(__VA_ARGS__) |
| #define | vslideup_vx_i64m1_m(...) __riscv_vslideup_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i64m2(...) __riscv_vslideup_vx_i64m2_tu(__VA_ARGS__) |
| #define | vslideup_vx_i64m2_m(...) __riscv_vslideup_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i64m4(...) __riscv_vslideup_vx_i64m4_tu(__VA_ARGS__) |
| #define | vslideup_vx_i64m4_m(...) __riscv_vslideup_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i64m8(...) __riscv_vslideup_vx_i64m8_tu(__VA_ARGS__) |
| #define | vslideup_vx_i64m8_m(...) __riscv_vslideup_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i8m1(...) __riscv_vslideup_vx_i8m1_tu(__VA_ARGS__) |
| #define | vslideup_vx_i8m1_m(...) __riscv_vslideup_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i8m2(...) __riscv_vslideup_vx_i8m2_tu(__VA_ARGS__) |
| #define | vslideup_vx_i8m2_m(...) __riscv_vslideup_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i8m4(...) __riscv_vslideup_vx_i8m4_tu(__VA_ARGS__) |
| #define | vslideup_vx_i8m4_m(...) __riscv_vslideup_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i8m8(...) __riscv_vslideup_vx_i8m8_tu(__VA_ARGS__) |
| #define | vslideup_vx_i8m8_m(...) __riscv_vslideup_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i8mf2(...) __riscv_vslideup_vx_i8mf2_tu(__VA_ARGS__) |
| #define | vslideup_vx_i8mf2_m(...) __riscv_vslideup_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i8mf4(...) __riscv_vslideup_vx_i8mf4_tu(__VA_ARGS__) |
| #define | vslideup_vx_i8mf4_m(...) __riscv_vslideup_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vslideup_vx_i8mf8(...) __riscv_vslideup_vx_i8mf8_tu(__VA_ARGS__) |
| #define | vslideup_vx_i8mf8_m(...) __riscv_vslideup_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u16m1(...) __riscv_vslideup_vx_u16m1_tu(__VA_ARGS__) |
| #define | vslideup_vx_u16m1_m(...) __riscv_vslideup_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u16m2(...) __riscv_vslideup_vx_u16m2_tu(__VA_ARGS__) |
| #define | vslideup_vx_u16m2_m(...) __riscv_vslideup_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u16m4(...) __riscv_vslideup_vx_u16m4_tu(__VA_ARGS__) |
| #define | vslideup_vx_u16m4_m(...) __riscv_vslideup_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u16m8(...) __riscv_vslideup_vx_u16m8_tu(__VA_ARGS__) |
| #define | vslideup_vx_u16m8_m(...) __riscv_vslideup_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u16mf2(...) __riscv_vslideup_vx_u16mf2_tu(__VA_ARGS__) |
| #define | vslideup_vx_u16mf2_m(...) __riscv_vslideup_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u16mf4(...) __riscv_vslideup_vx_u16mf4_tu(__VA_ARGS__) |
| #define | vslideup_vx_u16mf4_m(...) __riscv_vslideup_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u32m1(...) __riscv_vslideup_vx_u32m1_tu(__VA_ARGS__) |
| #define | vslideup_vx_u32m1_m(...) __riscv_vslideup_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u32m2(...) __riscv_vslideup_vx_u32m2_tu(__VA_ARGS__) |
| #define | vslideup_vx_u32m2_m(...) __riscv_vslideup_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u32m4(...) __riscv_vslideup_vx_u32m4_tu(__VA_ARGS__) |
| #define | vslideup_vx_u32m4_m(...) __riscv_vslideup_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u32m8(...) __riscv_vslideup_vx_u32m8_tu(__VA_ARGS__) |
| #define | vslideup_vx_u32m8_m(...) __riscv_vslideup_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u32mf2(...) __riscv_vslideup_vx_u32mf2_tu(__VA_ARGS__) |
| #define | vslideup_vx_u32mf2_m(...) __riscv_vslideup_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u64m1(...) __riscv_vslideup_vx_u64m1_tu(__VA_ARGS__) |
| #define | vslideup_vx_u64m1_m(...) __riscv_vslideup_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u64m2(...) __riscv_vslideup_vx_u64m2_tu(__VA_ARGS__) |
| #define | vslideup_vx_u64m2_m(...) __riscv_vslideup_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u64m4(...) __riscv_vslideup_vx_u64m4_tu(__VA_ARGS__) |
| #define | vslideup_vx_u64m4_m(...) __riscv_vslideup_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u64m8(...) __riscv_vslideup_vx_u64m8_tu(__VA_ARGS__) |
| #define | vslideup_vx_u64m8_m(...) __riscv_vslideup_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u8m1(...) __riscv_vslideup_vx_u8m1_tu(__VA_ARGS__) |
| #define | vslideup_vx_u8m1_m(...) __riscv_vslideup_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u8m2(...) __riscv_vslideup_vx_u8m2_tu(__VA_ARGS__) |
| #define | vslideup_vx_u8m2_m(...) __riscv_vslideup_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u8m4(...) __riscv_vslideup_vx_u8m4_tu(__VA_ARGS__) |
| #define | vslideup_vx_u8m4_m(...) __riscv_vslideup_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u8m8(...) __riscv_vslideup_vx_u8m8_tu(__VA_ARGS__) |
| #define | vslideup_vx_u8m8_m(...) __riscv_vslideup_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u8mf2(...) __riscv_vslideup_vx_u8mf2_tu(__VA_ARGS__) |
| #define | vslideup_vx_u8mf2_m(...) __riscv_vslideup_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u8mf4(...) __riscv_vslideup_vx_u8mf4_tu(__VA_ARGS__) |
| #define | vslideup_vx_u8mf4_m(...) __riscv_vslideup_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vslideup_vx_u8mf8(...) __riscv_vslideup_vx_u8mf8_tu(__VA_ARGS__) |
| #define | vslideup_vx_u8mf8_m(...) __riscv_vslideup_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vsll_vv_i16m1(...) __riscv_vsll_vv_i16m1(__VA_ARGS__) |
| #define | vsll_vv_i16m1_m(...) __riscv_vsll_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vsll_vv_i16m2(...) __riscv_vsll_vv_i16m2(__VA_ARGS__) |
| #define | vsll_vv_i16m2_m(...) __riscv_vsll_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vsll_vv_i16m4(...) __riscv_vsll_vv_i16m4(__VA_ARGS__) |
| #define | vsll_vv_i16m4_m(...) __riscv_vsll_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vsll_vv_i16m8(...) __riscv_vsll_vv_i16m8(__VA_ARGS__) |
| #define | vsll_vv_i16m8_m(...) __riscv_vsll_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vsll_vv_i16mf2(...) __riscv_vsll_vv_i16mf2(__VA_ARGS__) |
| #define | vsll_vv_i16mf2_m(...) __riscv_vsll_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vsll_vv_i16mf4(...) __riscv_vsll_vv_i16mf4(__VA_ARGS__) |
| #define | vsll_vv_i16mf4_m(...) __riscv_vsll_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vsll_vv_i32m1(...) __riscv_vsll_vv_i32m1(__VA_ARGS__) |
| #define | vsll_vv_i32m1_m(...) __riscv_vsll_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vsll_vv_i32m2(...) __riscv_vsll_vv_i32m2(__VA_ARGS__) |
| #define | vsll_vv_i32m2_m(...) __riscv_vsll_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vsll_vv_i32m4(...) __riscv_vsll_vv_i32m4(__VA_ARGS__) |
| #define | vsll_vv_i32m4_m(...) __riscv_vsll_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vsll_vv_i32m8(...) __riscv_vsll_vv_i32m8(__VA_ARGS__) |
| #define | vsll_vv_i32m8_m(...) __riscv_vsll_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vsll_vv_i32mf2(...) __riscv_vsll_vv_i32mf2(__VA_ARGS__) |
| #define | vsll_vv_i32mf2_m(...) __riscv_vsll_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vsll_vv_i64m1(...) __riscv_vsll_vv_i64m1(__VA_ARGS__) |
| #define | vsll_vv_i64m1_m(...) __riscv_vsll_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vsll_vv_i64m2(...) __riscv_vsll_vv_i64m2(__VA_ARGS__) |
| #define | vsll_vv_i64m2_m(...) __riscv_vsll_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vsll_vv_i64m4(...) __riscv_vsll_vv_i64m4(__VA_ARGS__) |
| #define | vsll_vv_i64m4_m(...) __riscv_vsll_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vsll_vv_i64m8(...) __riscv_vsll_vv_i64m8(__VA_ARGS__) |
| #define | vsll_vv_i64m8_m(...) __riscv_vsll_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vsll_vv_i8m1(...) __riscv_vsll_vv_i8m1(__VA_ARGS__) |
| #define | vsll_vv_i8m1_m(...) __riscv_vsll_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vsll_vv_i8m2(...) __riscv_vsll_vv_i8m2(__VA_ARGS__) |
| #define | vsll_vv_i8m2_m(...) __riscv_vsll_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vsll_vv_i8m4(...) __riscv_vsll_vv_i8m4(__VA_ARGS__) |
| #define | vsll_vv_i8m4_m(...) __riscv_vsll_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vsll_vv_i8m8(...) __riscv_vsll_vv_i8m8(__VA_ARGS__) |
| #define | vsll_vv_i8m8_m(...) __riscv_vsll_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vsll_vv_i8mf2(...) __riscv_vsll_vv_i8mf2(__VA_ARGS__) |
| #define | vsll_vv_i8mf2_m(...) __riscv_vsll_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vsll_vv_i8mf4(...) __riscv_vsll_vv_i8mf4(__VA_ARGS__) |
| #define | vsll_vv_i8mf4_m(...) __riscv_vsll_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vsll_vv_i8mf8(...) __riscv_vsll_vv_i8mf8(__VA_ARGS__) |
| #define | vsll_vv_i8mf8_m(...) __riscv_vsll_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vsll_vv_u16m1(...) __riscv_vsll_vv_u16m1(__VA_ARGS__) |
| #define | vsll_vv_u16m1_m(...) __riscv_vsll_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vsll_vv_u16m2(...) __riscv_vsll_vv_u16m2(__VA_ARGS__) |
| #define | vsll_vv_u16m2_m(...) __riscv_vsll_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vsll_vv_u16m4(...) __riscv_vsll_vv_u16m4(__VA_ARGS__) |
| #define | vsll_vv_u16m4_m(...) __riscv_vsll_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vsll_vv_u16m8(...) __riscv_vsll_vv_u16m8(__VA_ARGS__) |
| #define | vsll_vv_u16m8_m(...) __riscv_vsll_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vsll_vv_u16mf2(...) __riscv_vsll_vv_u16mf2(__VA_ARGS__) |
| #define | vsll_vv_u16mf2_m(...) __riscv_vsll_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vsll_vv_u16mf4(...) __riscv_vsll_vv_u16mf4(__VA_ARGS__) |
| #define | vsll_vv_u16mf4_m(...) __riscv_vsll_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vsll_vv_u32m1(...) __riscv_vsll_vv_u32m1(__VA_ARGS__) |
| #define | vsll_vv_u32m1_m(...) __riscv_vsll_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vsll_vv_u32m2(...) __riscv_vsll_vv_u32m2(__VA_ARGS__) |
| #define | vsll_vv_u32m2_m(...) __riscv_vsll_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vsll_vv_u32m4(...) __riscv_vsll_vv_u32m4(__VA_ARGS__) |
| #define | vsll_vv_u32m4_m(...) __riscv_vsll_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vsll_vv_u32m8(...) __riscv_vsll_vv_u32m8(__VA_ARGS__) |
| #define | vsll_vv_u32m8_m(...) __riscv_vsll_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vsll_vv_u32mf2(...) __riscv_vsll_vv_u32mf2(__VA_ARGS__) |
| #define | vsll_vv_u32mf2_m(...) __riscv_vsll_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vsll_vv_u64m1(...) __riscv_vsll_vv_u64m1(__VA_ARGS__) |
| #define | vsll_vv_u64m1_m(...) __riscv_vsll_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vsll_vv_u64m2(...) __riscv_vsll_vv_u64m2(__VA_ARGS__) |
| #define | vsll_vv_u64m2_m(...) __riscv_vsll_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vsll_vv_u64m4(...) __riscv_vsll_vv_u64m4(__VA_ARGS__) |
| #define | vsll_vv_u64m4_m(...) __riscv_vsll_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vsll_vv_u64m8(...) __riscv_vsll_vv_u64m8(__VA_ARGS__) |
| #define | vsll_vv_u64m8_m(...) __riscv_vsll_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vsll_vv_u8m1(...) __riscv_vsll_vv_u8m1(__VA_ARGS__) |
| #define | vsll_vv_u8m1_m(...) __riscv_vsll_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vsll_vv_u8m2(...) __riscv_vsll_vv_u8m2(__VA_ARGS__) |
| #define | vsll_vv_u8m2_m(...) __riscv_vsll_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vsll_vv_u8m4(...) __riscv_vsll_vv_u8m4(__VA_ARGS__) |
| #define | vsll_vv_u8m4_m(...) __riscv_vsll_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vsll_vv_u8m8(...) __riscv_vsll_vv_u8m8(__VA_ARGS__) |
| #define | vsll_vv_u8m8_m(...) __riscv_vsll_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vsll_vv_u8mf2(...) __riscv_vsll_vv_u8mf2(__VA_ARGS__) |
| #define | vsll_vv_u8mf2_m(...) __riscv_vsll_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vsll_vv_u8mf4(...) __riscv_vsll_vv_u8mf4(__VA_ARGS__) |
| #define | vsll_vv_u8mf4_m(...) __riscv_vsll_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vsll_vv_u8mf8(...) __riscv_vsll_vv_u8mf8(__VA_ARGS__) |
| #define | vsll_vv_u8mf8_m(...) __riscv_vsll_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vsll_vx_i16m1(...) __riscv_vsll_vx_i16m1(__VA_ARGS__) |
| #define | vsll_vx_i16m1_m(...) __riscv_vsll_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vsll_vx_i16m2(...) __riscv_vsll_vx_i16m2(__VA_ARGS__) |
| #define | vsll_vx_i16m2_m(...) __riscv_vsll_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vsll_vx_i16m4(...) __riscv_vsll_vx_i16m4(__VA_ARGS__) |
| #define | vsll_vx_i16m4_m(...) __riscv_vsll_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vsll_vx_i16m8(...) __riscv_vsll_vx_i16m8(__VA_ARGS__) |
| #define | vsll_vx_i16m8_m(...) __riscv_vsll_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vsll_vx_i16mf2(...) __riscv_vsll_vx_i16mf2(__VA_ARGS__) |
| #define | vsll_vx_i16mf2_m(...) __riscv_vsll_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vsll_vx_i16mf4(...) __riscv_vsll_vx_i16mf4(__VA_ARGS__) |
| #define | vsll_vx_i16mf4_m(...) __riscv_vsll_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vsll_vx_i32m1(...) __riscv_vsll_vx_i32m1(__VA_ARGS__) |
| #define | vsll_vx_i32m1_m(...) __riscv_vsll_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vsll_vx_i32m2(...) __riscv_vsll_vx_i32m2(__VA_ARGS__) |
| #define | vsll_vx_i32m2_m(...) __riscv_vsll_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vsll_vx_i32m4(...) __riscv_vsll_vx_i32m4(__VA_ARGS__) |
| #define | vsll_vx_i32m4_m(...) __riscv_vsll_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vsll_vx_i32m8(...) __riscv_vsll_vx_i32m8(__VA_ARGS__) |
| #define | vsll_vx_i32m8_m(...) __riscv_vsll_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vsll_vx_i32mf2(...) __riscv_vsll_vx_i32mf2(__VA_ARGS__) |
| #define | vsll_vx_i32mf2_m(...) __riscv_vsll_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vsll_vx_i64m1(...) __riscv_vsll_vx_i64m1(__VA_ARGS__) |
| #define | vsll_vx_i64m1_m(...) __riscv_vsll_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vsll_vx_i64m2(...) __riscv_vsll_vx_i64m2(__VA_ARGS__) |
| #define | vsll_vx_i64m2_m(...) __riscv_vsll_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vsll_vx_i64m4(...) __riscv_vsll_vx_i64m4(__VA_ARGS__) |
| #define | vsll_vx_i64m4_m(...) __riscv_vsll_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vsll_vx_i64m8(...) __riscv_vsll_vx_i64m8(__VA_ARGS__) |
| #define | vsll_vx_i64m8_m(...) __riscv_vsll_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vsll_vx_i8m1(...) __riscv_vsll_vx_i8m1(__VA_ARGS__) |
| #define | vsll_vx_i8m1_m(...) __riscv_vsll_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vsll_vx_i8m2(...) __riscv_vsll_vx_i8m2(__VA_ARGS__) |
| #define | vsll_vx_i8m2_m(...) __riscv_vsll_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vsll_vx_i8m4(...) __riscv_vsll_vx_i8m4(__VA_ARGS__) |
| #define | vsll_vx_i8m4_m(...) __riscv_vsll_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vsll_vx_i8m8(...) __riscv_vsll_vx_i8m8(__VA_ARGS__) |
| #define | vsll_vx_i8m8_m(...) __riscv_vsll_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vsll_vx_i8mf2(...) __riscv_vsll_vx_i8mf2(__VA_ARGS__) |
| #define | vsll_vx_i8mf2_m(...) __riscv_vsll_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vsll_vx_i8mf4(...) __riscv_vsll_vx_i8mf4(__VA_ARGS__) |
| #define | vsll_vx_i8mf4_m(...) __riscv_vsll_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vsll_vx_i8mf8(...) __riscv_vsll_vx_i8mf8(__VA_ARGS__) |
| #define | vsll_vx_i8mf8_m(...) __riscv_vsll_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vsll_vx_u16m1(...) __riscv_vsll_vx_u16m1(__VA_ARGS__) |
| #define | vsll_vx_u16m1_m(...) __riscv_vsll_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vsll_vx_u16m2(...) __riscv_vsll_vx_u16m2(__VA_ARGS__) |
| #define | vsll_vx_u16m2_m(...) __riscv_vsll_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vsll_vx_u16m4(...) __riscv_vsll_vx_u16m4(__VA_ARGS__) |
| #define | vsll_vx_u16m4_m(...) __riscv_vsll_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vsll_vx_u16m8(...) __riscv_vsll_vx_u16m8(__VA_ARGS__) |
| #define | vsll_vx_u16m8_m(...) __riscv_vsll_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vsll_vx_u16mf2(...) __riscv_vsll_vx_u16mf2(__VA_ARGS__) |
| #define | vsll_vx_u16mf2_m(...) __riscv_vsll_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vsll_vx_u16mf4(...) __riscv_vsll_vx_u16mf4(__VA_ARGS__) |
| #define | vsll_vx_u16mf4_m(...) __riscv_vsll_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vsll_vx_u32m1(...) __riscv_vsll_vx_u32m1(__VA_ARGS__) |
| #define | vsll_vx_u32m1_m(...) __riscv_vsll_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vsll_vx_u32m2(...) __riscv_vsll_vx_u32m2(__VA_ARGS__) |
| #define | vsll_vx_u32m2_m(...) __riscv_vsll_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vsll_vx_u32m4(...) __riscv_vsll_vx_u32m4(__VA_ARGS__) |
| #define | vsll_vx_u32m4_m(...) __riscv_vsll_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vsll_vx_u32m8(...) __riscv_vsll_vx_u32m8(__VA_ARGS__) |
| #define | vsll_vx_u32m8_m(...) __riscv_vsll_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vsll_vx_u32mf2(...) __riscv_vsll_vx_u32mf2(__VA_ARGS__) |
| #define | vsll_vx_u32mf2_m(...) __riscv_vsll_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vsll_vx_u64m1(...) __riscv_vsll_vx_u64m1(__VA_ARGS__) |
| #define | vsll_vx_u64m1_m(...) __riscv_vsll_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vsll_vx_u64m2(...) __riscv_vsll_vx_u64m2(__VA_ARGS__) |
| #define | vsll_vx_u64m2_m(...) __riscv_vsll_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vsll_vx_u64m4(...) __riscv_vsll_vx_u64m4(__VA_ARGS__) |
| #define | vsll_vx_u64m4_m(...) __riscv_vsll_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vsll_vx_u64m8(...) __riscv_vsll_vx_u64m8(__VA_ARGS__) |
| #define | vsll_vx_u64m8_m(...) __riscv_vsll_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vsll_vx_u8m1(...) __riscv_vsll_vx_u8m1(__VA_ARGS__) |
| #define | vsll_vx_u8m1_m(...) __riscv_vsll_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vsll_vx_u8m2(...) __riscv_vsll_vx_u8m2(__VA_ARGS__) |
| #define | vsll_vx_u8m2_m(...) __riscv_vsll_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vsll_vx_u8m4(...) __riscv_vsll_vx_u8m4(__VA_ARGS__) |
| #define | vsll_vx_u8m4_m(...) __riscv_vsll_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vsll_vx_u8m8(...) __riscv_vsll_vx_u8m8(__VA_ARGS__) |
| #define | vsll_vx_u8m8_m(...) __riscv_vsll_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vsll_vx_u8mf2(...) __riscv_vsll_vx_u8mf2(__VA_ARGS__) |
| #define | vsll_vx_u8mf2_m(...) __riscv_vsll_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vsll_vx_u8mf4(...) __riscv_vsll_vx_u8mf4(__VA_ARGS__) |
| #define | vsll_vx_u8mf4_m(...) __riscv_vsll_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vsll_vx_u8mf8(...) __riscv_vsll_vx_u8mf8(__VA_ARGS__) |
| #define | vsll_vx_u8mf8_m(...) __riscv_vsll_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vsm_v_b1(...) __riscv_vsm_v_b1(__VA_ARGS__) |
| #define | vsm_v_b16(...) __riscv_vsm_v_b16(__VA_ARGS__) |
| #define | vsm_v_b2(...) __riscv_vsm_v_b2(__VA_ARGS__) |
| #define | vsm_v_b32(...) __riscv_vsm_v_b32(__VA_ARGS__) |
| #define | vsm_v_b4(...) __riscv_vsm_v_b4(__VA_ARGS__) |
| #define | vsm_v_b64(...) __riscv_vsm_v_b64(__VA_ARGS__) |
| #define | vsm_v_b8(...) __riscv_vsm_v_b8(__VA_ARGS__) |
| #define | vsmul_vv_i16m1(...) __riscv_vsmul_vv_i16m1(__VA_ARGS__) |
| #define | vsmul_vv_i16m1_m(...) __riscv_vsmul_vv_i16m1_mu(__VA_ARGS__) |
| #define | vsmul_vv_i16m2(...) __riscv_vsmul_vv_i16m2(__VA_ARGS__) |
| #define | vsmul_vv_i16m2_m(...) __riscv_vsmul_vv_i16m2_mu(__VA_ARGS__) |
| #define | vsmul_vv_i16m4(...) __riscv_vsmul_vv_i16m4(__VA_ARGS__) |
| #define | vsmul_vv_i16m4_m(...) __riscv_vsmul_vv_i16m4_mu(__VA_ARGS__) |
| #define | vsmul_vv_i16m8(...) __riscv_vsmul_vv_i16m8(__VA_ARGS__) |
| #define | vsmul_vv_i16m8_m(...) __riscv_vsmul_vv_i16m8_mu(__VA_ARGS__) |
| #define | vsmul_vv_i16mf2(...) __riscv_vsmul_vv_i16mf2(__VA_ARGS__) |
| #define | vsmul_vv_i16mf2_m(...) __riscv_vsmul_vv_i16mf2_mu(__VA_ARGS__) |
| #define | vsmul_vv_i16mf4(...) __riscv_vsmul_vv_i16mf4(__VA_ARGS__) |
| #define | vsmul_vv_i16mf4_m(...) __riscv_vsmul_vv_i16mf4_mu(__VA_ARGS__) |
| #define | vsmul_vv_i32m1(...) __riscv_vsmul_vv_i32m1(__VA_ARGS__) |
| #define | vsmul_vv_i32m1_m(...) __riscv_vsmul_vv_i32m1_mu(__VA_ARGS__) |
| #define | vsmul_vv_i32m2(...) __riscv_vsmul_vv_i32m2(__VA_ARGS__) |
| #define | vsmul_vv_i32m2_m(...) __riscv_vsmul_vv_i32m2_mu(__VA_ARGS__) |
| #define | vsmul_vv_i32m4(...) __riscv_vsmul_vv_i32m4(__VA_ARGS__) |
| #define | vsmul_vv_i32m4_m(...) __riscv_vsmul_vv_i32m4_mu(__VA_ARGS__) |
| #define | vsmul_vv_i32m8(...) __riscv_vsmul_vv_i32m8(__VA_ARGS__) |
| #define | vsmul_vv_i32m8_m(...) __riscv_vsmul_vv_i32m8_mu(__VA_ARGS__) |
| #define | vsmul_vv_i32mf2(...) __riscv_vsmul_vv_i32mf2(__VA_ARGS__) |
| #define | vsmul_vv_i32mf2_m(...) __riscv_vsmul_vv_i32mf2_mu(__VA_ARGS__) |
| #define | vsmul_vv_i64m1(...) __riscv_vsmul_vv_i64m1(__VA_ARGS__) |
| #define | vsmul_vv_i64m1_m(...) __riscv_vsmul_vv_i64m1_mu(__VA_ARGS__) |
| #define | vsmul_vv_i64m2(...) __riscv_vsmul_vv_i64m2(__VA_ARGS__) |
| #define | vsmul_vv_i64m2_m(...) __riscv_vsmul_vv_i64m2_mu(__VA_ARGS__) |
| #define | vsmul_vv_i64m4(...) __riscv_vsmul_vv_i64m4(__VA_ARGS__) |
| #define | vsmul_vv_i64m4_m(...) __riscv_vsmul_vv_i64m4_mu(__VA_ARGS__) |
| #define | vsmul_vv_i64m8(...) __riscv_vsmul_vv_i64m8(__VA_ARGS__) |
| #define | vsmul_vv_i64m8_m(...) __riscv_vsmul_vv_i64m8_mu(__VA_ARGS__) |
| #define | vsmul_vv_i8m1(...) __riscv_vsmul_vv_i8m1(__VA_ARGS__) |
| #define | vsmul_vv_i8m1_m(...) __riscv_vsmul_vv_i8m1_mu(__VA_ARGS__) |
| #define | vsmul_vv_i8m2(...) __riscv_vsmul_vv_i8m2(__VA_ARGS__) |
| #define | vsmul_vv_i8m2_m(...) __riscv_vsmul_vv_i8m2_mu(__VA_ARGS__) |
| #define | vsmul_vv_i8m4(...) __riscv_vsmul_vv_i8m4(__VA_ARGS__) |
| #define | vsmul_vv_i8m4_m(...) __riscv_vsmul_vv_i8m4_mu(__VA_ARGS__) |
| #define | vsmul_vv_i8m8(...) __riscv_vsmul_vv_i8m8(__VA_ARGS__) |
| #define | vsmul_vv_i8m8_m(...) __riscv_vsmul_vv_i8m8_mu(__VA_ARGS__) |
| #define | vsmul_vv_i8mf2(...) __riscv_vsmul_vv_i8mf2(__VA_ARGS__) |
| #define | vsmul_vv_i8mf2_m(...) __riscv_vsmul_vv_i8mf2_mu(__VA_ARGS__) |
| #define | vsmul_vv_i8mf4(...) __riscv_vsmul_vv_i8mf4(__VA_ARGS__) |
| #define | vsmul_vv_i8mf4_m(...) __riscv_vsmul_vv_i8mf4_mu(__VA_ARGS__) |
| #define | vsmul_vv_i8mf8(...) __riscv_vsmul_vv_i8mf8(__VA_ARGS__) |
| #define | vsmul_vv_i8mf8_m(...) __riscv_vsmul_vv_i8mf8_mu(__VA_ARGS__) |
| #define | vsmul_vx_i16m1(...) __riscv_vsmul_vx_i16m1(__VA_ARGS__) |
| #define | vsmul_vx_i16m1_m(...) __riscv_vsmul_vx_i16m1_mu(__VA_ARGS__) |
| #define | vsmul_vx_i16m2(...) __riscv_vsmul_vx_i16m2(__VA_ARGS__) |
| #define | vsmul_vx_i16m2_m(...) __riscv_vsmul_vx_i16m2_mu(__VA_ARGS__) |
| #define | vsmul_vx_i16m4(...) __riscv_vsmul_vx_i16m4(__VA_ARGS__) |
| #define | vsmul_vx_i16m4_m(...) __riscv_vsmul_vx_i16m4_mu(__VA_ARGS__) |
| #define | vsmul_vx_i16m8(...) __riscv_vsmul_vx_i16m8(__VA_ARGS__) |
| #define | vsmul_vx_i16m8_m(...) __riscv_vsmul_vx_i16m8_mu(__VA_ARGS__) |
| #define | vsmul_vx_i16mf2(...) __riscv_vsmul_vx_i16mf2(__VA_ARGS__) |
| #define | vsmul_vx_i16mf2_m(...) __riscv_vsmul_vx_i16mf2_mu(__VA_ARGS__) |
| #define | vsmul_vx_i16mf4(...) __riscv_vsmul_vx_i16mf4(__VA_ARGS__) |
| #define | vsmul_vx_i16mf4_m(...) __riscv_vsmul_vx_i16mf4_mu(__VA_ARGS__) |
| #define | vsmul_vx_i32m1(...) __riscv_vsmul_vx_i32m1(__VA_ARGS__) |
| #define | vsmul_vx_i32m1_m(...) __riscv_vsmul_vx_i32m1_mu(__VA_ARGS__) |
| #define | vsmul_vx_i32m2(...) __riscv_vsmul_vx_i32m2(__VA_ARGS__) |
| #define | vsmul_vx_i32m2_m(...) __riscv_vsmul_vx_i32m2_mu(__VA_ARGS__) |
| #define | vsmul_vx_i32m4(...) __riscv_vsmul_vx_i32m4(__VA_ARGS__) |
| #define | vsmul_vx_i32m4_m(...) __riscv_vsmul_vx_i32m4_mu(__VA_ARGS__) |
| #define | vsmul_vx_i32m8(...) __riscv_vsmul_vx_i32m8(__VA_ARGS__) |
| #define | vsmul_vx_i32m8_m(...) __riscv_vsmul_vx_i32m8_mu(__VA_ARGS__) |
| #define | vsmul_vx_i32mf2(...) __riscv_vsmul_vx_i32mf2(__VA_ARGS__) |
| #define | vsmul_vx_i32mf2_m(...) __riscv_vsmul_vx_i32mf2_mu(__VA_ARGS__) |
| #define | vsmul_vx_i64m1(...) __riscv_vsmul_vx_i64m1(__VA_ARGS__) |
| #define | vsmul_vx_i64m1_m(...) __riscv_vsmul_vx_i64m1_mu(__VA_ARGS__) |
| #define | vsmul_vx_i64m2(...) __riscv_vsmul_vx_i64m2(__VA_ARGS__) |
| #define | vsmul_vx_i64m2_m(...) __riscv_vsmul_vx_i64m2_mu(__VA_ARGS__) |
| #define | vsmul_vx_i64m4(...) __riscv_vsmul_vx_i64m4(__VA_ARGS__) |
| #define | vsmul_vx_i64m4_m(...) __riscv_vsmul_vx_i64m4_mu(__VA_ARGS__) |
| #define | vsmul_vx_i64m8(...) __riscv_vsmul_vx_i64m8(__VA_ARGS__) |
| #define | vsmul_vx_i64m8_m(...) __riscv_vsmul_vx_i64m8_mu(__VA_ARGS__) |
| #define | vsmul_vx_i8m1(...) __riscv_vsmul_vx_i8m1(__VA_ARGS__) |
| #define | vsmul_vx_i8m1_m(...) __riscv_vsmul_vx_i8m1_mu(__VA_ARGS__) |
| #define | vsmul_vx_i8m2(...) __riscv_vsmul_vx_i8m2(__VA_ARGS__) |
| #define | vsmul_vx_i8m2_m(...) __riscv_vsmul_vx_i8m2_mu(__VA_ARGS__) |
| #define | vsmul_vx_i8m4(...) __riscv_vsmul_vx_i8m4(__VA_ARGS__) |
| #define | vsmul_vx_i8m4_m(...) __riscv_vsmul_vx_i8m4_mu(__VA_ARGS__) |
| #define | vsmul_vx_i8m8(...) __riscv_vsmul_vx_i8m8(__VA_ARGS__) |
| #define | vsmul_vx_i8m8_m(...) __riscv_vsmul_vx_i8m8_mu(__VA_ARGS__) |
| #define | vsmul_vx_i8mf2(...) __riscv_vsmul_vx_i8mf2(__VA_ARGS__) |
| #define | vsmul_vx_i8mf2_m(...) __riscv_vsmul_vx_i8mf2_mu(__VA_ARGS__) |
| #define | vsmul_vx_i8mf4(...) __riscv_vsmul_vx_i8mf4(__VA_ARGS__) |
| #define | vsmul_vx_i8mf4_m(...) __riscv_vsmul_vx_i8mf4_mu(__VA_ARGS__) |
| #define | vsmul_vx_i8mf8(...) __riscv_vsmul_vx_i8mf8(__VA_ARGS__) |
| #define | vsmul_vx_i8mf8_m(...) __riscv_vsmul_vx_i8mf8_mu(__VA_ARGS__) |
| #define | vsoxei16_v_f16m1(...) __riscv_vsoxei16_v_f16m1(__VA_ARGS__) |
| #define | vsoxei16_v_f16m1_m(...) __riscv_vsoxei16_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxei16_v_f16m2(...) __riscv_vsoxei16_v_f16m2(__VA_ARGS__) |
| #define | vsoxei16_v_f16m2_m(...) __riscv_vsoxei16_v_f16m2_m(__VA_ARGS__) |
| #define | vsoxei16_v_f16m4(...) __riscv_vsoxei16_v_f16m4(__VA_ARGS__) |
| #define | vsoxei16_v_f16m4_m(...) __riscv_vsoxei16_v_f16m4_m(__VA_ARGS__) |
| #define | vsoxei16_v_f16m8(...) __riscv_vsoxei16_v_f16m8(__VA_ARGS__) |
| #define | vsoxei16_v_f16m8_m(...) __riscv_vsoxei16_v_f16m8_m(__VA_ARGS__) |
| #define | vsoxei16_v_f16mf2(...) __riscv_vsoxei16_v_f16mf2(__VA_ARGS__) |
| #define | vsoxei16_v_f16mf2_m(...) __riscv_vsoxei16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxei16_v_f16mf4(...) __riscv_vsoxei16_v_f16mf4(__VA_ARGS__) |
| #define | vsoxei16_v_f16mf4_m(...) __riscv_vsoxei16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxei16_v_f32m1(...) __riscv_vsoxei16_v_f32m1(__VA_ARGS__) |
| #define | vsoxei16_v_f32m1_m(...) __riscv_vsoxei16_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxei16_v_f32m2(...) __riscv_vsoxei16_v_f32m2(__VA_ARGS__) |
| #define | vsoxei16_v_f32m2_m(...) __riscv_vsoxei16_v_f32m2_m(__VA_ARGS__) |
| #define | vsoxei16_v_f32m4(...) __riscv_vsoxei16_v_f32m4(__VA_ARGS__) |
| #define | vsoxei16_v_f32m4_m(...) __riscv_vsoxei16_v_f32m4_m(__VA_ARGS__) |
| #define | vsoxei16_v_f32m8(...) __riscv_vsoxei16_v_f32m8(__VA_ARGS__) |
| #define | vsoxei16_v_f32m8_m(...) __riscv_vsoxei16_v_f32m8_m(__VA_ARGS__) |
| #define | vsoxei16_v_f32mf2(...) __riscv_vsoxei16_v_f32mf2(__VA_ARGS__) |
| #define | vsoxei16_v_f32mf2_m(...) __riscv_vsoxei16_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxei16_v_f64m1(...) __riscv_vsoxei16_v_f64m1(__VA_ARGS__) |
| #define | vsoxei16_v_f64m1_m(...) __riscv_vsoxei16_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxei16_v_f64m2(...) __riscv_vsoxei16_v_f64m2(__VA_ARGS__) |
| #define | vsoxei16_v_f64m2_m(...) __riscv_vsoxei16_v_f64m2_m(__VA_ARGS__) |
| #define | vsoxei16_v_f64m4(...) __riscv_vsoxei16_v_f64m4(__VA_ARGS__) |
| #define | vsoxei16_v_f64m4_m(...) __riscv_vsoxei16_v_f64m4_m(__VA_ARGS__) |
| #define | vsoxei16_v_f64m8(...) __riscv_vsoxei16_v_f64m8(__VA_ARGS__) |
| #define | vsoxei16_v_f64m8_m(...) __riscv_vsoxei16_v_f64m8_m(__VA_ARGS__) |
| #define | vsoxei16_v_i16m1(...) __riscv_vsoxei16_v_i16m1(__VA_ARGS__) |
| #define | vsoxei16_v_i16m1_m(...) __riscv_vsoxei16_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxei16_v_i16m2(...) __riscv_vsoxei16_v_i16m2(__VA_ARGS__) |
| #define | vsoxei16_v_i16m2_m(...) __riscv_vsoxei16_v_i16m2_m(__VA_ARGS__) |
| #define | vsoxei16_v_i16m4(...) __riscv_vsoxei16_v_i16m4(__VA_ARGS__) |
| #define | vsoxei16_v_i16m4_m(...) __riscv_vsoxei16_v_i16m4_m(__VA_ARGS__) |
| #define | vsoxei16_v_i16m8(...) __riscv_vsoxei16_v_i16m8(__VA_ARGS__) |
| #define | vsoxei16_v_i16m8_m(...) __riscv_vsoxei16_v_i16m8_m(__VA_ARGS__) |
| #define | vsoxei16_v_i16mf2(...) __riscv_vsoxei16_v_i16mf2(__VA_ARGS__) |
| #define | vsoxei16_v_i16mf2_m(...) __riscv_vsoxei16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxei16_v_i16mf4(...) __riscv_vsoxei16_v_i16mf4(__VA_ARGS__) |
| #define | vsoxei16_v_i16mf4_m(...) __riscv_vsoxei16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxei16_v_i32m1(...) __riscv_vsoxei16_v_i32m1(__VA_ARGS__) |
| #define | vsoxei16_v_i32m1_m(...) __riscv_vsoxei16_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxei16_v_i32m2(...) __riscv_vsoxei16_v_i32m2(__VA_ARGS__) |
| #define | vsoxei16_v_i32m2_m(...) __riscv_vsoxei16_v_i32m2_m(__VA_ARGS__) |
| #define | vsoxei16_v_i32m4(...) __riscv_vsoxei16_v_i32m4(__VA_ARGS__) |
| #define | vsoxei16_v_i32m4_m(...) __riscv_vsoxei16_v_i32m4_m(__VA_ARGS__) |
| #define | vsoxei16_v_i32m8(...) __riscv_vsoxei16_v_i32m8(__VA_ARGS__) |
| #define | vsoxei16_v_i32m8_m(...) __riscv_vsoxei16_v_i32m8_m(__VA_ARGS__) |
| #define | vsoxei16_v_i32mf2(...) __riscv_vsoxei16_v_i32mf2(__VA_ARGS__) |
| #define | vsoxei16_v_i32mf2_m(...) __riscv_vsoxei16_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxei16_v_i64m1(...) __riscv_vsoxei16_v_i64m1(__VA_ARGS__) |
| #define | vsoxei16_v_i64m1_m(...) __riscv_vsoxei16_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxei16_v_i64m2(...) __riscv_vsoxei16_v_i64m2(__VA_ARGS__) |
| #define | vsoxei16_v_i64m2_m(...) __riscv_vsoxei16_v_i64m2_m(__VA_ARGS__) |
| #define | vsoxei16_v_i64m4(...) __riscv_vsoxei16_v_i64m4(__VA_ARGS__) |
| #define | vsoxei16_v_i64m4_m(...) __riscv_vsoxei16_v_i64m4_m(__VA_ARGS__) |
| #define | vsoxei16_v_i64m8(...) __riscv_vsoxei16_v_i64m8(__VA_ARGS__) |
| #define | vsoxei16_v_i64m8_m(...) __riscv_vsoxei16_v_i64m8_m(__VA_ARGS__) |
| #define | vsoxei16_v_i8m1(...) __riscv_vsoxei16_v_i8m1(__VA_ARGS__) |
| #define | vsoxei16_v_i8m1_m(...) __riscv_vsoxei16_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxei16_v_i8m2(...) __riscv_vsoxei16_v_i8m2(__VA_ARGS__) |
| #define | vsoxei16_v_i8m2_m(...) __riscv_vsoxei16_v_i8m2_m(__VA_ARGS__) |
| #define | vsoxei16_v_i8m4(...) __riscv_vsoxei16_v_i8m4(__VA_ARGS__) |
| #define | vsoxei16_v_i8m4_m(...) __riscv_vsoxei16_v_i8m4_m(__VA_ARGS__) |
| #define | vsoxei16_v_i8mf2(...) __riscv_vsoxei16_v_i8mf2(__VA_ARGS__) |
| #define | vsoxei16_v_i8mf2_m(...) __riscv_vsoxei16_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxei16_v_i8mf4(...) __riscv_vsoxei16_v_i8mf4(__VA_ARGS__) |
| #define | vsoxei16_v_i8mf4_m(...) __riscv_vsoxei16_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxei16_v_i8mf8(...) __riscv_vsoxei16_v_i8mf8(__VA_ARGS__) |
| #define | vsoxei16_v_i8mf8_m(...) __riscv_vsoxei16_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxei16_v_u16m1(...) __riscv_vsoxei16_v_u16m1(__VA_ARGS__) |
| #define | vsoxei16_v_u16m1_m(...) __riscv_vsoxei16_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxei16_v_u16m2(...) __riscv_vsoxei16_v_u16m2(__VA_ARGS__) |
| #define | vsoxei16_v_u16m2_m(...) __riscv_vsoxei16_v_u16m2_m(__VA_ARGS__) |
| #define | vsoxei16_v_u16m4(...) __riscv_vsoxei16_v_u16m4(__VA_ARGS__) |
| #define | vsoxei16_v_u16m4_m(...) __riscv_vsoxei16_v_u16m4_m(__VA_ARGS__) |
| #define | vsoxei16_v_u16m8(...) __riscv_vsoxei16_v_u16m8(__VA_ARGS__) |
| #define | vsoxei16_v_u16m8_m(...) __riscv_vsoxei16_v_u16m8_m(__VA_ARGS__) |
| #define | vsoxei16_v_u16mf2(...) __riscv_vsoxei16_v_u16mf2(__VA_ARGS__) |
| #define | vsoxei16_v_u16mf2_m(...) __riscv_vsoxei16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxei16_v_u16mf4(...) __riscv_vsoxei16_v_u16mf4(__VA_ARGS__) |
| #define | vsoxei16_v_u16mf4_m(...) __riscv_vsoxei16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxei16_v_u32m1(...) __riscv_vsoxei16_v_u32m1(__VA_ARGS__) |
| #define | vsoxei16_v_u32m1_m(...) __riscv_vsoxei16_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxei16_v_u32m2(...) __riscv_vsoxei16_v_u32m2(__VA_ARGS__) |
| #define | vsoxei16_v_u32m2_m(...) __riscv_vsoxei16_v_u32m2_m(__VA_ARGS__) |
| #define | vsoxei16_v_u32m4(...) __riscv_vsoxei16_v_u32m4(__VA_ARGS__) |
| #define | vsoxei16_v_u32m4_m(...) __riscv_vsoxei16_v_u32m4_m(__VA_ARGS__) |
| #define | vsoxei16_v_u32m8(...) __riscv_vsoxei16_v_u32m8(__VA_ARGS__) |
| #define | vsoxei16_v_u32m8_m(...) __riscv_vsoxei16_v_u32m8_m(__VA_ARGS__) |
| #define | vsoxei16_v_u32mf2(...) __riscv_vsoxei16_v_u32mf2(__VA_ARGS__) |
| #define | vsoxei16_v_u32mf2_m(...) __riscv_vsoxei16_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxei16_v_u64m1(...) __riscv_vsoxei16_v_u64m1(__VA_ARGS__) |
| #define | vsoxei16_v_u64m1_m(...) __riscv_vsoxei16_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxei16_v_u64m2(...) __riscv_vsoxei16_v_u64m2(__VA_ARGS__) |
| #define | vsoxei16_v_u64m2_m(...) __riscv_vsoxei16_v_u64m2_m(__VA_ARGS__) |
| #define | vsoxei16_v_u64m4(...) __riscv_vsoxei16_v_u64m4(__VA_ARGS__) |
| #define | vsoxei16_v_u64m4_m(...) __riscv_vsoxei16_v_u64m4_m(__VA_ARGS__) |
| #define | vsoxei16_v_u64m8(...) __riscv_vsoxei16_v_u64m8(__VA_ARGS__) |
| #define | vsoxei16_v_u64m8_m(...) __riscv_vsoxei16_v_u64m8_m(__VA_ARGS__) |
| #define | vsoxei16_v_u8m1(...) __riscv_vsoxei16_v_u8m1(__VA_ARGS__) |
| #define | vsoxei16_v_u8m1_m(...) __riscv_vsoxei16_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxei16_v_u8m2(...) __riscv_vsoxei16_v_u8m2(__VA_ARGS__) |
| #define | vsoxei16_v_u8m2_m(...) __riscv_vsoxei16_v_u8m2_m(__VA_ARGS__) |
| #define | vsoxei16_v_u8m4(...) __riscv_vsoxei16_v_u8m4(__VA_ARGS__) |
| #define | vsoxei16_v_u8m4_m(...) __riscv_vsoxei16_v_u8m4_m(__VA_ARGS__) |
| #define | vsoxei16_v_u8mf2(...) __riscv_vsoxei16_v_u8mf2(__VA_ARGS__) |
| #define | vsoxei16_v_u8mf2_m(...) __riscv_vsoxei16_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxei16_v_u8mf4(...) __riscv_vsoxei16_v_u8mf4(__VA_ARGS__) |
| #define | vsoxei16_v_u8mf4_m(...) __riscv_vsoxei16_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxei16_v_u8mf8(...) __riscv_vsoxei16_v_u8mf8(__VA_ARGS__) |
| #define | vsoxei16_v_u8mf8_m(...) __riscv_vsoxei16_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxei32_v_f16m1(...) __riscv_vsoxei32_v_f16m1(__VA_ARGS__) |
| #define | vsoxei32_v_f16m1_m(...) __riscv_vsoxei32_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxei32_v_f16m2(...) __riscv_vsoxei32_v_f16m2(__VA_ARGS__) |
| #define | vsoxei32_v_f16m2_m(...) __riscv_vsoxei32_v_f16m2_m(__VA_ARGS__) |
| #define | vsoxei32_v_f16m4(...) __riscv_vsoxei32_v_f16m4(__VA_ARGS__) |
| #define | vsoxei32_v_f16m4_m(...) __riscv_vsoxei32_v_f16m4_m(__VA_ARGS__) |
| #define | vsoxei32_v_f16mf2(...) __riscv_vsoxei32_v_f16mf2(__VA_ARGS__) |
| #define | vsoxei32_v_f16mf2_m(...) __riscv_vsoxei32_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxei32_v_f16mf4(...) __riscv_vsoxei32_v_f16mf4(__VA_ARGS__) |
| #define | vsoxei32_v_f16mf4_m(...) __riscv_vsoxei32_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxei32_v_f32m1(...) __riscv_vsoxei32_v_f32m1(__VA_ARGS__) |
| #define | vsoxei32_v_f32m1_m(...) __riscv_vsoxei32_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxei32_v_f32m2(...) __riscv_vsoxei32_v_f32m2(__VA_ARGS__) |
| #define | vsoxei32_v_f32m2_m(...) __riscv_vsoxei32_v_f32m2_m(__VA_ARGS__) |
| #define | vsoxei32_v_f32m4(...) __riscv_vsoxei32_v_f32m4(__VA_ARGS__) |
| #define | vsoxei32_v_f32m4_m(...) __riscv_vsoxei32_v_f32m4_m(__VA_ARGS__) |
| #define | vsoxei32_v_f32m8(...) __riscv_vsoxei32_v_f32m8(__VA_ARGS__) |
| #define | vsoxei32_v_f32m8_m(...) __riscv_vsoxei32_v_f32m8_m(__VA_ARGS__) |
| #define | vsoxei32_v_f32mf2(...) __riscv_vsoxei32_v_f32mf2(__VA_ARGS__) |
| #define | vsoxei32_v_f32mf2_m(...) __riscv_vsoxei32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxei32_v_f64m1(...) __riscv_vsoxei32_v_f64m1(__VA_ARGS__) |
| #define | vsoxei32_v_f64m1_m(...) __riscv_vsoxei32_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxei32_v_f64m2(...) __riscv_vsoxei32_v_f64m2(__VA_ARGS__) |
| #define | vsoxei32_v_f64m2_m(...) __riscv_vsoxei32_v_f64m2_m(__VA_ARGS__) |
| #define | vsoxei32_v_f64m4(...) __riscv_vsoxei32_v_f64m4(__VA_ARGS__) |
| #define | vsoxei32_v_f64m4_m(...) __riscv_vsoxei32_v_f64m4_m(__VA_ARGS__) |
| #define | vsoxei32_v_f64m8(...) __riscv_vsoxei32_v_f64m8(__VA_ARGS__) |
| #define | vsoxei32_v_f64m8_m(...) __riscv_vsoxei32_v_f64m8_m(__VA_ARGS__) |
| #define | vsoxei32_v_i16m1(...) __riscv_vsoxei32_v_i16m1(__VA_ARGS__) |
| #define | vsoxei32_v_i16m1_m(...) __riscv_vsoxei32_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxei32_v_i16m2(...) __riscv_vsoxei32_v_i16m2(__VA_ARGS__) |
| #define | vsoxei32_v_i16m2_m(...) __riscv_vsoxei32_v_i16m2_m(__VA_ARGS__) |
| #define | vsoxei32_v_i16m4(...) __riscv_vsoxei32_v_i16m4(__VA_ARGS__) |
| #define | vsoxei32_v_i16m4_m(...) __riscv_vsoxei32_v_i16m4_m(__VA_ARGS__) |
| #define | vsoxei32_v_i16mf2(...) __riscv_vsoxei32_v_i16mf2(__VA_ARGS__) |
| #define | vsoxei32_v_i16mf2_m(...) __riscv_vsoxei32_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxei32_v_i16mf4(...) __riscv_vsoxei32_v_i16mf4(__VA_ARGS__) |
| #define | vsoxei32_v_i16mf4_m(...) __riscv_vsoxei32_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxei32_v_i32m1(...) __riscv_vsoxei32_v_i32m1(__VA_ARGS__) |
| #define | vsoxei32_v_i32m1_m(...) __riscv_vsoxei32_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxei32_v_i32m2(...) __riscv_vsoxei32_v_i32m2(__VA_ARGS__) |
| #define | vsoxei32_v_i32m2_m(...) __riscv_vsoxei32_v_i32m2_m(__VA_ARGS__) |
| #define | vsoxei32_v_i32m4(...) __riscv_vsoxei32_v_i32m4(__VA_ARGS__) |
| #define | vsoxei32_v_i32m4_m(...) __riscv_vsoxei32_v_i32m4_m(__VA_ARGS__) |
| #define | vsoxei32_v_i32m8(...) __riscv_vsoxei32_v_i32m8(__VA_ARGS__) |
| #define | vsoxei32_v_i32m8_m(...) __riscv_vsoxei32_v_i32m8_m(__VA_ARGS__) |
| #define | vsoxei32_v_i32mf2(...) __riscv_vsoxei32_v_i32mf2(__VA_ARGS__) |
| #define | vsoxei32_v_i32mf2_m(...) __riscv_vsoxei32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxei32_v_i64m1(...) __riscv_vsoxei32_v_i64m1(__VA_ARGS__) |
| #define | vsoxei32_v_i64m1_m(...) __riscv_vsoxei32_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxei32_v_i64m2(...) __riscv_vsoxei32_v_i64m2(__VA_ARGS__) |
| #define | vsoxei32_v_i64m2_m(...) __riscv_vsoxei32_v_i64m2_m(__VA_ARGS__) |
| #define | vsoxei32_v_i64m4(...) __riscv_vsoxei32_v_i64m4(__VA_ARGS__) |
| #define | vsoxei32_v_i64m4_m(...) __riscv_vsoxei32_v_i64m4_m(__VA_ARGS__) |
| #define | vsoxei32_v_i64m8(...) __riscv_vsoxei32_v_i64m8(__VA_ARGS__) |
| #define | vsoxei32_v_i64m8_m(...) __riscv_vsoxei32_v_i64m8_m(__VA_ARGS__) |
| #define | vsoxei32_v_i8m1(...) __riscv_vsoxei32_v_i8m1(__VA_ARGS__) |
| #define | vsoxei32_v_i8m1_m(...) __riscv_vsoxei32_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxei32_v_i8m2(...) __riscv_vsoxei32_v_i8m2(__VA_ARGS__) |
| #define | vsoxei32_v_i8m2_m(...) __riscv_vsoxei32_v_i8m2_m(__VA_ARGS__) |
| #define | vsoxei32_v_i8mf2(...) __riscv_vsoxei32_v_i8mf2(__VA_ARGS__) |
| #define | vsoxei32_v_i8mf2_m(...) __riscv_vsoxei32_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxei32_v_i8mf4(...) __riscv_vsoxei32_v_i8mf4(__VA_ARGS__) |
| #define | vsoxei32_v_i8mf4_m(...) __riscv_vsoxei32_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxei32_v_i8mf8(...) __riscv_vsoxei32_v_i8mf8(__VA_ARGS__) |
| #define | vsoxei32_v_i8mf8_m(...) __riscv_vsoxei32_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxei32_v_u16m1(...) __riscv_vsoxei32_v_u16m1(__VA_ARGS__) |
| #define | vsoxei32_v_u16m1_m(...) __riscv_vsoxei32_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxei32_v_u16m2(...) __riscv_vsoxei32_v_u16m2(__VA_ARGS__) |
| #define | vsoxei32_v_u16m2_m(...) __riscv_vsoxei32_v_u16m2_m(__VA_ARGS__) |
| #define | vsoxei32_v_u16m4(...) __riscv_vsoxei32_v_u16m4(__VA_ARGS__) |
| #define | vsoxei32_v_u16m4_m(...) __riscv_vsoxei32_v_u16m4_m(__VA_ARGS__) |
| #define | vsoxei32_v_u16mf2(...) __riscv_vsoxei32_v_u16mf2(__VA_ARGS__) |
| #define | vsoxei32_v_u16mf2_m(...) __riscv_vsoxei32_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxei32_v_u16mf4(...) __riscv_vsoxei32_v_u16mf4(__VA_ARGS__) |
| #define | vsoxei32_v_u16mf4_m(...) __riscv_vsoxei32_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxei32_v_u32m1(...) __riscv_vsoxei32_v_u32m1(__VA_ARGS__) |
| #define | vsoxei32_v_u32m1_m(...) __riscv_vsoxei32_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxei32_v_u32m2(...) __riscv_vsoxei32_v_u32m2(__VA_ARGS__) |
| #define | vsoxei32_v_u32m2_m(...) __riscv_vsoxei32_v_u32m2_m(__VA_ARGS__) |
| #define | vsoxei32_v_u32m4(...) __riscv_vsoxei32_v_u32m4(__VA_ARGS__) |
| #define | vsoxei32_v_u32m4_m(...) __riscv_vsoxei32_v_u32m4_m(__VA_ARGS__) |
| #define | vsoxei32_v_u32m8(...) __riscv_vsoxei32_v_u32m8(__VA_ARGS__) |
| #define | vsoxei32_v_u32m8_m(...) __riscv_vsoxei32_v_u32m8_m(__VA_ARGS__) |
| #define | vsoxei32_v_u32mf2(...) __riscv_vsoxei32_v_u32mf2(__VA_ARGS__) |
| #define | vsoxei32_v_u32mf2_m(...) __riscv_vsoxei32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxei32_v_u64m1(...) __riscv_vsoxei32_v_u64m1(__VA_ARGS__) |
| #define | vsoxei32_v_u64m1_m(...) __riscv_vsoxei32_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxei32_v_u64m2(...) __riscv_vsoxei32_v_u64m2(__VA_ARGS__) |
| #define | vsoxei32_v_u64m2_m(...) __riscv_vsoxei32_v_u64m2_m(__VA_ARGS__) |
| #define | vsoxei32_v_u64m4(...) __riscv_vsoxei32_v_u64m4(__VA_ARGS__) |
| #define | vsoxei32_v_u64m4_m(...) __riscv_vsoxei32_v_u64m4_m(__VA_ARGS__) |
| #define | vsoxei32_v_u64m8(...) __riscv_vsoxei32_v_u64m8(__VA_ARGS__) |
| #define | vsoxei32_v_u64m8_m(...) __riscv_vsoxei32_v_u64m8_m(__VA_ARGS__) |
| #define | vsoxei32_v_u8m1(...) __riscv_vsoxei32_v_u8m1(__VA_ARGS__) |
| #define | vsoxei32_v_u8m1_m(...) __riscv_vsoxei32_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxei32_v_u8m2(...) __riscv_vsoxei32_v_u8m2(__VA_ARGS__) |
| #define | vsoxei32_v_u8m2_m(...) __riscv_vsoxei32_v_u8m2_m(__VA_ARGS__) |
| #define | vsoxei32_v_u8mf2(...) __riscv_vsoxei32_v_u8mf2(__VA_ARGS__) |
| #define | vsoxei32_v_u8mf2_m(...) __riscv_vsoxei32_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxei32_v_u8mf4(...) __riscv_vsoxei32_v_u8mf4(__VA_ARGS__) |
| #define | vsoxei32_v_u8mf4_m(...) __riscv_vsoxei32_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxei32_v_u8mf8(...) __riscv_vsoxei32_v_u8mf8(__VA_ARGS__) |
| #define | vsoxei32_v_u8mf8_m(...) __riscv_vsoxei32_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxei64_v_f16m1(...) __riscv_vsoxei64_v_f16m1(__VA_ARGS__) |
| #define | vsoxei64_v_f16m1_m(...) __riscv_vsoxei64_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxei64_v_f16m2(...) __riscv_vsoxei64_v_f16m2(__VA_ARGS__) |
| #define | vsoxei64_v_f16m2_m(...) __riscv_vsoxei64_v_f16m2_m(__VA_ARGS__) |
| #define | vsoxei64_v_f16mf2(...) __riscv_vsoxei64_v_f16mf2(__VA_ARGS__) |
| #define | vsoxei64_v_f16mf2_m(...) __riscv_vsoxei64_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxei64_v_f16mf4(...) __riscv_vsoxei64_v_f16mf4(__VA_ARGS__) |
| #define | vsoxei64_v_f16mf4_m(...) __riscv_vsoxei64_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxei64_v_f32m1(...) __riscv_vsoxei64_v_f32m1(__VA_ARGS__) |
| #define | vsoxei64_v_f32m1_m(...) __riscv_vsoxei64_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxei64_v_f32m2(...) __riscv_vsoxei64_v_f32m2(__VA_ARGS__) |
| #define | vsoxei64_v_f32m2_m(...) __riscv_vsoxei64_v_f32m2_m(__VA_ARGS__) |
| #define | vsoxei64_v_f32m4(...) __riscv_vsoxei64_v_f32m4(__VA_ARGS__) |
| #define | vsoxei64_v_f32m4_m(...) __riscv_vsoxei64_v_f32m4_m(__VA_ARGS__) |
| #define | vsoxei64_v_f32mf2(...) __riscv_vsoxei64_v_f32mf2(__VA_ARGS__) |
| #define | vsoxei64_v_f32mf2_m(...) __riscv_vsoxei64_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxei64_v_f64m1(...) __riscv_vsoxei64_v_f64m1(__VA_ARGS__) |
| #define | vsoxei64_v_f64m1_m(...) __riscv_vsoxei64_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxei64_v_f64m2(...) __riscv_vsoxei64_v_f64m2(__VA_ARGS__) |
| #define | vsoxei64_v_f64m2_m(...) __riscv_vsoxei64_v_f64m2_m(__VA_ARGS__) |
| #define | vsoxei64_v_f64m4(...) __riscv_vsoxei64_v_f64m4(__VA_ARGS__) |
| #define | vsoxei64_v_f64m4_m(...) __riscv_vsoxei64_v_f64m4_m(__VA_ARGS__) |
| #define | vsoxei64_v_f64m8(...) __riscv_vsoxei64_v_f64m8(__VA_ARGS__) |
| #define | vsoxei64_v_f64m8_m(...) __riscv_vsoxei64_v_f64m8_m(__VA_ARGS__) |
| #define | vsoxei64_v_i16m1(...) __riscv_vsoxei64_v_i16m1(__VA_ARGS__) |
| #define | vsoxei64_v_i16m1_m(...) __riscv_vsoxei64_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxei64_v_i16m2(...) __riscv_vsoxei64_v_i16m2(__VA_ARGS__) |
| #define | vsoxei64_v_i16m2_m(...) __riscv_vsoxei64_v_i16m2_m(__VA_ARGS__) |
| #define | vsoxei64_v_i16mf2(...) __riscv_vsoxei64_v_i16mf2(__VA_ARGS__) |
| #define | vsoxei64_v_i16mf2_m(...) __riscv_vsoxei64_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxei64_v_i16mf4(...) __riscv_vsoxei64_v_i16mf4(__VA_ARGS__) |
| #define | vsoxei64_v_i16mf4_m(...) __riscv_vsoxei64_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxei64_v_i32m1(...) __riscv_vsoxei64_v_i32m1(__VA_ARGS__) |
| #define | vsoxei64_v_i32m1_m(...) __riscv_vsoxei64_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxei64_v_i32m2(...) __riscv_vsoxei64_v_i32m2(__VA_ARGS__) |
| #define | vsoxei64_v_i32m2_m(...) __riscv_vsoxei64_v_i32m2_m(__VA_ARGS__) |
| #define | vsoxei64_v_i32m4(...) __riscv_vsoxei64_v_i32m4(__VA_ARGS__) |
| #define | vsoxei64_v_i32m4_m(...) __riscv_vsoxei64_v_i32m4_m(__VA_ARGS__) |
| #define | vsoxei64_v_i32mf2(...) __riscv_vsoxei64_v_i32mf2(__VA_ARGS__) |
| #define | vsoxei64_v_i32mf2_m(...) __riscv_vsoxei64_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxei64_v_i64m1(...) __riscv_vsoxei64_v_i64m1(__VA_ARGS__) |
| #define | vsoxei64_v_i64m1_m(...) __riscv_vsoxei64_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxei64_v_i64m2(...) __riscv_vsoxei64_v_i64m2(__VA_ARGS__) |
| #define | vsoxei64_v_i64m2_m(...) __riscv_vsoxei64_v_i64m2_m(__VA_ARGS__) |
| #define | vsoxei64_v_i64m4(...) __riscv_vsoxei64_v_i64m4(__VA_ARGS__) |
| #define | vsoxei64_v_i64m4_m(...) __riscv_vsoxei64_v_i64m4_m(__VA_ARGS__) |
| #define | vsoxei64_v_i64m8(...) __riscv_vsoxei64_v_i64m8(__VA_ARGS__) |
| #define | vsoxei64_v_i64m8_m(...) __riscv_vsoxei64_v_i64m8_m(__VA_ARGS__) |
| #define | vsoxei64_v_i8m1(...) __riscv_vsoxei64_v_i8m1(__VA_ARGS__) |
| #define | vsoxei64_v_i8m1_m(...) __riscv_vsoxei64_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxei64_v_i8mf2(...) __riscv_vsoxei64_v_i8mf2(__VA_ARGS__) |
| #define | vsoxei64_v_i8mf2_m(...) __riscv_vsoxei64_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxei64_v_i8mf4(...) __riscv_vsoxei64_v_i8mf4(__VA_ARGS__) |
| #define | vsoxei64_v_i8mf4_m(...) __riscv_vsoxei64_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxei64_v_i8mf8(...) __riscv_vsoxei64_v_i8mf8(__VA_ARGS__) |
| #define | vsoxei64_v_i8mf8_m(...) __riscv_vsoxei64_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxei64_v_u16m1(...) __riscv_vsoxei64_v_u16m1(__VA_ARGS__) |
| #define | vsoxei64_v_u16m1_m(...) __riscv_vsoxei64_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxei64_v_u16m2(...) __riscv_vsoxei64_v_u16m2(__VA_ARGS__) |
| #define | vsoxei64_v_u16m2_m(...) __riscv_vsoxei64_v_u16m2_m(__VA_ARGS__) |
| #define | vsoxei64_v_u16mf2(...) __riscv_vsoxei64_v_u16mf2(__VA_ARGS__) |
| #define | vsoxei64_v_u16mf2_m(...) __riscv_vsoxei64_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxei64_v_u16mf4(...) __riscv_vsoxei64_v_u16mf4(__VA_ARGS__) |
| #define | vsoxei64_v_u16mf4_m(...) __riscv_vsoxei64_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxei64_v_u32m1(...) __riscv_vsoxei64_v_u32m1(__VA_ARGS__) |
| #define | vsoxei64_v_u32m1_m(...) __riscv_vsoxei64_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxei64_v_u32m2(...) __riscv_vsoxei64_v_u32m2(__VA_ARGS__) |
| #define | vsoxei64_v_u32m2_m(...) __riscv_vsoxei64_v_u32m2_m(__VA_ARGS__) |
| #define | vsoxei64_v_u32m4(...) __riscv_vsoxei64_v_u32m4(__VA_ARGS__) |
| #define | vsoxei64_v_u32m4_m(...) __riscv_vsoxei64_v_u32m4_m(__VA_ARGS__) |
| #define | vsoxei64_v_u32mf2(...) __riscv_vsoxei64_v_u32mf2(__VA_ARGS__) |
| #define | vsoxei64_v_u32mf2_m(...) __riscv_vsoxei64_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxei64_v_u64m1(...) __riscv_vsoxei64_v_u64m1(__VA_ARGS__) |
| #define | vsoxei64_v_u64m1_m(...) __riscv_vsoxei64_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxei64_v_u64m2(...) __riscv_vsoxei64_v_u64m2(__VA_ARGS__) |
| #define | vsoxei64_v_u64m2_m(...) __riscv_vsoxei64_v_u64m2_m(__VA_ARGS__) |
| #define | vsoxei64_v_u64m4(...) __riscv_vsoxei64_v_u64m4(__VA_ARGS__) |
| #define | vsoxei64_v_u64m4_m(...) __riscv_vsoxei64_v_u64m4_m(__VA_ARGS__) |
| #define | vsoxei64_v_u64m8(...) __riscv_vsoxei64_v_u64m8(__VA_ARGS__) |
| #define | vsoxei64_v_u64m8_m(...) __riscv_vsoxei64_v_u64m8_m(__VA_ARGS__) |
| #define | vsoxei64_v_u8m1(...) __riscv_vsoxei64_v_u8m1(__VA_ARGS__) |
| #define | vsoxei64_v_u8m1_m(...) __riscv_vsoxei64_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxei64_v_u8mf2(...) __riscv_vsoxei64_v_u8mf2(__VA_ARGS__) |
| #define | vsoxei64_v_u8mf2_m(...) __riscv_vsoxei64_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxei64_v_u8mf4(...) __riscv_vsoxei64_v_u8mf4(__VA_ARGS__) |
| #define | vsoxei64_v_u8mf4_m(...) __riscv_vsoxei64_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxei64_v_u8mf8(...) __riscv_vsoxei64_v_u8mf8(__VA_ARGS__) |
| #define | vsoxei64_v_u8mf8_m(...) __riscv_vsoxei64_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxei8_v_f16m1(...) __riscv_vsoxei8_v_f16m1(__VA_ARGS__) |
| #define | vsoxei8_v_f16m1_m(...) __riscv_vsoxei8_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxei8_v_f16m2(...) __riscv_vsoxei8_v_f16m2(__VA_ARGS__) |
| #define | vsoxei8_v_f16m2_m(...) __riscv_vsoxei8_v_f16m2_m(__VA_ARGS__) |
| #define | vsoxei8_v_f16m4(...) __riscv_vsoxei8_v_f16m4(__VA_ARGS__) |
| #define | vsoxei8_v_f16m4_m(...) __riscv_vsoxei8_v_f16m4_m(__VA_ARGS__) |
| #define | vsoxei8_v_f16m8(...) __riscv_vsoxei8_v_f16m8(__VA_ARGS__) |
| #define | vsoxei8_v_f16m8_m(...) __riscv_vsoxei8_v_f16m8_m(__VA_ARGS__) |
| #define | vsoxei8_v_f16mf2(...) __riscv_vsoxei8_v_f16mf2(__VA_ARGS__) |
| #define | vsoxei8_v_f16mf2_m(...) __riscv_vsoxei8_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxei8_v_f16mf4(...) __riscv_vsoxei8_v_f16mf4(__VA_ARGS__) |
| #define | vsoxei8_v_f16mf4_m(...) __riscv_vsoxei8_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxei8_v_f32m1(...) __riscv_vsoxei8_v_f32m1(__VA_ARGS__) |
| #define | vsoxei8_v_f32m1_m(...) __riscv_vsoxei8_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxei8_v_f32m2(...) __riscv_vsoxei8_v_f32m2(__VA_ARGS__) |
| #define | vsoxei8_v_f32m2_m(...) __riscv_vsoxei8_v_f32m2_m(__VA_ARGS__) |
| #define | vsoxei8_v_f32m4(...) __riscv_vsoxei8_v_f32m4(__VA_ARGS__) |
| #define | vsoxei8_v_f32m4_m(...) __riscv_vsoxei8_v_f32m4_m(__VA_ARGS__) |
| #define | vsoxei8_v_f32m8(...) __riscv_vsoxei8_v_f32m8(__VA_ARGS__) |
| #define | vsoxei8_v_f32m8_m(...) __riscv_vsoxei8_v_f32m8_m(__VA_ARGS__) |
| #define | vsoxei8_v_f32mf2(...) __riscv_vsoxei8_v_f32mf2(__VA_ARGS__) |
| #define | vsoxei8_v_f32mf2_m(...) __riscv_vsoxei8_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxei8_v_f64m1(...) __riscv_vsoxei8_v_f64m1(__VA_ARGS__) |
| #define | vsoxei8_v_f64m1_m(...) __riscv_vsoxei8_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxei8_v_f64m2(...) __riscv_vsoxei8_v_f64m2(__VA_ARGS__) |
| #define | vsoxei8_v_f64m2_m(...) __riscv_vsoxei8_v_f64m2_m(__VA_ARGS__) |
| #define | vsoxei8_v_f64m4(...) __riscv_vsoxei8_v_f64m4(__VA_ARGS__) |
| #define | vsoxei8_v_f64m4_m(...) __riscv_vsoxei8_v_f64m4_m(__VA_ARGS__) |
| #define | vsoxei8_v_f64m8(...) __riscv_vsoxei8_v_f64m8(__VA_ARGS__) |
| #define | vsoxei8_v_f64m8_m(...) __riscv_vsoxei8_v_f64m8_m(__VA_ARGS__) |
| #define | vsoxei8_v_i16m1(...) __riscv_vsoxei8_v_i16m1(__VA_ARGS__) |
| #define | vsoxei8_v_i16m1_m(...) __riscv_vsoxei8_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxei8_v_i16m2(...) __riscv_vsoxei8_v_i16m2(__VA_ARGS__) |
| #define | vsoxei8_v_i16m2_m(...) __riscv_vsoxei8_v_i16m2_m(__VA_ARGS__) |
| #define | vsoxei8_v_i16m4(...) __riscv_vsoxei8_v_i16m4(__VA_ARGS__) |
| #define | vsoxei8_v_i16m4_m(...) __riscv_vsoxei8_v_i16m4_m(__VA_ARGS__) |
| #define | vsoxei8_v_i16m8(...) __riscv_vsoxei8_v_i16m8(__VA_ARGS__) |
| #define | vsoxei8_v_i16m8_m(...) __riscv_vsoxei8_v_i16m8_m(__VA_ARGS__) |
| #define | vsoxei8_v_i16mf2(...) __riscv_vsoxei8_v_i16mf2(__VA_ARGS__) |
| #define | vsoxei8_v_i16mf2_m(...) __riscv_vsoxei8_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxei8_v_i16mf4(...) __riscv_vsoxei8_v_i16mf4(__VA_ARGS__) |
| #define | vsoxei8_v_i16mf4_m(...) __riscv_vsoxei8_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxei8_v_i32m1(...) __riscv_vsoxei8_v_i32m1(__VA_ARGS__) |
| #define | vsoxei8_v_i32m1_m(...) __riscv_vsoxei8_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxei8_v_i32m2(...) __riscv_vsoxei8_v_i32m2(__VA_ARGS__) |
| #define | vsoxei8_v_i32m2_m(...) __riscv_vsoxei8_v_i32m2_m(__VA_ARGS__) |
| #define | vsoxei8_v_i32m4(...) __riscv_vsoxei8_v_i32m4(__VA_ARGS__) |
| #define | vsoxei8_v_i32m4_m(...) __riscv_vsoxei8_v_i32m4_m(__VA_ARGS__) |
| #define | vsoxei8_v_i32m8(...) __riscv_vsoxei8_v_i32m8(__VA_ARGS__) |
| #define | vsoxei8_v_i32m8_m(...) __riscv_vsoxei8_v_i32m8_m(__VA_ARGS__) |
| #define | vsoxei8_v_i32mf2(...) __riscv_vsoxei8_v_i32mf2(__VA_ARGS__) |
| #define | vsoxei8_v_i32mf2_m(...) __riscv_vsoxei8_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxei8_v_i64m1(...) __riscv_vsoxei8_v_i64m1(__VA_ARGS__) |
| #define | vsoxei8_v_i64m1_m(...) __riscv_vsoxei8_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxei8_v_i64m2(...) __riscv_vsoxei8_v_i64m2(__VA_ARGS__) |
| #define | vsoxei8_v_i64m2_m(...) __riscv_vsoxei8_v_i64m2_m(__VA_ARGS__) |
| #define | vsoxei8_v_i64m4(...) __riscv_vsoxei8_v_i64m4(__VA_ARGS__) |
| #define | vsoxei8_v_i64m4_m(...) __riscv_vsoxei8_v_i64m4_m(__VA_ARGS__) |
| #define | vsoxei8_v_i64m8(...) __riscv_vsoxei8_v_i64m8(__VA_ARGS__) |
| #define | vsoxei8_v_i64m8_m(...) __riscv_vsoxei8_v_i64m8_m(__VA_ARGS__) |
| #define | vsoxei8_v_i8m1(...) __riscv_vsoxei8_v_i8m1(__VA_ARGS__) |
| #define | vsoxei8_v_i8m1_m(...) __riscv_vsoxei8_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxei8_v_i8m2(...) __riscv_vsoxei8_v_i8m2(__VA_ARGS__) |
| #define | vsoxei8_v_i8m2_m(...) __riscv_vsoxei8_v_i8m2_m(__VA_ARGS__) |
| #define | vsoxei8_v_i8m4(...) __riscv_vsoxei8_v_i8m4(__VA_ARGS__) |
| #define | vsoxei8_v_i8m4_m(...) __riscv_vsoxei8_v_i8m4_m(__VA_ARGS__) |
| #define | vsoxei8_v_i8m8(...) __riscv_vsoxei8_v_i8m8(__VA_ARGS__) |
| #define | vsoxei8_v_i8m8_m(...) __riscv_vsoxei8_v_i8m8_m(__VA_ARGS__) |
| #define | vsoxei8_v_i8mf2(...) __riscv_vsoxei8_v_i8mf2(__VA_ARGS__) |
| #define | vsoxei8_v_i8mf2_m(...) __riscv_vsoxei8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxei8_v_i8mf4(...) __riscv_vsoxei8_v_i8mf4(__VA_ARGS__) |
| #define | vsoxei8_v_i8mf4_m(...) __riscv_vsoxei8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxei8_v_i8mf8(...) __riscv_vsoxei8_v_i8mf8(__VA_ARGS__) |
| #define | vsoxei8_v_i8mf8_m(...) __riscv_vsoxei8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxei8_v_u16m1(...) __riscv_vsoxei8_v_u16m1(__VA_ARGS__) |
| #define | vsoxei8_v_u16m1_m(...) __riscv_vsoxei8_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxei8_v_u16m2(...) __riscv_vsoxei8_v_u16m2(__VA_ARGS__) |
| #define | vsoxei8_v_u16m2_m(...) __riscv_vsoxei8_v_u16m2_m(__VA_ARGS__) |
| #define | vsoxei8_v_u16m4(...) __riscv_vsoxei8_v_u16m4(__VA_ARGS__) |
| #define | vsoxei8_v_u16m4_m(...) __riscv_vsoxei8_v_u16m4_m(__VA_ARGS__) |
| #define | vsoxei8_v_u16m8(...) __riscv_vsoxei8_v_u16m8(__VA_ARGS__) |
| #define | vsoxei8_v_u16m8_m(...) __riscv_vsoxei8_v_u16m8_m(__VA_ARGS__) |
| #define | vsoxei8_v_u16mf2(...) __riscv_vsoxei8_v_u16mf2(__VA_ARGS__) |
| #define | vsoxei8_v_u16mf2_m(...) __riscv_vsoxei8_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxei8_v_u16mf4(...) __riscv_vsoxei8_v_u16mf4(__VA_ARGS__) |
| #define | vsoxei8_v_u16mf4_m(...) __riscv_vsoxei8_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxei8_v_u32m1(...) __riscv_vsoxei8_v_u32m1(__VA_ARGS__) |
| #define | vsoxei8_v_u32m1_m(...) __riscv_vsoxei8_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxei8_v_u32m2(...) __riscv_vsoxei8_v_u32m2(__VA_ARGS__) |
| #define | vsoxei8_v_u32m2_m(...) __riscv_vsoxei8_v_u32m2_m(__VA_ARGS__) |
| #define | vsoxei8_v_u32m4(...) __riscv_vsoxei8_v_u32m4(__VA_ARGS__) |
| #define | vsoxei8_v_u32m4_m(...) __riscv_vsoxei8_v_u32m4_m(__VA_ARGS__) |
| #define | vsoxei8_v_u32m8(...) __riscv_vsoxei8_v_u32m8(__VA_ARGS__) |
| #define | vsoxei8_v_u32m8_m(...) __riscv_vsoxei8_v_u32m8_m(__VA_ARGS__) |
| #define | vsoxei8_v_u32mf2(...) __riscv_vsoxei8_v_u32mf2(__VA_ARGS__) |
| #define | vsoxei8_v_u32mf2_m(...) __riscv_vsoxei8_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxei8_v_u64m1(...) __riscv_vsoxei8_v_u64m1(__VA_ARGS__) |
| #define | vsoxei8_v_u64m1_m(...) __riscv_vsoxei8_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxei8_v_u64m2(...) __riscv_vsoxei8_v_u64m2(__VA_ARGS__) |
| #define | vsoxei8_v_u64m2_m(...) __riscv_vsoxei8_v_u64m2_m(__VA_ARGS__) |
| #define | vsoxei8_v_u64m4(...) __riscv_vsoxei8_v_u64m4(__VA_ARGS__) |
| #define | vsoxei8_v_u64m4_m(...) __riscv_vsoxei8_v_u64m4_m(__VA_ARGS__) |
| #define | vsoxei8_v_u64m8(...) __riscv_vsoxei8_v_u64m8(__VA_ARGS__) |
| #define | vsoxei8_v_u64m8_m(...) __riscv_vsoxei8_v_u64m8_m(__VA_ARGS__) |
| #define | vsoxei8_v_u8m1(...) __riscv_vsoxei8_v_u8m1(__VA_ARGS__) |
| #define | vsoxei8_v_u8m1_m(...) __riscv_vsoxei8_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxei8_v_u8m2(...) __riscv_vsoxei8_v_u8m2(__VA_ARGS__) |
| #define | vsoxei8_v_u8m2_m(...) __riscv_vsoxei8_v_u8m2_m(__VA_ARGS__) |
| #define | vsoxei8_v_u8m4(...) __riscv_vsoxei8_v_u8m4(__VA_ARGS__) |
| #define | vsoxei8_v_u8m4_m(...) __riscv_vsoxei8_v_u8m4_m(__VA_ARGS__) |
| #define | vsoxei8_v_u8m8(...) __riscv_vsoxei8_v_u8m8(__VA_ARGS__) |
| #define | vsoxei8_v_u8m8_m(...) __riscv_vsoxei8_v_u8m8_m(__VA_ARGS__) |
| #define | vsoxei8_v_u8mf2(...) __riscv_vsoxei8_v_u8mf2(__VA_ARGS__) |
| #define | vsoxei8_v_u8mf2_m(...) __riscv_vsoxei8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxei8_v_u8mf4(...) __riscv_vsoxei8_v_u8mf4(__VA_ARGS__) |
| #define | vsoxei8_v_u8mf4_m(...) __riscv_vsoxei8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxei8_v_u8mf8(...) __riscv_vsoxei8_v_u8mf8(__VA_ARGS__) |
| #define | vsoxei8_v_u8mf8_m(...) __riscv_vsoxei8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f16m1(...) __riscv_vsoxseg2ei16_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f16m1_m(...) __riscv_vsoxseg2ei16_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f16m2(...) __riscv_vsoxseg2ei16_v_f16m2(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f16m2_m(...) __riscv_vsoxseg2ei16_v_f16m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f16m4(...) __riscv_vsoxseg2ei16_v_f16m4(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f16m4_m(...) __riscv_vsoxseg2ei16_v_f16m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f16mf2(...) __riscv_vsoxseg2ei16_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f16mf2_m(...) __riscv_vsoxseg2ei16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f16mf4(...) __riscv_vsoxseg2ei16_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f16mf4_m(...) __riscv_vsoxseg2ei16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f32m1(...) __riscv_vsoxseg2ei16_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f32m1_m(...) __riscv_vsoxseg2ei16_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f32m2(...) __riscv_vsoxseg2ei16_v_f32m2(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f32m2_m(...) __riscv_vsoxseg2ei16_v_f32m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f32m4(...) __riscv_vsoxseg2ei16_v_f32m4(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f32m4_m(...) __riscv_vsoxseg2ei16_v_f32m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f32mf2(...) __riscv_vsoxseg2ei16_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f32mf2_m(...) __riscv_vsoxseg2ei16_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f64m1(...) __riscv_vsoxseg2ei16_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f64m1_m(...) __riscv_vsoxseg2ei16_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f64m2(...) __riscv_vsoxseg2ei16_v_f64m2(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f64m2_m(...) __riscv_vsoxseg2ei16_v_f64m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f64m4(...) __riscv_vsoxseg2ei16_v_f64m4(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_f64m4_m(...) __riscv_vsoxseg2ei16_v_f64m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i16m1(...) __riscv_vsoxseg2ei16_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i16m1_m(...) __riscv_vsoxseg2ei16_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i16m2(...) __riscv_vsoxseg2ei16_v_i16m2(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i16m2_m(...) __riscv_vsoxseg2ei16_v_i16m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i16m4(...) __riscv_vsoxseg2ei16_v_i16m4(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i16m4_m(...) __riscv_vsoxseg2ei16_v_i16m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i16mf2(...) __riscv_vsoxseg2ei16_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i16mf2_m(...) __riscv_vsoxseg2ei16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i16mf4(...) __riscv_vsoxseg2ei16_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i16mf4_m(...) __riscv_vsoxseg2ei16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i32m1(...) __riscv_vsoxseg2ei16_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i32m1_m(...) __riscv_vsoxseg2ei16_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i32m2(...) __riscv_vsoxseg2ei16_v_i32m2(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i32m2_m(...) __riscv_vsoxseg2ei16_v_i32m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i32m4(...) __riscv_vsoxseg2ei16_v_i32m4(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i32m4_m(...) __riscv_vsoxseg2ei16_v_i32m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i32mf2(...) __riscv_vsoxseg2ei16_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i32mf2_m(...) __riscv_vsoxseg2ei16_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i64m1(...) __riscv_vsoxseg2ei16_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i64m1_m(...) __riscv_vsoxseg2ei16_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i64m2(...) __riscv_vsoxseg2ei16_v_i64m2(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i64m2_m(...) __riscv_vsoxseg2ei16_v_i64m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i64m4(...) __riscv_vsoxseg2ei16_v_i64m4(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i64m4_m(...) __riscv_vsoxseg2ei16_v_i64m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i8m1(...) __riscv_vsoxseg2ei16_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i8m1_m(...) __riscv_vsoxseg2ei16_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i8m2(...) __riscv_vsoxseg2ei16_v_i8m2(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i8m2_m(...) __riscv_vsoxseg2ei16_v_i8m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i8m4(...) __riscv_vsoxseg2ei16_v_i8m4(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i8m4_m(...) __riscv_vsoxseg2ei16_v_i8m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i8mf2(...) __riscv_vsoxseg2ei16_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i8mf2_m(...) __riscv_vsoxseg2ei16_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i8mf4(...) __riscv_vsoxseg2ei16_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i8mf4_m(...) __riscv_vsoxseg2ei16_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i8mf8(...) __riscv_vsoxseg2ei16_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_i8mf8_m(...) __riscv_vsoxseg2ei16_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u16m1(...) __riscv_vsoxseg2ei16_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u16m1_m(...) __riscv_vsoxseg2ei16_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u16m2(...) __riscv_vsoxseg2ei16_v_u16m2(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u16m2_m(...) __riscv_vsoxseg2ei16_v_u16m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u16m4(...) __riscv_vsoxseg2ei16_v_u16m4(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u16m4_m(...) __riscv_vsoxseg2ei16_v_u16m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u16mf2(...) __riscv_vsoxseg2ei16_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u16mf2_m(...) __riscv_vsoxseg2ei16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u16mf4(...) __riscv_vsoxseg2ei16_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u16mf4_m(...) __riscv_vsoxseg2ei16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u32m1(...) __riscv_vsoxseg2ei16_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u32m1_m(...) __riscv_vsoxseg2ei16_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u32m2(...) __riscv_vsoxseg2ei16_v_u32m2(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u32m2_m(...) __riscv_vsoxseg2ei16_v_u32m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u32m4(...) __riscv_vsoxseg2ei16_v_u32m4(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u32m4_m(...) __riscv_vsoxseg2ei16_v_u32m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u32mf2(...) __riscv_vsoxseg2ei16_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u32mf2_m(...) __riscv_vsoxseg2ei16_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u64m1(...) __riscv_vsoxseg2ei16_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u64m1_m(...) __riscv_vsoxseg2ei16_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u64m2(...) __riscv_vsoxseg2ei16_v_u64m2(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u64m2_m(...) __riscv_vsoxseg2ei16_v_u64m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u64m4(...) __riscv_vsoxseg2ei16_v_u64m4(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u64m4_m(...) __riscv_vsoxseg2ei16_v_u64m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u8m1(...) __riscv_vsoxseg2ei16_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u8m1_m(...) __riscv_vsoxseg2ei16_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u8m2(...) __riscv_vsoxseg2ei16_v_u8m2(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u8m2_m(...) __riscv_vsoxseg2ei16_v_u8m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u8m4(...) __riscv_vsoxseg2ei16_v_u8m4(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u8m4_m(...) __riscv_vsoxseg2ei16_v_u8m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u8mf2(...) __riscv_vsoxseg2ei16_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u8mf2_m(...) __riscv_vsoxseg2ei16_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u8mf4(...) __riscv_vsoxseg2ei16_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u8mf4_m(...) __riscv_vsoxseg2ei16_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u8mf8(...) __riscv_vsoxseg2ei16_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg2ei16_v_u8mf8_m(...) __riscv_vsoxseg2ei16_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f16m1(...) __riscv_vsoxseg2ei32_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f16m1_m(...) __riscv_vsoxseg2ei32_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f16m2(...) __riscv_vsoxseg2ei32_v_f16m2(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f16m2_m(...) __riscv_vsoxseg2ei32_v_f16m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f16m4(...) __riscv_vsoxseg2ei32_v_f16m4(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f16m4_m(...) __riscv_vsoxseg2ei32_v_f16m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f16mf2(...) __riscv_vsoxseg2ei32_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f16mf2_m(...) __riscv_vsoxseg2ei32_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f16mf4(...) __riscv_vsoxseg2ei32_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f16mf4_m(...) __riscv_vsoxseg2ei32_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f32m1(...) __riscv_vsoxseg2ei32_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f32m1_m(...) __riscv_vsoxseg2ei32_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f32m2(...) __riscv_vsoxseg2ei32_v_f32m2(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f32m2_m(...) __riscv_vsoxseg2ei32_v_f32m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f32m4(...) __riscv_vsoxseg2ei32_v_f32m4(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f32m4_m(...) __riscv_vsoxseg2ei32_v_f32m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f32mf2(...) __riscv_vsoxseg2ei32_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f32mf2_m(...) __riscv_vsoxseg2ei32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f64m1(...) __riscv_vsoxseg2ei32_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f64m1_m(...) __riscv_vsoxseg2ei32_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f64m2(...) __riscv_vsoxseg2ei32_v_f64m2(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f64m2_m(...) __riscv_vsoxseg2ei32_v_f64m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f64m4(...) __riscv_vsoxseg2ei32_v_f64m4(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_f64m4_m(...) __riscv_vsoxseg2ei32_v_f64m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i16m1(...) __riscv_vsoxseg2ei32_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i16m1_m(...) __riscv_vsoxseg2ei32_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i16m2(...) __riscv_vsoxseg2ei32_v_i16m2(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i16m2_m(...) __riscv_vsoxseg2ei32_v_i16m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i16m4(...) __riscv_vsoxseg2ei32_v_i16m4(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i16m4_m(...) __riscv_vsoxseg2ei32_v_i16m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i16mf2(...) __riscv_vsoxseg2ei32_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i16mf2_m(...) __riscv_vsoxseg2ei32_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i16mf4(...) __riscv_vsoxseg2ei32_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i16mf4_m(...) __riscv_vsoxseg2ei32_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i32m1(...) __riscv_vsoxseg2ei32_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i32m1_m(...) __riscv_vsoxseg2ei32_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i32m2(...) __riscv_vsoxseg2ei32_v_i32m2(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i32m2_m(...) __riscv_vsoxseg2ei32_v_i32m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i32m4(...) __riscv_vsoxseg2ei32_v_i32m4(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i32m4_m(...) __riscv_vsoxseg2ei32_v_i32m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i32mf2(...) __riscv_vsoxseg2ei32_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i32mf2_m(...) __riscv_vsoxseg2ei32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i64m1(...) __riscv_vsoxseg2ei32_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i64m1_m(...) __riscv_vsoxseg2ei32_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i64m2(...) __riscv_vsoxseg2ei32_v_i64m2(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i64m2_m(...) __riscv_vsoxseg2ei32_v_i64m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i64m4(...) __riscv_vsoxseg2ei32_v_i64m4(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i64m4_m(...) __riscv_vsoxseg2ei32_v_i64m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i8m1(...) __riscv_vsoxseg2ei32_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i8m1_m(...) __riscv_vsoxseg2ei32_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i8m2(...) __riscv_vsoxseg2ei32_v_i8m2(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i8m2_m(...) __riscv_vsoxseg2ei32_v_i8m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i8mf2(...) __riscv_vsoxseg2ei32_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i8mf2_m(...) __riscv_vsoxseg2ei32_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i8mf4(...) __riscv_vsoxseg2ei32_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i8mf4_m(...) __riscv_vsoxseg2ei32_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i8mf8(...) __riscv_vsoxseg2ei32_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_i8mf8_m(...) __riscv_vsoxseg2ei32_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u16m1(...) __riscv_vsoxseg2ei32_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u16m1_m(...) __riscv_vsoxseg2ei32_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u16m2(...) __riscv_vsoxseg2ei32_v_u16m2(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u16m2_m(...) __riscv_vsoxseg2ei32_v_u16m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u16m4(...) __riscv_vsoxseg2ei32_v_u16m4(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u16m4_m(...) __riscv_vsoxseg2ei32_v_u16m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u16mf2(...) __riscv_vsoxseg2ei32_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u16mf2_m(...) __riscv_vsoxseg2ei32_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u16mf4(...) __riscv_vsoxseg2ei32_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u16mf4_m(...) __riscv_vsoxseg2ei32_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u32m1(...) __riscv_vsoxseg2ei32_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u32m1_m(...) __riscv_vsoxseg2ei32_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u32m2(...) __riscv_vsoxseg2ei32_v_u32m2(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u32m2_m(...) __riscv_vsoxseg2ei32_v_u32m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u32m4(...) __riscv_vsoxseg2ei32_v_u32m4(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u32m4_m(...) __riscv_vsoxseg2ei32_v_u32m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u32mf2(...) __riscv_vsoxseg2ei32_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u32mf2_m(...) __riscv_vsoxseg2ei32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u64m1(...) __riscv_vsoxseg2ei32_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u64m1_m(...) __riscv_vsoxseg2ei32_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u64m2(...) __riscv_vsoxseg2ei32_v_u64m2(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u64m2_m(...) __riscv_vsoxseg2ei32_v_u64m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u64m4(...) __riscv_vsoxseg2ei32_v_u64m4(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u64m4_m(...) __riscv_vsoxseg2ei32_v_u64m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u8m1(...) __riscv_vsoxseg2ei32_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u8m1_m(...) __riscv_vsoxseg2ei32_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u8m2(...) __riscv_vsoxseg2ei32_v_u8m2(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u8m2_m(...) __riscv_vsoxseg2ei32_v_u8m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u8mf2(...) __riscv_vsoxseg2ei32_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u8mf2_m(...) __riscv_vsoxseg2ei32_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u8mf4(...) __riscv_vsoxseg2ei32_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u8mf4_m(...) __riscv_vsoxseg2ei32_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u8mf8(...) __riscv_vsoxseg2ei32_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg2ei32_v_u8mf8_m(...) __riscv_vsoxseg2ei32_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f16m1(...) __riscv_vsoxseg2ei64_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f16m1_m(...) __riscv_vsoxseg2ei64_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f16m2(...) __riscv_vsoxseg2ei64_v_f16m2(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f16m2_m(...) __riscv_vsoxseg2ei64_v_f16m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f16mf2(...) __riscv_vsoxseg2ei64_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f16mf2_m(...) __riscv_vsoxseg2ei64_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f16mf4(...) __riscv_vsoxseg2ei64_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f16mf4_m(...) __riscv_vsoxseg2ei64_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f32m1(...) __riscv_vsoxseg2ei64_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f32m1_m(...) __riscv_vsoxseg2ei64_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f32m2(...) __riscv_vsoxseg2ei64_v_f32m2(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f32m2_m(...) __riscv_vsoxseg2ei64_v_f32m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f32m4(...) __riscv_vsoxseg2ei64_v_f32m4(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f32m4_m(...) __riscv_vsoxseg2ei64_v_f32m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f32mf2(...) __riscv_vsoxseg2ei64_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f32mf2_m(...) __riscv_vsoxseg2ei64_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f64m1(...) __riscv_vsoxseg2ei64_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f64m1_m(...) __riscv_vsoxseg2ei64_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f64m2(...) __riscv_vsoxseg2ei64_v_f64m2(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f64m2_m(...) __riscv_vsoxseg2ei64_v_f64m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f64m4(...) __riscv_vsoxseg2ei64_v_f64m4(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_f64m4_m(...) __riscv_vsoxseg2ei64_v_f64m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i16m1(...) __riscv_vsoxseg2ei64_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i16m1_m(...) __riscv_vsoxseg2ei64_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i16m2(...) __riscv_vsoxseg2ei64_v_i16m2(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i16m2_m(...) __riscv_vsoxseg2ei64_v_i16m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i16mf2(...) __riscv_vsoxseg2ei64_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i16mf2_m(...) __riscv_vsoxseg2ei64_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i16mf4(...) __riscv_vsoxseg2ei64_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i16mf4_m(...) __riscv_vsoxseg2ei64_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i32m1(...) __riscv_vsoxseg2ei64_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i32m1_m(...) __riscv_vsoxseg2ei64_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i32m2(...) __riscv_vsoxseg2ei64_v_i32m2(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i32m2_m(...) __riscv_vsoxseg2ei64_v_i32m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i32m4(...) __riscv_vsoxseg2ei64_v_i32m4(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i32m4_m(...) __riscv_vsoxseg2ei64_v_i32m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i32mf2(...) __riscv_vsoxseg2ei64_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i32mf2_m(...) __riscv_vsoxseg2ei64_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i64m1(...) __riscv_vsoxseg2ei64_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i64m1_m(...) __riscv_vsoxseg2ei64_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i64m2(...) __riscv_vsoxseg2ei64_v_i64m2(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i64m2_m(...) __riscv_vsoxseg2ei64_v_i64m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i64m4(...) __riscv_vsoxseg2ei64_v_i64m4(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i64m4_m(...) __riscv_vsoxseg2ei64_v_i64m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i8m1(...) __riscv_vsoxseg2ei64_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i8m1_m(...) __riscv_vsoxseg2ei64_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i8mf2(...) __riscv_vsoxseg2ei64_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i8mf2_m(...) __riscv_vsoxseg2ei64_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i8mf4(...) __riscv_vsoxseg2ei64_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i8mf4_m(...) __riscv_vsoxseg2ei64_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i8mf8(...) __riscv_vsoxseg2ei64_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_i8mf8_m(...) __riscv_vsoxseg2ei64_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u16m1(...) __riscv_vsoxseg2ei64_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u16m1_m(...) __riscv_vsoxseg2ei64_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u16m2(...) __riscv_vsoxseg2ei64_v_u16m2(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u16m2_m(...) __riscv_vsoxseg2ei64_v_u16m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u16mf2(...) __riscv_vsoxseg2ei64_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u16mf2_m(...) __riscv_vsoxseg2ei64_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u16mf4(...) __riscv_vsoxseg2ei64_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u16mf4_m(...) __riscv_vsoxseg2ei64_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u32m1(...) __riscv_vsoxseg2ei64_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u32m1_m(...) __riscv_vsoxseg2ei64_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u32m2(...) __riscv_vsoxseg2ei64_v_u32m2(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u32m2_m(...) __riscv_vsoxseg2ei64_v_u32m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u32m4(...) __riscv_vsoxseg2ei64_v_u32m4(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u32m4_m(...) __riscv_vsoxseg2ei64_v_u32m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u32mf2(...) __riscv_vsoxseg2ei64_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u32mf2_m(...) __riscv_vsoxseg2ei64_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u64m1(...) __riscv_vsoxseg2ei64_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u64m1_m(...) __riscv_vsoxseg2ei64_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u64m2(...) __riscv_vsoxseg2ei64_v_u64m2(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u64m2_m(...) __riscv_vsoxseg2ei64_v_u64m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u64m4(...) __riscv_vsoxseg2ei64_v_u64m4(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u64m4_m(...) __riscv_vsoxseg2ei64_v_u64m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u8m1(...) __riscv_vsoxseg2ei64_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u8m1_m(...) __riscv_vsoxseg2ei64_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u8mf2(...) __riscv_vsoxseg2ei64_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u8mf2_m(...) __riscv_vsoxseg2ei64_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u8mf4(...) __riscv_vsoxseg2ei64_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u8mf4_m(...) __riscv_vsoxseg2ei64_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u8mf8(...) __riscv_vsoxseg2ei64_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg2ei64_v_u8mf8_m(...) __riscv_vsoxseg2ei64_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f16m1(...) __riscv_vsoxseg2ei8_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f16m1_m(...) __riscv_vsoxseg2ei8_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f16m2(...) __riscv_vsoxseg2ei8_v_f16m2(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f16m2_m(...) __riscv_vsoxseg2ei8_v_f16m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f16m4(...) __riscv_vsoxseg2ei8_v_f16m4(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f16m4_m(...) __riscv_vsoxseg2ei8_v_f16m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f16mf2(...) __riscv_vsoxseg2ei8_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f16mf2_m(...) __riscv_vsoxseg2ei8_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f16mf4(...) __riscv_vsoxseg2ei8_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f16mf4_m(...) __riscv_vsoxseg2ei8_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f32m1(...) __riscv_vsoxseg2ei8_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f32m1_m(...) __riscv_vsoxseg2ei8_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f32m2(...) __riscv_vsoxseg2ei8_v_f32m2(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f32m2_m(...) __riscv_vsoxseg2ei8_v_f32m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f32m4(...) __riscv_vsoxseg2ei8_v_f32m4(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f32m4_m(...) __riscv_vsoxseg2ei8_v_f32m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f32mf2(...) __riscv_vsoxseg2ei8_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f32mf2_m(...) __riscv_vsoxseg2ei8_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f64m1(...) __riscv_vsoxseg2ei8_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f64m1_m(...) __riscv_vsoxseg2ei8_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f64m2(...) __riscv_vsoxseg2ei8_v_f64m2(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f64m2_m(...) __riscv_vsoxseg2ei8_v_f64m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f64m4(...) __riscv_vsoxseg2ei8_v_f64m4(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_f64m4_m(...) __riscv_vsoxseg2ei8_v_f64m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i16m1(...) __riscv_vsoxseg2ei8_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i16m1_m(...) __riscv_vsoxseg2ei8_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i16m2(...) __riscv_vsoxseg2ei8_v_i16m2(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i16m2_m(...) __riscv_vsoxseg2ei8_v_i16m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i16m4(...) __riscv_vsoxseg2ei8_v_i16m4(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i16m4_m(...) __riscv_vsoxseg2ei8_v_i16m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i16mf2(...) __riscv_vsoxseg2ei8_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i16mf2_m(...) __riscv_vsoxseg2ei8_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i16mf4(...) __riscv_vsoxseg2ei8_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i16mf4_m(...) __riscv_vsoxseg2ei8_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i32m1(...) __riscv_vsoxseg2ei8_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i32m1_m(...) __riscv_vsoxseg2ei8_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i32m2(...) __riscv_vsoxseg2ei8_v_i32m2(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i32m2_m(...) __riscv_vsoxseg2ei8_v_i32m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i32m4(...) __riscv_vsoxseg2ei8_v_i32m4(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i32m4_m(...) __riscv_vsoxseg2ei8_v_i32m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i32mf2(...) __riscv_vsoxseg2ei8_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i32mf2_m(...) __riscv_vsoxseg2ei8_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i64m1(...) __riscv_vsoxseg2ei8_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i64m1_m(...) __riscv_vsoxseg2ei8_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i64m2(...) __riscv_vsoxseg2ei8_v_i64m2(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i64m2_m(...) __riscv_vsoxseg2ei8_v_i64m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i64m4(...) __riscv_vsoxseg2ei8_v_i64m4(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i64m4_m(...) __riscv_vsoxseg2ei8_v_i64m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i8m1(...) __riscv_vsoxseg2ei8_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i8m1_m(...) __riscv_vsoxseg2ei8_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i8m2(...) __riscv_vsoxseg2ei8_v_i8m2(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i8m2_m(...) __riscv_vsoxseg2ei8_v_i8m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i8m4(...) __riscv_vsoxseg2ei8_v_i8m4(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i8m4_m(...) __riscv_vsoxseg2ei8_v_i8m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i8mf2(...) __riscv_vsoxseg2ei8_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i8mf2_m(...) __riscv_vsoxseg2ei8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i8mf4(...) __riscv_vsoxseg2ei8_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i8mf4_m(...) __riscv_vsoxseg2ei8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i8mf8(...) __riscv_vsoxseg2ei8_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_i8mf8_m(...) __riscv_vsoxseg2ei8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u16m1(...) __riscv_vsoxseg2ei8_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u16m1_m(...) __riscv_vsoxseg2ei8_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u16m2(...) __riscv_vsoxseg2ei8_v_u16m2(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u16m2_m(...) __riscv_vsoxseg2ei8_v_u16m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u16m4(...) __riscv_vsoxseg2ei8_v_u16m4(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u16m4_m(...) __riscv_vsoxseg2ei8_v_u16m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u16mf2(...) __riscv_vsoxseg2ei8_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u16mf2_m(...) __riscv_vsoxseg2ei8_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u16mf4(...) __riscv_vsoxseg2ei8_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u16mf4_m(...) __riscv_vsoxseg2ei8_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u32m1(...) __riscv_vsoxseg2ei8_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u32m1_m(...) __riscv_vsoxseg2ei8_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u32m2(...) __riscv_vsoxseg2ei8_v_u32m2(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u32m2_m(...) __riscv_vsoxseg2ei8_v_u32m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u32m4(...) __riscv_vsoxseg2ei8_v_u32m4(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u32m4_m(...) __riscv_vsoxseg2ei8_v_u32m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u32mf2(...) __riscv_vsoxseg2ei8_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u32mf2_m(...) __riscv_vsoxseg2ei8_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u64m1(...) __riscv_vsoxseg2ei8_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u64m1_m(...) __riscv_vsoxseg2ei8_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u64m2(...) __riscv_vsoxseg2ei8_v_u64m2(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u64m2_m(...) __riscv_vsoxseg2ei8_v_u64m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u64m4(...) __riscv_vsoxseg2ei8_v_u64m4(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u64m4_m(...) __riscv_vsoxseg2ei8_v_u64m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u8m1(...) __riscv_vsoxseg2ei8_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u8m1_m(...) __riscv_vsoxseg2ei8_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u8m2(...) __riscv_vsoxseg2ei8_v_u8m2(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u8m2_m(...) __riscv_vsoxseg2ei8_v_u8m2_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u8m4(...) __riscv_vsoxseg2ei8_v_u8m4(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u8m4_m(...) __riscv_vsoxseg2ei8_v_u8m4_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u8mf2(...) __riscv_vsoxseg2ei8_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u8mf2_m(...) __riscv_vsoxseg2ei8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u8mf4(...) __riscv_vsoxseg2ei8_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u8mf4_m(...) __riscv_vsoxseg2ei8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u8mf8(...) __riscv_vsoxseg2ei8_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg2ei8_v_u8mf8_m(...) __riscv_vsoxseg2ei8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_f16m1(...) __riscv_vsoxseg3ei16_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_f16m1_m(...) __riscv_vsoxseg3ei16_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_f16m2(...) __riscv_vsoxseg3ei16_v_f16m2(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_f16m2_m(...) __riscv_vsoxseg3ei16_v_f16m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_f16mf2(...) __riscv_vsoxseg3ei16_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_f16mf2_m(...) __riscv_vsoxseg3ei16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_f16mf4(...) __riscv_vsoxseg3ei16_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_f16mf4_m(...) __riscv_vsoxseg3ei16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_f32m1(...) __riscv_vsoxseg3ei16_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_f32m1_m(...) __riscv_vsoxseg3ei16_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_f32m2(...) __riscv_vsoxseg3ei16_v_f32m2(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_f32m2_m(...) __riscv_vsoxseg3ei16_v_f32m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_f32mf2(...) __riscv_vsoxseg3ei16_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_f32mf2_m(...) __riscv_vsoxseg3ei16_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_f64m1(...) __riscv_vsoxseg3ei16_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_f64m1_m(...) __riscv_vsoxseg3ei16_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_f64m2(...) __riscv_vsoxseg3ei16_v_f64m2(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_f64m2_m(...) __riscv_vsoxseg3ei16_v_f64m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i16m1(...) __riscv_vsoxseg3ei16_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i16m1_m(...) __riscv_vsoxseg3ei16_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i16m2(...) __riscv_vsoxseg3ei16_v_i16m2(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i16m2_m(...) __riscv_vsoxseg3ei16_v_i16m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i16mf2(...) __riscv_vsoxseg3ei16_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i16mf2_m(...) __riscv_vsoxseg3ei16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i16mf4(...) __riscv_vsoxseg3ei16_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i16mf4_m(...) __riscv_vsoxseg3ei16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i32m1(...) __riscv_vsoxseg3ei16_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i32m1_m(...) __riscv_vsoxseg3ei16_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i32m2(...) __riscv_vsoxseg3ei16_v_i32m2(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i32m2_m(...) __riscv_vsoxseg3ei16_v_i32m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i32mf2(...) __riscv_vsoxseg3ei16_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i32mf2_m(...) __riscv_vsoxseg3ei16_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i64m1(...) __riscv_vsoxseg3ei16_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i64m1_m(...) __riscv_vsoxseg3ei16_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i64m2(...) __riscv_vsoxseg3ei16_v_i64m2(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i64m2_m(...) __riscv_vsoxseg3ei16_v_i64m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i8m1(...) __riscv_vsoxseg3ei16_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i8m1_m(...) __riscv_vsoxseg3ei16_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i8m2(...) __riscv_vsoxseg3ei16_v_i8m2(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i8m2_m(...) __riscv_vsoxseg3ei16_v_i8m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i8mf2(...) __riscv_vsoxseg3ei16_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i8mf2_m(...) __riscv_vsoxseg3ei16_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i8mf4(...) __riscv_vsoxseg3ei16_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i8mf4_m(...) __riscv_vsoxseg3ei16_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i8mf8(...) __riscv_vsoxseg3ei16_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_i8mf8_m(...) __riscv_vsoxseg3ei16_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u16m1(...) __riscv_vsoxseg3ei16_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u16m1_m(...) __riscv_vsoxseg3ei16_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u16m2(...) __riscv_vsoxseg3ei16_v_u16m2(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u16m2_m(...) __riscv_vsoxseg3ei16_v_u16m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u16mf2(...) __riscv_vsoxseg3ei16_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u16mf2_m(...) __riscv_vsoxseg3ei16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u16mf4(...) __riscv_vsoxseg3ei16_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u16mf4_m(...) __riscv_vsoxseg3ei16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u32m1(...) __riscv_vsoxseg3ei16_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u32m1_m(...) __riscv_vsoxseg3ei16_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u32m2(...) __riscv_vsoxseg3ei16_v_u32m2(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u32m2_m(...) __riscv_vsoxseg3ei16_v_u32m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u32mf2(...) __riscv_vsoxseg3ei16_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u32mf2_m(...) __riscv_vsoxseg3ei16_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u64m1(...) __riscv_vsoxseg3ei16_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u64m1_m(...) __riscv_vsoxseg3ei16_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u64m2(...) __riscv_vsoxseg3ei16_v_u64m2(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u64m2_m(...) __riscv_vsoxseg3ei16_v_u64m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u8m1(...) __riscv_vsoxseg3ei16_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u8m1_m(...) __riscv_vsoxseg3ei16_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u8m2(...) __riscv_vsoxseg3ei16_v_u8m2(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u8m2_m(...) __riscv_vsoxseg3ei16_v_u8m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u8mf2(...) __riscv_vsoxseg3ei16_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u8mf2_m(...) __riscv_vsoxseg3ei16_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u8mf4(...) __riscv_vsoxseg3ei16_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u8mf4_m(...) __riscv_vsoxseg3ei16_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u8mf8(...) __riscv_vsoxseg3ei16_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg3ei16_v_u8mf8_m(...) __riscv_vsoxseg3ei16_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_f16m1(...) __riscv_vsoxseg3ei32_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_f16m1_m(...) __riscv_vsoxseg3ei32_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_f16m2(...) __riscv_vsoxseg3ei32_v_f16m2(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_f16m2_m(...) __riscv_vsoxseg3ei32_v_f16m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_f16mf2(...) __riscv_vsoxseg3ei32_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_f16mf2_m(...) __riscv_vsoxseg3ei32_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_f16mf4(...) __riscv_vsoxseg3ei32_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_f16mf4_m(...) __riscv_vsoxseg3ei32_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_f32m1(...) __riscv_vsoxseg3ei32_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_f32m1_m(...) __riscv_vsoxseg3ei32_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_f32m2(...) __riscv_vsoxseg3ei32_v_f32m2(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_f32m2_m(...) __riscv_vsoxseg3ei32_v_f32m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_f32mf2(...) __riscv_vsoxseg3ei32_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_f32mf2_m(...) __riscv_vsoxseg3ei32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_f64m1(...) __riscv_vsoxseg3ei32_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_f64m1_m(...) __riscv_vsoxseg3ei32_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_f64m2(...) __riscv_vsoxseg3ei32_v_f64m2(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_f64m2_m(...) __riscv_vsoxseg3ei32_v_f64m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i16m1(...) __riscv_vsoxseg3ei32_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i16m1_m(...) __riscv_vsoxseg3ei32_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i16m2(...) __riscv_vsoxseg3ei32_v_i16m2(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i16m2_m(...) __riscv_vsoxseg3ei32_v_i16m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i16mf2(...) __riscv_vsoxseg3ei32_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i16mf2_m(...) __riscv_vsoxseg3ei32_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i16mf4(...) __riscv_vsoxseg3ei32_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i16mf4_m(...) __riscv_vsoxseg3ei32_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i32m1(...) __riscv_vsoxseg3ei32_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i32m1_m(...) __riscv_vsoxseg3ei32_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i32m2(...) __riscv_vsoxseg3ei32_v_i32m2(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i32m2_m(...) __riscv_vsoxseg3ei32_v_i32m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i32mf2(...) __riscv_vsoxseg3ei32_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i32mf2_m(...) __riscv_vsoxseg3ei32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i64m1(...) __riscv_vsoxseg3ei32_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i64m1_m(...) __riscv_vsoxseg3ei32_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i64m2(...) __riscv_vsoxseg3ei32_v_i64m2(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i64m2_m(...) __riscv_vsoxseg3ei32_v_i64m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i8m1(...) __riscv_vsoxseg3ei32_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i8m1_m(...) __riscv_vsoxseg3ei32_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i8m2(...) __riscv_vsoxseg3ei32_v_i8m2(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i8m2_m(...) __riscv_vsoxseg3ei32_v_i8m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i8mf2(...) __riscv_vsoxseg3ei32_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i8mf2_m(...) __riscv_vsoxseg3ei32_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i8mf4(...) __riscv_vsoxseg3ei32_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i8mf4_m(...) __riscv_vsoxseg3ei32_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i8mf8(...) __riscv_vsoxseg3ei32_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_i8mf8_m(...) __riscv_vsoxseg3ei32_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u16m1(...) __riscv_vsoxseg3ei32_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u16m1_m(...) __riscv_vsoxseg3ei32_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u16m2(...) __riscv_vsoxseg3ei32_v_u16m2(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u16m2_m(...) __riscv_vsoxseg3ei32_v_u16m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u16mf2(...) __riscv_vsoxseg3ei32_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u16mf2_m(...) __riscv_vsoxseg3ei32_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u16mf4(...) __riscv_vsoxseg3ei32_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u16mf4_m(...) __riscv_vsoxseg3ei32_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u32m1(...) __riscv_vsoxseg3ei32_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u32m1_m(...) __riscv_vsoxseg3ei32_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u32m2(...) __riscv_vsoxseg3ei32_v_u32m2(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u32m2_m(...) __riscv_vsoxseg3ei32_v_u32m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u32mf2(...) __riscv_vsoxseg3ei32_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u32mf2_m(...) __riscv_vsoxseg3ei32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u64m1(...) __riscv_vsoxseg3ei32_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u64m1_m(...) __riscv_vsoxseg3ei32_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u64m2(...) __riscv_vsoxseg3ei32_v_u64m2(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u64m2_m(...) __riscv_vsoxseg3ei32_v_u64m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u8m1(...) __riscv_vsoxseg3ei32_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u8m1_m(...) __riscv_vsoxseg3ei32_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u8m2(...) __riscv_vsoxseg3ei32_v_u8m2(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u8m2_m(...) __riscv_vsoxseg3ei32_v_u8m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u8mf2(...) __riscv_vsoxseg3ei32_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u8mf2_m(...) __riscv_vsoxseg3ei32_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u8mf4(...) __riscv_vsoxseg3ei32_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u8mf4_m(...) __riscv_vsoxseg3ei32_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u8mf8(...) __riscv_vsoxseg3ei32_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg3ei32_v_u8mf8_m(...) __riscv_vsoxseg3ei32_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_f16m1(...) __riscv_vsoxseg3ei64_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_f16m1_m(...) __riscv_vsoxseg3ei64_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_f16m2(...) __riscv_vsoxseg3ei64_v_f16m2(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_f16m2_m(...) __riscv_vsoxseg3ei64_v_f16m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_f16mf2(...) __riscv_vsoxseg3ei64_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_f16mf2_m(...) __riscv_vsoxseg3ei64_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_f16mf4(...) __riscv_vsoxseg3ei64_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_f16mf4_m(...) __riscv_vsoxseg3ei64_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_f32m1(...) __riscv_vsoxseg3ei64_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_f32m1_m(...) __riscv_vsoxseg3ei64_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_f32m2(...) __riscv_vsoxseg3ei64_v_f32m2(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_f32m2_m(...) __riscv_vsoxseg3ei64_v_f32m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_f32mf2(...) __riscv_vsoxseg3ei64_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_f32mf2_m(...) __riscv_vsoxseg3ei64_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_f64m1(...) __riscv_vsoxseg3ei64_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_f64m1_m(...) __riscv_vsoxseg3ei64_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_f64m2(...) __riscv_vsoxseg3ei64_v_f64m2(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_f64m2_m(...) __riscv_vsoxseg3ei64_v_f64m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i16m1(...) __riscv_vsoxseg3ei64_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i16m1_m(...) __riscv_vsoxseg3ei64_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i16m2(...) __riscv_vsoxseg3ei64_v_i16m2(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i16m2_m(...) __riscv_vsoxseg3ei64_v_i16m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i16mf2(...) __riscv_vsoxseg3ei64_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i16mf2_m(...) __riscv_vsoxseg3ei64_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i16mf4(...) __riscv_vsoxseg3ei64_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i16mf4_m(...) __riscv_vsoxseg3ei64_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i32m1(...) __riscv_vsoxseg3ei64_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i32m1_m(...) __riscv_vsoxseg3ei64_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i32m2(...) __riscv_vsoxseg3ei64_v_i32m2(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i32m2_m(...) __riscv_vsoxseg3ei64_v_i32m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i32mf2(...) __riscv_vsoxseg3ei64_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i32mf2_m(...) __riscv_vsoxseg3ei64_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i64m1(...) __riscv_vsoxseg3ei64_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i64m1_m(...) __riscv_vsoxseg3ei64_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i64m2(...) __riscv_vsoxseg3ei64_v_i64m2(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i64m2_m(...) __riscv_vsoxseg3ei64_v_i64m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i8m1(...) __riscv_vsoxseg3ei64_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i8m1_m(...) __riscv_vsoxseg3ei64_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i8mf2(...) __riscv_vsoxseg3ei64_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i8mf2_m(...) __riscv_vsoxseg3ei64_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i8mf4(...) __riscv_vsoxseg3ei64_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i8mf4_m(...) __riscv_vsoxseg3ei64_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i8mf8(...) __riscv_vsoxseg3ei64_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_i8mf8_m(...) __riscv_vsoxseg3ei64_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u16m1(...) __riscv_vsoxseg3ei64_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u16m1_m(...) __riscv_vsoxseg3ei64_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u16m2(...) __riscv_vsoxseg3ei64_v_u16m2(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u16m2_m(...) __riscv_vsoxseg3ei64_v_u16m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u16mf2(...) __riscv_vsoxseg3ei64_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u16mf2_m(...) __riscv_vsoxseg3ei64_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u16mf4(...) __riscv_vsoxseg3ei64_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u16mf4_m(...) __riscv_vsoxseg3ei64_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u32m1(...) __riscv_vsoxseg3ei64_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u32m1_m(...) __riscv_vsoxseg3ei64_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u32m2(...) __riscv_vsoxseg3ei64_v_u32m2(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u32m2_m(...) __riscv_vsoxseg3ei64_v_u32m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u32mf2(...) __riscv_vsoxseg3ei64_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u32mf2_m(...) __riscv_vsoxseg3ei64_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u64m1(...) __riscv_vsoxseg3ei64_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u64m1_m(...) __riscv_vsoxseg3ei64_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u64m2(...) __riscv_vsoxseg3ei64_v_u64m2(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u64m2_m(...) __riscv_vsoxseg3ei64_v_u64m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u8m1(...) __riscv_vsoxseg3ei64_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u8m1_m(...) __riscv_vsoxseg3ei64_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u8mf2(...) __riscv_vsoxseg3ei64_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u8mf2_m(...) __riscv_vsoxseg3ei64_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u8mf4(...) __riscv_vsoxseg3ei64_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u8mf4_m(...) __riscv_vsoxseg3ei64_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u8mf8(...) __riscv_vsoxseg3ei64_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg3ei64_v_u8mf8_m(...) __riscv_vsoxseg3ei64_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_f16m1(...) __riscv_vsoxseg3ei8_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_f16m1_m(...) __riscv_vsoxseg3ei8_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_f16m2(...) __riscv_vsoxseg3ei8_v_f16m2(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_f16m2_m(...) __riscv_vsoxseg3ei8_v_f16m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_f16mf2(...) __riscv_vsoxseg3ei8_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_f16mf2_m(...) __riscv_vsoxseg3ei8_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_f16mf4(...) __riscv_vsoxseg3ei8_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_f16mf4_m(...) __riscv_vsoxseg3ei8_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_f32m1(...) __riscv_vsoxseg3ei8_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_f32m1_m(...) __riscv_vsoxseg3ei8_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_f32m2(...) __riscv_vsoxseg3ei8_v_f32m2(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_f32m2_m(...) __riscv_vsoxseg3ei8_v_f32m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_f32mf2(...) __riscv_vsoxseg3ei8_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_f32mf2_m(...) __riscv_vsoxseg3ei8_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_f64m1(...) __riscv_vsoxseg3ei8_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_f64m1_m(...) __riscv_vsoxseg3ei8_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_f64m2(...) __riscv_vsoxseg3ei8_v_f64m2(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_f64m2_m(...) __riscv_vsoxseg3ei8_v_f64m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i16m1(...) __riscv_vsoxseg3ei8_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i16m1_m(...) __riscv_vsoxseg3ei8_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i16m2(...) __riscv_vsoxseg3ei8_v_i16m2(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i16m2_m(...) __riscv_vsoxseg3ei8_v_i16m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i16mf2(...) __riscv_vsoxseg3ei8_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i16mf2_m(...) __riscv_vsoxseg3ei8_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i16mf4(...) __riscv_vsoxseg3ei8_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i16mf4_m(...) __riscv_vsoxseg3ei8_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i32m1(...) __riscv_vsoxseg3ei8_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i32m1_m(...) __riscv_vsoxseg3ei8_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i32m2(...) __riscv_vsoxseg3ei8_v_i32m2(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i32m2_m(...) __riscv_vsoxseg3ei8_v_i32m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i32mf2(...) __riscv_vsoxseg3ei8_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i32mf2_m(...) __riscv_vsoxseg3ei8_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i64m1(...) __riscv_vsoxseg3ei8_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i64m1_m(...) __riscv_vsoxseg3ei8_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i64m2(...) __riscv_vsoxseg3ei8_v_i64m2(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i64m2_m(...) __riscv_vsoxseg3ei8_v_i64m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i8m1(...) __riscv_vsoxseg3ei8_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i8m1_m(...) __riscv_vsoxseg3ei8_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i8m2(...) __riscv_vsoxseg3ei8_v_i8m2(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i8m2_m(...) __riscv_vsoxseg3ei8_v_i8m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i8mf2(...) __riscv_vsoxseg3ei8_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i8mf2_m(...) __riscv_vsoxseg3ei8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i8mf4(...) __riscv_vsoxseg3ei8_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i8mf4_m(...) __riscv_vsoxseg3ei8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i8mf8(...) __riscv_vsoxseg3ei8_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_i8mf8_m(...) __riscv_vsoxseg3ei8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u16m1(...) __riscv_vsoxseg3ei8_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u16m1_m(...) __riscv_vsoxseg3ei8_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u16m2(...) __riscv_vsoxseg3ei8_v_u16m2(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u16m2_m(...) __riscv_vsoxseg3ei8_v_u16m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u16mf2(...) __riscv_vsoxseg3ei8_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u16mf2_m(...) __riscv_vsoxseg3ei8_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u16mf4(...) __riscv_vsoxseg3ei8_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u16mf4_m(...) __riscv_vsoxseg3ei8_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u32m1(...) __riscv_vsoxseg3ei8_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u32m1_m(...) __riscv_vsoxseg3ei8_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u32m2(...) __riscv_vsoxseg3ei8_v_u32m2(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u32m2_m(...) __riscv_vsoxseg3ei8_v_u32m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u32mf2(...) __riscv_vsoxseg3ei8_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u32mf2_m(...) __riscv_vsoxseg3ei8_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u64m1(...) __riscv_vsoxseg3ei8_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u64m1_m(...) __riscv_vsoxseg3ei8_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u64m2(...) __riscv_vsoxseg3ei8_v_u64m2(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u64m2_m(...) __riscv_vsoxseg3ei8_v_u64m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u8m1(...) __riscv_vsoxseg3ei8_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u8m1_m(...) __riscv_vsoxseg3ei8_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u8m2(...) __riscv_vsoxseg3ei8_v_u8m2(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u8m2_m(...) __riscv_vsoxseg3ei8_v_u8m2_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u8mf2(...) __riscv_vsoxseg3ei8_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u8mf2_m(...) __riscv_vsoxseg3ei8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u8mf4(...) __riscv_vsoxseg3ei8_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u8mf4_m(...) __riscv_vsoxseg3ei8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u8mf8(...) __riscv_vsoxseg3ei8_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg3ei8_v_u8mf8_m(...) __riscv_vsoxseg3ei8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_f16m1(...) __riscv_vsoxseg4ei16_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_f16m1_m(...) __riscv_vsoxseg4ei16_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_f16m2(...) __riscv_vsoxseg4ei16_v_f16m2(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_f16m2_m(...) __riscv_vsoxseg4ei16_v_f16m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_f16mf2(...) __riscv_vsoxseg4ei16_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_f16mf2_m(...) __riscv_vsoxseg4ei16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_f16mf4(...) __riscv_vsoxseg4ei16_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_f16mf4_m(...) __riscv_vsoxseg4ei16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_f32m1(...) __riscv_vsoxseg4ei16_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_f32m1_m(...) __riscv_vsoxseg4ei16_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_f32m2(...) __riscv_vsoxseg4ei16_v_f32m2(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_f32m2_m(...) __riscv_vsoxseg4ei16_v_f32m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_f32mf2(...) __riscv_vsoxseg4ei16_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_f32mf2_m(...) __riscv_vsoxseg4ei16_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_f64m1(...) __riscv_vsoxseg4ei16_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_f64m1_m(...) __riscv_vsoxseg4ei16_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_f64m2(...) __riscv_vsoxseg4ei16_v_f64m2(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_f64m2_m(...) __riscv_vsoxseg4ei16_v_f64m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i16m1(...) __riscv_vsoxseg4ei16_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i16m1_m(...) __riscv_vsoxseg4ei16_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i16m2(...) __riscv_vsoxseg4ei16_v_i16m2(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i16m2_m(...) __riscv_vsoxseg4ei16_v_i16m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i16mf2(...) __riscv_vsoxseg4ei16_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i16mf2_m(...) __riscv_vsoxseg4ei16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i16mf4(...) __riscv_vsoxseg4ei16_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i16mf4_m(...) __riscv_vsoxseg4ei16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i32m1(...) __riscv_vsoxseg4ei16_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i32m1_m(...) __riscv_vsoxseg4ei16_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i32m2(...) __riscv_vsoxseg4ei16_v_i32m2(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i32m2_m(...) __riscv_vsoxseg4ei16_v_i32m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i32mf2(...) __riscv_vsoxseg4ei16_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i32mf2_m(...) __riscv_vsoxseg4ei16_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i64m1(...) __riscv_vsoxseg4ei16_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i64m1_m(...) __riscv_vsoxseg4ei16_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i64m2(...) __riscv_vsoxseg4ei16_v_i64m2(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i64m2_m(...) __riscv_vsoxseg4ei16_v_i64m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i8m1(...) __riscv_vsoxseg4ei16_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i8m1_m(...) __riscv_vsoxseg4ei16_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i8m2(...) __riscv_vsoxseg4ei16_v_i8m2(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i8m2_m(...) __riscv_vsoxseg4ei16_v_i8m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i8mf2(...) __riscv_vsoxseg4ei16_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i8mf2_m(...) __riscv_vsoxseg4ei16_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i8mf4(...) __riscv_vsoxseg4ei16_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i8mf4_m(...) __riscv_vsoxseg4ei16_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i8mf8(...) __riscv_vsoxseg4ei16_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_i8mf8_m(...) __riscv_vsoxseg4ei16_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u16m1(...) __riscv_vsoxseg4ei16_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u16m1_m(...) __riscv_vsoxseg4ei16_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u16m2(...) __riscv_vsoxseg4ei16_v_u16m2(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u16m2_m(...) __riscv_vsoxseg4ei16_v_u16m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u16mf2(...) __riscv_vsoxseg4ei16_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u16mf2_m(...) __riscv_vsoxseg4ei16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u16mf4(...) __riscv_vsoxseg4ei16_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u16mf4_m(...) __riscv_vsoxseg4ei16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u32m1(...) __riscv_vsoxseg4ei16_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u32m1_m(...) __riscv_vsoxseg4ei16_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u32m2(...) __riscv_vsoxseg4ei16_v_u32m2(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u32m2_m(...) __riscv_vsoxseg4ei16_v_u32m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u32mf2(...) __riscv_vsoxseg4ei16_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u32mf2_m(...) __riscv_vsoxseg4ei16_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u64m1(...) __riscv_vsoxseg4ei16_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u64m1_m(...) __riscv_vsoxseg4ei16_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u64m2(...) __riscv_vsoxseg4ei16_v_u64m2(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u64m2_m(...) __riscv_vsoxseg4ei16_v_u64m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u8m1(...) __riscv_vsoxseg4ei16_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u8m1_m(...) __riscv_vsoxseg4ei16_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u8m2(...) __riscv_vsoxseg4ei16_v_u8m2(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u8m2_m(...) __riscv_vsoxseg4ei16_v_u8m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u8mf2(...) __riscv_vsoxseg4ei16_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u8mf2_m(...) __riscv_vsoxseg4ei16_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u8mf4(...) __riscv_vsoxseg4ei16_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u8mf4_m(...) __riscv_vsoxseg4ei16_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u8mf8(...) __riscv_vsoxseg4ei16_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg4ei16_v_u8mf8_m(...) __riscv_vsoxseg4ei16_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_f16m1(...) __riscv_vsoxseg4ei32_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_f16m1_m(...) __riscv_vsoxseg4ei32_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_f16m2(...) __riscv_vsoxseg4ei32_v_f16m2(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_f16m2_m(...) __riscv_vsoxseg4ei32_v_f16m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_f16mf2(...) __riscv_vsoxseg4ei32_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_f16mf2_m(...) __riscv_vsoxseg4ei32_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_f16mf4(...) __riscv_vsoxseg4ei32_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_f16mf4_m(...) __riscv_vsoxseg4ei32_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_f32m1(...) __riscv_vsoxseg4ei32_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_f32m1_m(...) __riscv_vsoxseg4ei32_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_f32m2(...) __riscv_vsoxseg4ei32_v_f32m2(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_f32m2_m(...) __riscv_vsoxseg4ei32_v_f32m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_f32mf2(...) __riscv_vsoxseg4ei32_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_f32mf2_m(...) __riscv_vsoxseg4ei32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_f64m1(...) __riscv_vsoxseg4ei32_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_f64m1_m(...) __riscv_vsoxseg4ei32_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_f64m2(...) __riscv_vsoxseg4ei32_v_f64m2(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_f64m2_m(...) __riscv_vsoxseg4ei32_v_f64m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i16m1(...) __riscv_vsoxseg4ei32_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i16m1_m(...) __riscv_vsoxseg4ei32_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i16m2(...) __riscv_vsoxseg4ei32_v_i16m2(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i16m2_m(...) __riscv_vsoxseg4ei32_v_i16m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i16mf2(...) __riscv_vsoxseg4ei32_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i16mf2_m(...) __riscv_vsoxseg4ei32_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i16mf4(...) __riscv_vsoxseg4ei32_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i16mf4_m(...) __riscv_vsoxseg4ei32_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i32m1(...) __riscv_vsoxseg4ei32_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i32m1_m(...) __riscv_vsoxseg4ei32_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i32m2(...) __riscv_vsoxseg4ei32_v_i32m2(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i32m2_m(...) __riscv_vsoxseg4ei32_v_i32m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i32mf2(...) __riscv_vsoxseg4ei32_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i32mf2_m(...) __riscv_vsoxseg4ei32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i64m1(...) __riscv_vsoxseg4ei32_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i64m1_m(...) __riscv_vsoxseg4ei32_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i64m2(...) __riscv_vsoxseg4ei32_v_i64m2(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i64m2_m(...) __riscv_vsoxseg4ei32_v_i64m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i8m1(...) __riscv_vsoxseg4ei32_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i8m1_m(...) __riscv_vsoxseg4ei32_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i8m2(...) __riscv_vsoxseg4ei32_v_i8m2(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i8m2_m(...) __riscv_vsoxseg4ei32_v_i8m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i8mf2(...) __riscv_vsoxseg4ei32_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i8mf2_m(...) __riscv_vsoxseg4ei32_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i8mf4(...) __riscv_vsoxseg4ei32_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i8mf4_m(...) __riscv_vsoxseg4ei32_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i8mf8(...) __riscv_vsoxseg4ei32_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_i8mf8_m(...) __riscv_vsoxseg4ei32_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u16m1(...) __riscv_vsoxseg4ei32_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u16m1_m(...) __riscv_vsoxseg4ei32_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u16m2(...) __riscv_vsoxseg4ei32_v_u16m2(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u16m2_m(...) __riscv_vsoxseg4ei32_v_u16m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u16mf2(...) __riscv_vsoxseg4ei32_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u16mf2_m(...) __riscv_vsoxseg4ei32_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u16mf4(...) __riscv_vsoxseg4ei32_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u16mf4_m(...) __riscv_vsoxseg4ei32_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u32m1(...) __riscv_vsoxseg4ei32_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u32m1_m(...) __riscv_vsoxseg4ei32_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u32m2(...) __riscv_vsoxseg4ei32_v_u32m2(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u32m2_m(...) __riscv_vsoxseg4ei32_v_u32m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u32mf2(...) __riscv_vsoxseg4ei32_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u32mf2_m(...) __riscv_vsoxseg4ei32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u64m1(...) __riscv_vsoxseg4ei32_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u64m1_m(...) __riscv_vsoxseg4ei32_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u64m2(...) __riscv_vsoxseg4ei32_v_u64m2(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u64m2_m(...) __riscv_vsoxseg4ei32_v_u64m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u8m1(...) __riscv_vsoxseg4ei32_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u8m1_m(...) __riscv_vsoxseg4ei32_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u8m2(...) __riscv_vsoxseg4ei32_v_u8m2(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u8m2_m(...) __riscv_vsoxseg4ei32_v_u8m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u8mf2(...) __riscv_vsoxseg4ei32_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u8mf2_m(...) __riscv_vsoxseg4ei32_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u8mf4(...) __riscv_vsoxseg4ei32_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u8mf4_m(...) __riscv_vsoxseg4ei32_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u8mf8(...) __riscv_vsoxseg4ei32_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg4ei32_v_u8mf8_m(...) __riscv_vsoxseg4ei32_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_f16m1(...) __riscv_vsoxseg4ei64_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_f16m1_m(...) __riscv_vsoxseg4ei64_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_f16m2(...) __riscv_vsoxseg4ei64_v_f16m2(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_f16m2_m(...) __riscv_vsoxseg4ei64_v_f16m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_f16mf2(...) __riscv_vsoxseg4ei64_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_f16mf2_m(...) __riscv_vsoxseg4ei64_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_f16mf4(...) __riscv_vsoxseg4ei64_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_f16mf4_m(...) __riscv_vsoxseg4ei64_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_f32m1(...) __riscv_vsoxseg4ei64_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_f32m1_m(...) __riscv_vsoxseg4ei64_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_f32m2(...) __riscv_vsoxseg4ei64_v_f32m2(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_f32m2_m(...) __riscv_vsoxseg4ei64_v_f32m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_f32mf2(...) __riscv_vsoxseg4ei64_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_f32mf2_m(...) __riscv_vsoxseg4ei64_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_f64m1(...) __riscv_vsoxseg4ei64_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_f64m1_m(...) __riscv_vsoxseg4ei64_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_f64m2(...) __riscv_vsoxseg4ei64_v_f64m2(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_f64m2_m(...) __riscv_vsoxseg4ei64_v_f64m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i16m1(...) __riscv_vsoxseg4ei64_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i16m1_m(...) __riscv_vsoxseg4ei64_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i16m2(...) __riscv_vsoxseg4ei64_v_i16m2(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i16m2_m(...) __riscv_vsoxseg4ei64_v_i16m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i16mf2(...) __riscv_vsoxseg4ei64_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i16mf2_m(...) __riscv_vsoxseg4ei64_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i16mf4(...) __riscv_vsoxseg4ei64_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i16mf4_m(...) __riscv_vsoxseg4ei64_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i32m1(...) __riscv_vsoxseg4ei64_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i32m1_m(...) __riscv_vsoxseg4ei64_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i32m2(...) __riscv_vsoxseg4ei64_v_i32m2(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i32m2_m(...) __riscv_vsoxseg4ei64_v_i32m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i32mf2(...) __riscv_vsoxseg4ei64_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i32mf2_m(...) __riscv_vsoxseg4ei64_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i64m1(...) __riscv_vsoxseg4ei64_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i64m1_m(...) __riscv_vsoxseg4ei64_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i64m2(...) __riscv_vsoxseg4ei64_v_i64m2(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i64m2_m(...) __riscv_vsoxseg4ei64_v_i64m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i8m1(...) __riscv_vsoxseg4ei64_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i8m1_m(...) __riscv_vsoxseg4ei64_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i8mf2(...) __riscv_vsoxseg4ei64_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i8mf2_m(...) __riscv_vsoxseg4ei64_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i8mf4(...) __riscv_vsoxseg4ei64_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i8mf4_m(...) __riscv_vsoxseg4ei64_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i8mf8(...) __riscv_vsoxseg4ei64_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_i8mf8_m(...) __riscv_vsoxseg4ei64_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u16m1(...) __riscv_vsoxseg4ei64_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u16m1_m(...) __riscv_vsoxseg4ei64_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u16m2(...) __riscv_vsoxseg4ei64_v_u16m2(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u16m2_m(...) __riscv_vsoxseg4ei64_v_u16m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u16mf2(...) __riscv_vsoxseg4ei64_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u16mf2_m(...) __riscv_vsoxseg4ei64_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u16mf4(...) __riscv_vsoxseg4ei64_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u16mf4_m(...) __riscv_vsoxseg4ei64_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u32m1(...) __riscv_vsoxseg4ei64_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u32m1_m(...) __riscv_vsoxseg4ei64_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u32m2(...) __riscv_vsoxseg4ei64_v_u32m2(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u32m2_m(...) __riscv_vsoxseg4ei64_v_u32m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u32mf2(...) __riscv_vsoxseg4ei64_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u32mf2_m(...) __riscv_vsoxseg4ei64_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u64m1(...) __riscv_vsoxseg4ei64_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u64m1_m(...) __riscv_vsoxseg4ei64_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u64m2(...) __riscv_vsoxseg4ei64_v_u64m2(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u64m2_m(...) __riscv_vsoxseg4ei64_v_u64m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u8m1(...) __riscv_vsoxseg4ei64_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u8m1_m(...) __riscv_vsoxseg4ei64_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u8mf2(...) __riscv_vsoxseg4ei64_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u8mf2_m(...) __riscv_vsoxseg4ei64_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u8mf4(...) __riscv_vsoxseg4ei64_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u8mf4_m(...) __riscv_vsoxseg4ei64_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u8mf8(...) __riscv_vsoxseg4ei64_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg4ei64_v_u8mf8_m(...) __riscv_vsoxseg4ei64_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_f16m1(...) __riscv_vsoxseg4ei8_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_f16m1_m(...) __riscv_vsoxseg4ei8_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_f16m2(...) __riscv_vsoxseg4ei8_v_f16m2(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_f16m2_m(...) __riscv_vsoxseg4ei8_v_f16m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_f16mf2(...) __riscv_vsoxseg4ei8_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_f16mf2_m(...) __riscv_vsoxseg4ei8_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_f16mf4(...) __riscv_vsoxseg4ei8_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_f16mf4_m(...) __riscv_vsoxseg4ei8_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_f32m1(...) __riscv_vsoxseg4ei8_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_f32m1_m(...) __riscv_vsoxseg4ei8_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_f32m2(...) __riscv_vsoxseg4ei8_v_f32m2(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_f32m2_m(...) __riscv_vsoxseg4ei8_v_f32m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_f32mf2(...) __riscv_vsoxseg4ei8_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_f32mf2_m(...) __riscv_vsoxseg4ei8_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_f64m1(...) __riscv_vsoxseg4ei8_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_f64m1_m(...) __riscv_vsoxseg4ei8_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_f64m2(...) __riscv_vsoxseg4ei8_v_f64m2(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_f64m2_m(...) __riscv_vsoxseg4ei8_v_f64m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i16m1(...) __riscv_vsoxseg4ei8_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i16m1_m(...) __riscv_vsoxseg4ei8_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i16m2(...) __riscv_vsoxseg4ei8_v_i16m2(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i16m2_m(...) __riscv_vsoxseg4ei8_v_i16m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i16mf2(...) __riscv_vsoxseg4ei8_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i16mf2_m(...) __riscv_vsoxseg4ei8_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i16mf4(...) __riscv_vsoxseg4ei8_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i16mf4_m(...) __riscv_vsoxseg4ei8_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i32m1(...) __riscv_vsoxseg4ei8_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i32m1_m(...) __riscv_vsoxseg4ei8_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i32m2(...) __riscv_vsoxseg4ei8_v_i32m2(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i32m2_m(...) __riscv_vsoxseg4ei8_v_i32m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i32mf2(...) __riscv_vsoxseg4ei8_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i32mf2_m(...) __riscv_vsoxseg4ei8_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i64m1(...) __riscv_vsoxseg4ei8_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i64m1_m(...) __riscv_vsoxseg4ei8_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i64m2(...) __riscv_vsoxseg4ei8_v_i64m2(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i64m2_m(...) __riscv_vsoxseg4ei8_v_i64m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i8m1(...) __riscv_vsoxseg4ei8_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i8m1_m(...) __riscv_vsoxseg4ei8_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i8m2(...) __riscv_vsoxseg4ei8_v_i8m2(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i8m2_m(...) __riscv_vsoxseg4ei8_v_i8m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i8mf2(...) __riscv_vsoxseg4ei8_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i8mf2_m(...) __riscv_vsoxseg4ei8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i8mf4(...) __riscv_vsoxseg4ei8_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i8mf4_m(...) __riscv_vsoxseg4ei8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i8mf8(...) __riscv_vsoxseg4ei8_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_i8mf8_m(...) __riscv_vsoxseg4ei8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u16m1(...) __riscv_vsoxseg4ei8_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u16m1_m(...) __riscv_vsoxseg4ei8_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u16m2(...) __riscv_vsoxseg4ei8_v_u16m2(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u16m2_m(...) __riscv_vsoxseg4ei8_v_u16m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u16mf2(...) __riscv_vsoxseg4ei8_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u16mf2_m(...) __riscv_vsoxseg4ei8_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u16mf4(...) __riscv_vsoxseg4ei8_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u16mf4_m(...) __riscv_vsoxseg4ei8_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u32m1(...) __riscv_vsoxseg4ei8_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u32m1_m(...) __riscv_vsoxseg4ei8_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u32m2(...) __riscv_vsoxseg4ei8_v_u32m2(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u32m2_m(...) __riscv_vsoxseg4ei8_v_u32m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u32mf2(...) __riscv_vsoxseg4ei8_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u32mf2_m(...) __riscv_vsoxseg4ei8_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u64m1(...) __riscv_vsoxseg4ei8_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u64m1_m(...) __riscv_vsoxseg4ei8_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u64m2(...) __riscv_vsoxseg4ei8_v_u64m2(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u64m2_m(...) __riscv_vsoxseg4ei8_v_u64m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u8m1(...) __riscv_vsoxseg4ei8_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u8m1_m(...) __riscv_vsoxseg4ei8_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u8m2(...) __riscv_vsoxseg4ei8_v_u8m2(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u8m2_m(...) __riscv_vsoxseg4ei8_v_u8m2_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u8mf2(...) __riscv_vsoxseg4ei8_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u8mf2_m(...) __riscv_vsoxseg4ei8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u8mf4(...) __riscv_vsoxseg4ei8_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u8mf4_m(...) __riscv_vsoxseg4ei8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u8mf8(...) __riscv_vsoxseg4ei8_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg4ei8_v_u8mf8_m(...) __riscv_vsoxseg4ei8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_f16m1(...) __riscv_vsoxseg5ei16_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_f16m1_m(...) __riscv_vsoxseg5ei16_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_f16mf2(...) __riscv_vsoxseg5ei16_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_f16mf2_m(...) __riscv_vsoxseg5ei16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_f16mf4(...) __riscv_vsoxseg5ei16_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_f16mf4_m(...) __riscv_vsoxseg5ei16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_f32m1(...) __riscv_vsoxseg5ei16_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_f32m1_m(...) __riscv_vsoxseg5ei16_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_f32mf2(...) __riscv_vsoxseg5ei16_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_f32mf2_m(...) __riscv_vsoxseg5ei16_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_f64m1(...) __riscv_vsoxseg5ei16_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_f64m1_m(...) __riscv_vsoxseg5ei16_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_i16m1(...) __riscv_vsoxseg5ei16_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_i16m1_m(...) __riscv_vsoxseg5ei16_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_i16mf2(...) __riscv_vsoxseg5ei16_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_i16mf2_m(...) __riscv_vsoxseg5ei16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_i16mf4(...) __riscv_vsoxseg5ei16_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_i16mf4_m(...) __riscv_vsoxseg5ei16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_i32m1(...) __riscv_vsoxseg5ei16_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_i32m1_m(...) __riscv_vsoxseg5ei16_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_i32mf2(...) __riscv_vsoxseg5ei16_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_i32mf2_m(...) __riscv_vsoxseg5ei16_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_i64m1(...) __riscv_vsoxseg5ei16_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_i64m1_m(...) __riscv_vsoxseg5ei16_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_i8m1(...) __riscv_vsoxseg5ei16_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_i8m1_m(...) __riscv_vsoxseg5ei16_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_i8mf2(...) __riscv_vsoxseg5ei16_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_i8mf2_m(...) __riscv_vsoxseg5ei16_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_i8mf4(...) __riscv_vsoxseg5ei16_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_i8mf4_m(...) __riscv_vsoxseg5ei16_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_i8mf8(...) __riscv_vsoxseg5ei16_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_i8mf8_m(...) __riscv_vsoxseg5ei16_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_u16m1(...) __riscv_vsoxseg5ei16_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_u16m1_m(...) __riscv_vsoxseg5ei16_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_u16mf2(...) __riscv_vsoxseg5ei16_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_u16mf2_m(...) __riscv_vsoxseg5ei16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_u16mf4(...) __riscv_vsoxseg5ei16_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_u16mf4_m(...) __riscv_vsoxseg5ei16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_u32m1(...) __riscv_vsoxseg5ei16_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_u32m1_m(...) __riscv_vsoxseg5ei16_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_u32mf2(...) __riscv_vsoxseg5ei16_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_u32mf2_m(...) __riscv_vsoxseg5ei16_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_u64m1(...) __riscv_vsoxseg5ei16_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_u64m1_m(...) __riscv_vsoxseg5ei16_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_u8m1(...) __riscv_vsoxseg5ei16_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_u8m1_m(...) __riscv_vsoxseg5ei16_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_u8mf2(...) __riscv_vsoxseg5ei16_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_u8mf2_m(...) __riscv_vsoxseg5ei16_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_u8mf4(...) __riscv_vsoxseg5ei16_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_u8mf4_m(...) __riscv_vsoxseg5ei16_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_u8mf8(...) __riscv_vsoxseg5ei16_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg5ei16_v_u8mf8_m(...) __riscv_vsoxseg5ei16_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_f16m1(...) __riscv_vsoxseg5ei32_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_f16m1_m(...) __riscv_vsoxseg5ei32_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_f16mf2(...) __riscv_vsoxseg5ei32_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_f16mf2_m(...) __riscv_vsoxseg5ei32_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_f16mf4(...) __riscv_vsoxseg5ei32_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_f16mf4_m(...) __riscv_vsoxseg5ei32_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_f32m1(...) __riscv_vsoxseg5ei32_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_f32m1_m(...) __riscv_vsoxseg5ei32_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_f32mf2(...) __riscv_vsoxseg5ei32_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_f32mf2_m(...) __riscv_vsoxseg5ei32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_f64m1(...) __riscv_vsoxseg5ei32_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_f64m1_m(...) __riscv_vsoxseg5ei32_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_i16m1(...) __riscv_vsoxseg5ei32_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_i16m1_m(...) __riscv_vsoxseg5ei32_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_i16mf2(...) __riscv_vsoxseg5ei32_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_i16mf2_m(...) __riscv_vsoxseg5ei32_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_i16mf4(...) __riscv_vsoxseg5ei32_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_i16mf4_m(...) __riscv_vsoxseg5ei32_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_i32m1(...) __riscv_vsoxseg5ei32_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_i32m1_m(...) __riscv_vsoxseg5ei32_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_i32mf2(...) __riscv_vsoxseg5ei32_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_i32mf2_m(...) __riscv_vsoxseg5ei32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_i64m1(...) __riscv_vsoxseg5ei32_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_i64m1_m(...) __riscv_vsoxseg5ei32_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_i8m1(...) __riscv_vsoxseg5ei32_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_i8m1_m(...) __riscv_vsoxseg5ei32_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_i8mf2(...) __riscv_vsoxseg5ei32_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_i8mf2_m(...) __riscv_vsoxseg5ei32_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_i8mf4(...) __riscv_vsoxseg5ei32_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_i8mf4_m(...) __riscv_vsoxseg5ei32_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_i8mf8(...) __riscv_vsoxseg5ei32_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_i8mf8_m(...) __riscv_vsoxseg5ei32_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_u16m1(...) __riscv_vsoxseg5ei32_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_u16m1_m(...) __riscv_vsoxseg5ei32_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_u16mf2(...) __riscv_vsoxseg5ei32_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_u16mf2_m(...) __riscv_vsoxseg5ei32_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_u16mf4(...) __riscv_vsoxseg5ei32_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_u16mf4_m(...) __riscv_vsoxseg5ei32_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_u32m1(...) __riscv_vsoxseg5ei32_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_u32m1_m(...) __riscv_vsoxseg5ei32_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_u32mf2(...) __riscv_vsoxseg5ei32_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_u32mf2_m(...) __riscv_vsoxseg5ei32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_u64m1(...) __riscv_vsoxseg5ei32_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_u64m1_m(...) __riscv_vsoxseg5ei32_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_u8m1(...) __riscv_vsoxseg5ei32_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_u8m1_m(...) __riscv_vsoxseg5ei32_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_u8mf2(...) __riscv_vsoxseg5ei32_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_u8mf2_m(...) __riscv_vsoxseg5ei32_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_u8mf4(...) __riscv_vsoxseg5ei32_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_u8mf4_m(...) __riscv_vsoxseg5ei32_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_u8mf8(...) __riscv_vsoxseg5ei32_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg5ei32_v_u8mf8_m(...) __riscv_vsoxseg5ei32_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_f16m1(...) __riscv_vsoxseg5ei64_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_f16m1_m(...) __riscv_vsoxseg5ei64_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_f16mf2(...) __riscv_vsoxseg5ei64_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_f16mf2_m(...) __riscv_vsoxseg5ei64_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_f16mf4(...) __riscv_vsoxseg5ei64_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_f16mf4_m(...) __riscv_vsoxseg5ei64_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_f32m1(...) __riscv_vsoxseg5ei64_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_f32m1_m(...) __riscv_vsoxseg5ei64_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_f32mf2(...) __riscv_vsoxseg5ei64_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_f32mf2_m(...) __riscv_vsoxseg5ei64_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_f64m1(...) __riscv_vsoxseg5ei64_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_f64m1_m(...) __riscv_vsoxseg5ei64_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_i16m1(...) __riscv_vsoxseg5ei64_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_i16m1_m(...) __riscv_vsoxseg5ei64_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_i16mf2(...) __riscv_vsoxseg5ei64_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_i16mf2_m(...) __riscv_vsoxseg5ei64_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_i16mf4(...) __riscv_vsoxseg5ei64_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_i16mf4_m(...) __riscv_vsoxseg5ei64_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_i32m1(...) __riscv_vsoxseg5ei64_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_i32m1_m(...) __riscv_vsoxseg5ei64_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_i32mf2(...) __riscv_vsoxseg5ei64_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_i32mf2_m(...) __riscv_vsoxseg5ei64_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_i64m1(...) __riscv_vsoxseg5ei64_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_i64m1_m(...) __riscv_vsoxseg5ei64_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_i8m1(...) __riscv_vsoxseg5ei64_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_i8m1_m(...) __riscv_vsoxseg5ei64_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_i8mf2(...) __riscv_vsoxseg5ei64_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_i8mf2_m(...) __riscv_vsoxseg5ei64_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_i8mf4(...) __riscv_vsoxseg5ei64_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_i8mf4_m(...) __riscv_vsoxseg5ei64_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_i8mf8(...) __riscv_vsoxseg5ei64_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_i8mf8_m(...) __riscv_vsoxseg5ei64_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_u16m1(...) __riscv_vsoxseg5ei64_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_u16m1_m(...) __riscv_vsoxseg5ei64_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_u16mf2(...) __riscv_vsoxseg5ei64_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_u16mf2_m(...) __riscv_vsoxseg5ei64_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_u16mf4(...) __riscv_vsoxseg5ei64_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_u16mf4_m(...) __riscv_vsoxseg5ei64_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_u32m1(...) __riscv_vsoxseg5ei64_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_u32m1_m(...) __riscv_vsoxseg5ei64_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_u32mf2(...) __riscv_vsoxseg5ei64_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_u32mf2_m(...) __riscv_vsoxseg5ei64_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_u64m1(...) __riscv_vsoxseg5ei64_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_u64m1_m(...) __riscv_vsoxseg5ei64_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_u8m1(...) __riscv_vsoxseg5ei64_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_u8m1_m(...) __riscv_vsoxseg5ei64_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_u8mf2(...) __riscv_vsoxseg5ei64_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_u8mf2_m(...) __riscv_vsoxseg5ei64_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_u8mf4(...) __riscv_vsoxseg5ei64_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_u8mf4_m(...) __riscv_vsoxseg5ei64_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_u8mf8(...) __riscv_vsoxseg5ei64_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg5ei64_v_u8mf8_m(...) __riscv_vsoxseg5ei64_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_f16m1(...) __riscv_vsoxseg5ei8_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_f16m1_m(...) __riscv_vsoxseg5ei8_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_f16mf2(...) __riscv_vsoxseg5ei8_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_f16mf2_m(...) __riscv_vsoxseg5ei8_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_f16mf4(...) __riscv_vsoxseg5ei8_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_f16mf4_m(...) __riscv_vsoxseg5ei8_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_f32m1(...) __riscv_vsoxseg5ei8_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_f32m1_m(...) __riscv_vsoxseg5ei8_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_f32mf2(...) __riscv_vsoxseg5ei8_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_f32mf2_m(...) __riscv_vsoxseg5ei8_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_f64m1(...) __riscv_vsoxseg5ei8_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_f64m1_m(...) __riscv_vsoxseg5ei8_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_i16m1(...) __riscv_vsoxseg5ei8_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_i16m1_m(...) __riscv_vsoxseg5ei8_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_i16mf2(...) __riscv_vsoxseg5ei8_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_i16mf2_m(...) __riscv_vsoxseg5ei8_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_i16mf4(...) __riscv_vsoxseg5ei8_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_i16mf4_m(...) __riscv_vsoxseg5ei8_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_i32m1(...) __riscv_vsoxseg5ei8_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_i32m1_m(...) __riscv_vsoxseg5ei8_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_i32mf2(...) __riscv_vsoxseg5ei8_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_i32mf2_m(...) __riscv_vsoxseg5ei8_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_i64m1(...) __riscv_vsoxseg5ei8_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_i64m1_m(...) __riscv_vsoxseg5ei8_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_i8m1(...) __riscv_vsoxseg5ei8_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_i8m1_m(...) __riscv_vsoxseg5ei8_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_i8mf2(...) __riscv_vsoxseg5ei8_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_i8mf2_m(...) __riscv_vsoxseg5ei8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_i8mf4(...) __riscv_vsoxseg5ei8_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_i8mf4_m(...) __riscv_vsoxseg5ei8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_i8mf8(...) __riscv_vsoxseg5ei8_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_i8mf8_m(...) __riscv_vsoxseg5ei8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_u16m1(...) __riscv_vsoxseg5ei8_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_u16m1_m(...) __riscv_vsoxseg5ei8_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_u16mf2(...) __riscv_vsoxseg5ei8_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_u16mf2_m(...) __riscv_vsoxseg5ei8_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_u16mf4(...) __riscv_vsoxseg5ei8_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_u16mf4_m(...) __riscv_vsoxseg5ei8_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_u32m1(...) __riscv_vsoxseg5ei8_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_u32m1_m(...) __riscv_vsoxseg5ei8_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_u32mf2(...) __riscv_vsoxseg5ei8_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_u32mf2_m(...) __riscv_vsoxseg5ei8_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_u64m1(...) __riscv_vsoxseg5ei8_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_u64m1_m(...) __riscv_vsoxseg5ei8_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_u8m1(...) __riscv_vsoxseg5ei8_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_u8m1_m(...) __riscv_vsoxseg5ei8_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_u8mf2(...) __riscv_vsoxseg5ei8_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_u8mf2_m(...) __riscv_vsoxseg5ei8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_u8mf4(...) __riscv_vsoxseg5ei8_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_u8mf4_m(...) __riscv_vsoxseg5ei8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_u8mf8(...) __riscv_vsoxseg5ei8_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg5ei8_v_u8mf8_m(...) __riscv_vsoxseg5ei8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_f16m1(...) __riscv_vsoxseg6ei16_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_f16m1_m(...) __riscv_vsoxseg6ei16_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_f16mf2(...) __riscv_vsoxseg6ei16_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_f16mf2_m(...) __riscv_vsoxseg6ei16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_f16mf4(...) __riscv_vsoxseg6ei16_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_f16mf4_m(...) __riscv_vsoxseg6ei16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_f32m1(...) __riscv_vsoxseg6ei16_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_f32m1_m(...) __riscv_vsoxseg6ei16_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_f32mf2(...) __riscv_vsoxseg6ei16_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_f32mf2_m(...) __riscv_vsoxseg6ei16_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_f64m1(...) __riscv_vsoxseg6ei16_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_f64m1_m(...) __riscv_vsoxseg6ei16_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_i16m1(...) __riscv_vsoxseg6ei16_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_i16m1_m(...) __riscv_vsoxseg6ei16_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_i16mf2(...) __riscv_vsoxseg6ei16_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_i16mf2_m(...) __riscv_vsoxseg6ei16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_i16mf4(...) __riscv_vsoxseg6ei16_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_i16mf4_m(...) __riscv_vsoxseg6ei16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_i32m1(...) __riscv_vsoxseg6ei16_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_i32m1_m(...) __riscv_vsoxseg6ei16_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_i32mf2(...) __riscv_vsoxseg6ei16_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_i32mf2_m(...) __riscv_vsoxseg6ei16_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_i64m1(...) __riscv_vsoxseg6ei16_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_i64m1_m(...) __riscv_vsoxseg6ei16_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_i8m1(...) __riscv_vsoxseg6ei16_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_i8m1_m(...) __riscv_vsoxseg6ei16_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_i8mf2(...) __riscv_vsoxseg6ei16_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_i8mf2_m(...) __riscv_vsoxseg6ei16_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_i8mf4(...) __riscv_vsoxseg6ei16_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_i8mf4_m(...) __riscv_vsoxseg6ei16_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_i8mf8(...) __riscv_vsoxseg6ei16_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_i8mf8_m(...) __riscv_vsoxseg6ei16_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_u16m1(...) __riscv_vsoxseg6ei16_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_u16m1_m(...) __riscv_vsoxseg6ei16_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_u16mf2(...) __riscv_vsoxseg6ei16_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_u16mf2_m(...) __riscv_vsoxseg6ei16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_u16mf4(...) __riscv_vsoxseg6ei16_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_u16mf4_m(...) __riscv_vsoxseg6ei16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_u32m1(...) __riscv_vsoxseg6ei16_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_u32m1_m(...) __riscv_vsoxseg6ei16_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_u32mf2(...) __riscv_vsoxseg6ei16_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_u32mf2_m(...) __riscv_vsoxseg6ei16_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_u64m1(...) __riscv_vsoxseg6ei16_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_u64m1_m(...) __riscv_vsoxseg6ei16_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_u8m1(...) __riscv_vsoxseg6ei16_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_u8m1_m(...) __riscv_vsoxseg6ei16_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_u8mf2(...) __riscv_vsoxseg6ei16_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_u8mf2_m(...) __riscv_vsoxseg6ei16_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_u8mf4(...) __riscv_vsoxseg6ei16_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_u8mf4_m(...) __riscv_vsoxseg6ei16_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_u8mf8(...) __riscv_vsoxseg6ei16_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg6ei16_v_u8mf8_m(...) __riscv_vsoxseg6ei16_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_f16m1(...) __riscv_vsoxseg6ei32_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_f16m1_m(...) __riscv_vsoxseg6ei32_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_f16mf2(...) __riscv_vsoxseg6ei32_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_f16mf2_m(...) __riscv_vsoxseg6ei32_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_f16mf4(...) __riscv_vsoxseg6ei32_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_f16mf4_m(...) __riscv_vsoxseg6ei32_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_f32m1(...) __riscv_vsoxseg6ei32_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_f32m1_m(...) __riscv_vsoxseg6ei32_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_f32mf2(...) __riscv_vsoxseg6ei32_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_f32mf2_m(...) __riscv_vsoxseg6ei32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_f64m1(...) __riscv_vsoxseg6ei32_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_f64m1_m(...) __riscv_vsoxseg6ei32_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_i16m1(...) __riscv_vsoxseg6ei32_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_i16m1_m(...) __riscv_vsoxseg6ei32_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_i16mf2(...) __riscv_vsoxseg6ei32_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_i16mf2_m(...) __riscv_vsoxseg6ei32_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_i16mf4(...) __riscv_vsoxseg6ei32_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_i16mf4_m(...) __riscv_vsoxseg6ei32_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_i32m1(...) __riscv_vsoxseg6ei32_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_i32m1_m(...) __riscv_vsoxseg6ei32_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_i32mf2(...) __riscv_vsoxseg6ei32_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_i32mf2_m(...) __riscv_vsoxseg6ei32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_i64m1(...) __riscv_vsoxseg6ei32_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_i64m1_m(...) __riscv_vsoxseg6ei32_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_i8m1(...) __riscv_vsoxseg6ei32_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_i8m1_m(...) __riscv_vsoxseg6ei32_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_i8mf2(...) __riscv_vsoxseg6ei32_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_i8mf2_m(...) __riscv_vsoxseg6ei32_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_i8mf4(...) __riscv_vsoxseg6ei32_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_i8mf4_m(...) __riscv_vsoxseg6ei32_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_i8mf8(...) __riscv_vsoxseg6ei32_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_i8mf8_m(...) __riscv_vsoxseg6ei32_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_u16m1(...) __riscv_vsoxseg6ei32_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_u16m1_m(...) __riscv_vsoxseg6ei32_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_u16mf2(...) __riscv_vsoxseg6ei32_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_u16mf2_m(...) __riscv_vsoxseg6ei32_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_u16mf4(...) __riscv_vsoxseg6ei32_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_u16mf4_m(...) __riscv_vsoxseg6ei32_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_u32m1(...) __riscv_vsoxseg6ei32_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_u32m1_m(...) __riscv_vsoxseg6ei32_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_u32mf2(...) __riscv_vsoxseg6ei32_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_u32mf2_m(...) __riscv_vsoxseg6ei32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_u64m1(...) __riscv_vsoxseg6ei32_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_u64m1_m(...) __riscv_vsoxseg6ei32_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_u8m1(...) __riscv_vsoxseg6ei32_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_u8m1_m(...) __riscv_vsoxseg6ei32_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_u8mf2(...) __riscv_vsoxseg6ei32_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_u8mf2_m(...) __riscv_vsoxseg6ei32_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_u8mf4(...) __riscv_vsoxseg6ei32_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_u8mf4_m(...) __riscv_vsoxseg6ei32_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_u8mf8(...) __riscv_vsoxseg6ei32_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg6ei32_v_u8mf8_m(...) __riscv_vsoxseg6ei32_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_f16m1(...) __riscv_vsoxseg6ei64_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_f16m1_m(...) __riscv_vsoxseg6ei64_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_f16mf2(...) __riscv_vsoxseg6ei64_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_f16mf2_m(...) __riscv_vsoxseg6ei64_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_f16mf4(...) __riscv_vsoxseg6ei64_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_f16mf4_m(...) __riscv_vsoxseg6ei64_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_f32m1(...) __riscv_vsoxseg6ei64_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_f32m1_m(...) __riscv_vsoxseg6ei64_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_f32mf2(...) __riscv_vsoxseg6ei64_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_f32mf2_m(...) __riscv_vsoxseg6ei64_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_f64m1(...) __riscv_vsoxseg6ei64_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_f64m1_m(...) __riscv_vsoxseg6ei64_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_i16m1(...) __riscv_vsoxseg6ei64_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_i16m1_m(...) __riscv_vsoxseg6ei64_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_i16mf2(...) __riscv_vsoxseg6ei64_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_i16mf2_m(...) __riscv_vsoxseg6ei64_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_i16mf4(...) __riscv_vsoxseg6ei64_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_i16mf4_m(...) __riscv_vsoxseg6ei64_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_i32m1(...) __riscv_vsoxseg6ei64_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_i32m1_m(...) __riscv_vsoxseg6ei64_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_i32mf2(...) __riscv_vsoxseg6ei64_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_i32mf2_m(...) __riscv_vsoxseg6ei64_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_i64m1(...) __riscv_vsoxseg6ei64_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_i64m1_m(...) __riscv_vsoxseg6ei64_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_i8m1(...) __riscv_vsoxseg6ei64_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_i8m1_m(...) __riscv_vsoxseg6ei64_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_i8mf2(...) __riscv_vsoxseg6ei64_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_i8mf2_m(...) __riscv_vsoxseg6ei64_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_i8mf4(...) __riscv_vsoxseg6ei64_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_i8mf4_m(...) __riscv_vsoxseg6ei64_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_i8mf8(...) __riscv_vsoxseg6ei64_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_i8mf8_m(...) __riscv_vsoxseg6ei64_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_u16m1(...) __riscv_vsoxseg6ei64_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_u16m1_m(...) __riscv_vsoxseg6ei64_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_u16mf2(...) __riscv_vsoxseg6ei64_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_u16mf2_m(...) __riscv_vsoxseg6ei64_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_u16mf4(...) __riscv_vsoxseg6ei64_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_u16mf4_m(...) __riscv_vsoxseg6ei64_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_u32m1(...) __riscv_vsoxseg6ei64_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_u32m1_m(...) __riscv_vsoxseg6ei64_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_u32mf2(...) __riscv_vsoxseg6ei64_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_u32mf2_m(...) __riscv_vsoxseg6ei64_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_u64m1(...) __riscv_vsoxseg6ei64_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_u64m1_m(...) __riscv_vsoxseg6ei64_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_u8m1(...) __riscv_vsoxseg6ei64_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_u8m1_m(...) __riscv_vsoxseg6ei64_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_u8mf2(...) __riscv_vsoxseg6ei64_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_u8mf2_m(...) __riscv_vsoxseg6ei64_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_u8mf4(...) __riscv_vsoxseg6ei64_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_u8mf4_m(...) __riscv_vsoxseg6ei64_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_u8mf8(...) __riscv_vsoxseg6ei64_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg6ei64_v_u8mf8_m(...) __riscv_vsoxseg6ei64_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_f16m1(...) __riscv_vsoxseg6ei8_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_f16m1_m(...) __riscv_vsoxseg6ei8_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_f16mf2(...) __riscv_vsoxseg6ei8_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_f16mf2_m(...) __riscv_vsoxseg6ei8_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_f16mf4(...) __riscv_vsoxseg6ei8_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_f16mf4_m(...) __riscv_vsoxseg6ei8_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_f32m1(...) __riscv_vsoxseg6ei8_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_f32m1_m(...) __riscv_vsoxseg6ei8_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_f32mf2(...) __riscv_vsoxseg6ei8_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_f32mf2_m(...) __riscv_vsoxseg6ei8_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_f64m1(...) __riscv_vsoxseg6ei8_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_f64m1_m(...) __riscv_vsoxseg6ei8_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_i16m1(...) __riscv_vsoxseg6ei8_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_i16m1_m(...) __riscv_vsoxseg6ei8_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_i16mf2(...) __riscv_vsoxseg6ei8_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_i16mf2_m(...) __riscv_vsoxseg6ei8_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_i16mf4(...) __riscv_vsoxseg6ei8_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_i16mf4_m(...) __riscv_vsoxseg6ei8_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_i32m1(...) __riscv_vsoxseg6ei8_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_i32m1_m(...) __riscv_vsoxseg6ei8_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_i32mf2(...) __riscv_vsoxseg6ei8_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_i32mf2_m(...) __riscv_vsoxseg6ei8_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_i64m1(...) __riscv_vsoxseg6ei8_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_i64m1_m(...) __riscv_vsoxseg6ei8_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_i8m1(...) __riscv_vsoxseg6ei8_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_i8m1_m(...) __riscv_vsoxseg6ei8_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_i8mf2(...) __riscv_vsoxseg6ei8_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_i8mf2_m(...) __riscv_vsoxseg6ei8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_i8mf4(...) __riscv_vsoxseg6ei8_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_i8mf4_m(...) __riscv_vsoxseg6ei8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_i8mf8(...) __riscv_vsoxseg6ei8_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_i8mf8_m(...) __riscv_vsoxseg6ei8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_u16m1(...) __riscv_vsoxseg6ei8_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_u16m1_m(...) __riscv_vsoxseg6ei8_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_u16mf2(...) __riscv_vsoxseg6ei8_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_u16mf2_m(...) __riscv_vsoxseg6ei8_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_u16mf4(...) __riscv_vsoxseg6ei8_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_u16mf4_m(...) __riscv_vsoxseg6ei8_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_u32m1(...) __riscv_vsoxseg6ei8_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_u32m1_m(...) __riscv_vsoxseg6ei8_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_u32mf2(...) __riscv_vsoxseg6ei8_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_u32mf2_m(...) __riscv_vsoxseg6ei8_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_u64m1(...) __riscv_vsoxseg6ei8_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_u64m1_m(...) __riscv_vsoxseg6ei8_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_u8m1(...) __riscv_vsoxseg6ei8_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_u8m1_m(...) __riscv_vsoxseg6ei8_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_u8mf2(...) __riscv_vsoxseg6ei8_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_u8mf2_m(...) __riscv_vsoxseg6ei8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_u8mf4(...) __riscv_vsoxseg6ei8_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_u8mf4_m(...) __riscv_vsoxseg6ei8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_u8mf8(...) __riscv_vsoxseg6ei8_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg6ei8_v_u8mf8_m(...) __riscv_vsoxseg6ei8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_f16m1(...) __riscv_vsoxseg7ei16_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_f16m1_m(...) __riscv_vsoxseg7ei16_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_f16mf2(...) __riscv_vsoxseg7ei16_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_f16mf2_m(...) __riscv_vsoxseg7ei16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_f16mf4(...) __riscv_vsoxseg7ei16_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_f16mf4_m(...) __riscv_vsoxseg7ei16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_f32m1(...) __riscv_vsoxseg7ei16_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_f32m1_m(...) __riscv_vsoxseg7ei16_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_f32mf2(...) __riscv_vsoxseg7ei16_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_f32mf2_m(...) __riscv_vsoxseg7ei16_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_f64m1(...) __riscv_vsoxseg7ei16_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_f64m1_m(...) __riscv_vsoxseg7ei16_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_i16m1(...) __riscv_vsoxseg7ei16_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_i16m1_m(...) __riscv_vsoxseg7ei16_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_i16mf2(...) __riscv_vsoxseg7ei16_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_i16mf2_m(...) __riscv_vsoxseg7ei16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_i16mf4(...) __riscv_vsoxseg7ei16_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_i16mf4_m(...) __riscv_vsoxseg7ei16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_i32m1(...) __riscv_vsoxseg7ei16_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_i32m1_m(...) __riscv_vsoxseg7ei16_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_i32mf2(...) __riscv_vsoxseg7ei16_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_i32mf2_m(...) __riscv_vsoxseg7ei16_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_i64m1(...) __riscv_vsoxseg7ei16_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_i64m1_m(...) __riscv_vsoxseg7ei16_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_i8m1(...) __riscv_vsoxseg7ei16_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_i8m1_m(...) __riscv_vsoxseg7ei16_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_i8mf2(...) __riscv_vsoxseg7ei16_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_i8mf2_m(...) __riscv_vsoxseg7ei16_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_i8mf4(...) __riscv_vsoxseg7ei16_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_i8mf4_m(...) __riscv_vsoxseg7ei16_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_i8mf8(...) __riscv_vsoxseg7ei16_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_i8mf8_m(...) __riscv_vsoxseg7ei16_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_u16m1(...) __riscv_vsoxseg7ei16_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_u16m1_m(...) __riscv_vsoxseg7ei16_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_u16mf2(...) __riscv_vsoxseg7ei16_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_u16mf2_m(...) __riscv_vsoxseg7ei16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_u16mf4(...) __riscv_vsoxseg7ei16_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_u16mf4_m(...) __riscv_vsoxseg7ei16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_u32m1(...) __riscv_vsoxseg7ei16_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_u32m1_m(...) __riscv_vsoxseg7ei16_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_u32mf2(...) __riscv_vsoxseg7ei16_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_u32mf2_m(...) __riscv_vsoxseg7ei16_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_u64m1(...) __riscv_vsoxseg7ei16_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_u64m1_m(...) __riscv_vsoxseg7ei16_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_u8m1(...) __riscv_vsoxseg7ei16_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_u8m1_m(...) __riscv_vsoxseg7ei16_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_u8mf2(...) __riscv_vsoxseg7ei16_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_u8mf2_m(...) __riscv_vsoxseg7ei16_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_u8mf4(...) __riscv_vsoxseg7ei16_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_u8mf4_m(...) __riscv_vsoxseg7ei16_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_u8mf8(...) __riscv_vsoxseg7ei16_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg7ei16_v_u8mf8_m(...) __riscv_vsoxseg7ei16_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_f16m1(...) __riscv_vsoxseg7ei32_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_f16m1_m(...) __riscv_vsoxseg7ei32_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_f16mf2(...) __riscv_vsoxseg7ei32_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_f16mf2_m(...) __riscv_vsoxseg7ei32_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_f16mf4(...) __riscv_vsoxseg7ei32_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_f16mf4_m(...) __riscv_vsoxseg7ei32_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_f32m1(...) __riscv_vsoxseg7ei32_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_f32m1_m(...) __riscv_vsoxseg7ei32_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_f32mf2(...) __riscv_vsoxseg7ei32_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_f32mf2_m(...) __riscv_vsoxseg7ei32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_f64m1(...) __riscv_vsoxseg7ei32_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_f64m1_m(...) __riscv_vsoxseg7ei32_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_i16m1(...) __riscv_vsoxseg7ei32_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_i16m1_m(...) __riscv_vsoxseg7ei32_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_i16mf2(...) __riscv_vsoxseg7ei32_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_i16mf2_m(...) __riscv_vsoxseg7ei32_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_i16mf4(...) __riscv_vsoxseg7ei32_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_i16mf4_m(...) __riscv_vsoxseg7ei32_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_i32m1(...) __riscv_vsoxseg7ei32_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_i32m1_m(...) __riscv_vsoxseg7ei32_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_i32mf2(...) __riscv_vsoxseg7ei32_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_i32mf2_m(...) __riscv_vsoxseg7ei32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_i64m1(...) __riscv_vsoxseg7ei32_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_i64m1_m(...) __riscv_vsoxseg7ei32_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_i8m1(...) __riscv_vsoxseg7ei32_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_i8m1_m(...) __riscv_vsoxseg7ei32_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_i8mf2(...) __riscv_vsoxseg7ei32_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_i8mf2_m(...) __riscv_vsoxseg7ei32_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_i8mf4(...) __riscv_vsoxseg7ei32_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_i8mf4_m(...) __riscv_vsoxseg7ei32_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_i8mf8(...) __riscv_vsoxseg7ei32_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_i8mf8_m(...) __riscv_vsoxseg7ei32_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_u16m1(...) __riscv_vsoxseg7ei32_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_u16m1_m(...) __riscv_vsoxseg7ei32_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_u16mf2(...) __riscv_vsoxseg7ei32_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_u16mf2_m(...) __riscv_vsoxseg7ei32_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_u16mf4(...) __riscv_vsoxseg7ei32_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_u16mf4_m(...) __riscv_vsoxseg7ei32_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_u32m1(...) __riscv_vsoxseg7ei32_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_u32m1_m(...) __riscv_vsoxseg7ei32_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_u32mf2(...) __riscv_vsoxseg7ei32_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_u32mf2_m(...) __riscv_vsoxseg7ei32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_u64m1(...) __riscv_vsoxseg7ei32_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_u64m1_m(...) __riscv_vsoxseg7ei32_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_u8m1(...) __riscv_vsoxseg7ei32_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_u8m1_m(...) __riscv_vsoxseg7ei32_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_u8mf2(...) __riscv_vsoxseg7ei32_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_u8mf2_m(...) __riscv_vsoxseg7ei32_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_u8mf4(...) __riscv_vsoxseg7ei32_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_u8mf4_m(...) __riscv_vsoxseg7ei32_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_u8mf8(...) __riscv_vsoxseg7ei32_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg7ei32_v_u8mf8_m(...) __riscv_vsoxseg7ei32_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_f16m1(...) __riscv_vsoxseg7ei64_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_f16m1_m(...) __riscv_vsoxseg7ei64_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_f16mf2(...) __riscv_vsoxseg7ei64_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_f16mf2_m(...) __riscv_vsoxseg7ei64_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_f16mf4(...) __riscv_vsoxseg7ei64_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_f16mf4_m(...) __riscv_vsoxseg7ei64_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_f32m1(...) __riscv_vsoxseg7ei64_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_f32m1_m(...) __riscv_vsoxseg7ei64_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_f32mf2(...) __riscv_vsoxseg7ei64_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_f32mf2_m(...) __riscv_vsoxseg7ei64_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_f64m1(...) __riscv_vsoxseg7ei64_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_f64m1_m(...) __riscv_vsoxseg7ei64_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_i16m1(...) __riscv_vsoxseg7ei64_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_i16m1_m(...) __riscv_vsoxseg7ei64_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_i16mf2(...) __riscv_vsoxseg7ei64_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_i16mf2_m(...) __riscv_vsoxseg7ei64_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_i16mf4(...) __riscv_vsoxseg7ei64_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_i16mf4_m(...) __riscv_vsoxseg7ei64_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_i32m1(...) __riscv_vsoxseg7ei64_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_i32m1_m(...) __riscv_vsoxseg7ei64_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_i32mf2(...) __riscv_vsoxseg7ei64_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_i32mf2_m(...) __riscv_vsoxseg7ei64_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_i64m1(...) __riscv_vsoxseg7ei64_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_i64m1_m(...) __riscv_vsoxseg7ei64_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_i8m1(...) __riscv_vsoxseg7ei64_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_i8m1_m(...) __riscv_vsoxseg7ei64_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_i8mf2(...) __riscv_vsoxseg7ei64_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_i8mf2_m(...) __riscv_vsoxseg7ei64_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_i8mf4(...) __riscv_vsoxseg7ei64_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_i8mf4_m(...) __riscv_vsoxseg7ei64_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_i8mf8(...) __riscv_vsoxseg7ei64_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_i8mf8_m(...) __riscv_vsoxseg7ei64_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_u16m1(...) __riscv_vsoxseg7ei64_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_u16m1_m(...) __riscv_vsoxseg7ei64_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_u16mf2(...) __riscv_vsoxseg7ei64_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_u16mf2_m(...) __riscv_vsoxseg7ei64_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_u16mf4(...) __riscv_vsoxseg7ei64_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_u16mf4_m(...) __riscv_vsoxseg7ei64_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_u32m1(...) __riscv_vsoxseg7ei64_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_u32m1_m(...) __riscv_vsoxseg7ei64_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_u32mf2(...) __riscv_vsoxseg7ei64_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_u32mf2_m(...) __riscv_vsoxseg7ei64_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_u64m1(...) __riscv_vsoxseg7ei64_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_u64m1_m(...) __riscv_vsoxseg7ei64_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_u8m1(...) __riscv_vsoxseg7ei64_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_u8m1_m(...) __riscv_vsoxseg7ei64_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_u8mf2(...) __riscv_vsoxseg7ei64_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_u8mf2_m(...) __riscv_vsoxseg7ei64_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_u8mf4(...) __riscv_vsoxseg7ei64_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_u8mf4_m(...) __riscv_vsoxseg7ei64_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_u8mf8(...) __riscv_vsoxseg7ei64_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg7ei64_v_u8mf8_m(...) __riscv_vsoxseg7ei64_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_f16m1(...) __riscv_vsoxseg7ei8_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_f16m1_m(...) __riscv_vsoxseg7ei8_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_f16mf2(...) __riscv_vsoxseg7ei8_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_f16mf2_m(...) __riscv_vsoxseg7ei8_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_f16mf4(...) __riscv_vsoxseg7ei8_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_f16mf4_m(...) __riscv_vsoxseg7ei8_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_f32m1(...) __riscv_vsoxseg7ei8_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_f32m1_m(...) __riscv_vsoxseg7ei8_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_f32mf2(...) __riscv_vsoxseg7ei8_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_f32mf2_m(...) __riscv_vsoxseg7ei8_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_f64m1(...) __riscv_vsoxseg7ei8_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_f64m1_m(...) __riscv_vsoxseg7ei8_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_i16m1(...) __riscv_vsoxseg7ei8_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_i16m1_m(...) __riscv_vsoxseg7ei8_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_i16mf2(...) __riscv_vsoxseg7ei8_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_i16mf2_m(...) __riscv_vsoxseg7ei8_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_i16mf4(...) __riscv_vsoxseg7ei8_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_i16mf4_m(...) __riscv_vsoxseg7ei8_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_i32m1(...) __riscv_vsoxseg7ei8_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_i32m1_m(...) __riscv_vsoxseg7ei8_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_i32mf2(...) __riscv_vsoxseg7ei8_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_i32mf2_m(...) __riscv_vsoxseg7ei8_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_i64m1(...) __riscv_vsoxseg7ei8_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_i64m1_m(...) __riscv_vsoxseg7ei8_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_i8m1(...) __riscv_vsoxseg7ei8_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_i8m1_m(...) __riscv_vsoxseg7ei8_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_i8mf2(...) __riscv_vsoxseg7ei8_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_i8mf2_m(...) __riscv_vsoxseg7ei8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_i8mf4(...) __riscv_vsoxseg7ei8_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_i8mf4_m(...) __riscv_vsoxseg7ei8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_i8mf8(...) __riscv_vsoxseg7ei8_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_i8mf8_m(...) __riscv_vsoxseg7ei8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_u16m1(...) __riscv_vsoxseg7ei8_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_u16m1_m(...) __riscv_vsoxseg7ei8_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_u16mf2(...) __riscv_vsoxseg7ei8_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_u16mf2_m(...) __riscv_vsoxseg7ei8_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_u16mf4(...) __riscv_vsoxseg7ei8_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_u16mf4_m(...) __riscv_vsoxseg7ei8_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_u32m1(...) __riscv_vsoxseg7ei8_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_u32m1_m(...) __riscv_vsoxseg7ei8_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_u32mf2(...) __riscv_vsoxseg7ei8_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_u32mf2_m(...) __riscv_vsoxseg7ei8_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_u64m1(...) __riscv_vsoxseg7ei8_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_u64m1_m(...) __riscv_vsoxseg7ei8_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_u8m1(...) __riscv_vsoxseg7ei8_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_u8m1_m(...) __riscv_vsoxseg7ei8_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_u8mf2(...) __riscv_vsoxseg7ei8_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_u8mf2_m(...) __riscv_vsoxseg7ei8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_u8mf4(...) __riscv_vsoxseg7ei8_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_u8mf4_m(...) __riscv_vsoxseg7ei8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_u8mf8(...) __riscv_vsoxseg7ei8_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg7ei8_v_u8mf8_m(...) __riscv_vsoxseg7ei8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_f16m1(...) __riscv_vsoxseg8ei16_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_f16m1_m(...) __riscv_vsoxseg8ei16_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_f16mf2(...) __riscv_vsoxseg8ei16_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_f16mf2_m(...) __riscv_vsoxseg8ei16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_f16mf4(...) __riscv_vsoxseg8ei16_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_f16mf4_m(...) __riscv_vsoxseg8ei16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_f32m1(...) __riscv_vsoxseg8ei16_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_f32m1_m(...) __riscv_vsoxseg8ei16_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_f32mf2(...) __riscv_vsoxseg8ei16_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_f32mf2_m(...) __riscv_vsoxseg8ei16_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_f64m1(...) __riscv_vsoxseg8ei16_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_f64m1_m(...) __riscv_vsoxseg8ei16_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_i16m1(...) __riscv_vsoxseg8ei16_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_i16m1_m(...) __riscv_vsoxseg8ei16_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_i16mf2(...) __riscv_vsoxseg8ei16_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_i16mf2_m(...) __riscv_vsoxseg8ei16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_i16mf4(...) __riscv_vsoxseg8ei16_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_i16mf4_m(...) __riscv_vsoxseg8ei16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_i32m1(...) __riscv_vsoxseg8ei16_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_i32m1_m(...) __riscv_vsoxseg8ei16_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_i32mf2(...) __riscv_vsoxseg8ei16_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_i32mf2_m(...) __riscv_vsoxseg8ei16_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_i64m1(...) __riscv_vsoxseg8ei16_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_i64m1_m(...) __riscv_vsoxseg8ei16_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_i8m1(...) __riscv_vsoxseg8ei16_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_i8m1_m(...) __riscv_vsoxseg8ei16_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_i8mf2(...) __riscv_vsoxseg8ei16_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_i8mf2_m(...) __riscv_vsoxseg8ei16_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_i8mf4(...) __riscv_vsoxseg8ei16_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_i8mf4_m(...) __riscv_vsoxseg8ei16_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_i8mf8(...) __riscv_vsoxseg8ei16_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_i8mf8_m(...) __riscv_vsoxseg8ei16_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_u16m1(...) __riscv_vsoxseg8ei16_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_u16m1_m(...) __riscv_vsoxseg8ei16_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_u16mf2(...) __riscv_vsoxseg8ei16_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_u16mf2_m(...) __riscv_vsoxseg8ei16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_u16mf4(...) __riscv_vsoxseg8ei16_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_u16mf4_m(...) __riscv_vsoxseg8ei16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_u32m1(...) __riscv_vsoxseg8ei16_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_u32m1_m(...) __riscv_vsoxseg8ei16_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_u32mf2(...) __riscv_vsoxseg8ei16_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_u32mf2_m(...) __riscv_vsoxseg8ei16_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_u64m1(...) __riscv_vsoxseg8ei16_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_u64m1_m(...) __riscv_vsoxseg8ei16_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_u8m1(...) __riscv_vsoxseg8ei16_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_u8m1_m(...) __riscv_vsoxseg8ei16_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_u8mf2(...) __riscv_vsoxseg8ei16_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_u8mf2_m(...) __riscv_vsoxseg8ei16_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_u8mf4(...) __riscv_vsoxseg8ei16_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_u8mf4_m(...) __riscv_vsoxseg8ei16_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_u8mf8(...) __riscv_vsoxseg8ei16_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg8ei16_v_u8mf8_m(...) __riscv_vsoxseg8ei16_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_f16m1(...) __riscv_vsoxseg8ei32_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_f16m1_m(...) __riscv_vsoxseg8ei32_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_f16mf2(...) __riscv_vsoxseg8ei32_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_f16mf2_m(...) __riscv_vsoxseg8ei32_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_f16mf4(...) __riscv_vsoxseg8ei32_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_f16mf4_m(...) __riscv_vsoxseg8ei32_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_f32m1(...) __riscv_vsoxseg8ei32_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_f32m1_m(...) __riscv_vsoxseg8ei32_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_f32mf2(...) __riscv_vsoxseg8ei32_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_f32mf2_m(...) __riscv_vsoxseg8ei32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_f64m1(...) __riscv_vsoxseg8ei32_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_f64m1_m(...) __riscv_vsoxseg8ei32_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_i16m1(...) __riscv_vsoxseg8ei32_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_i16m1_m(...) __riscv_vsoxseg8ei32_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_i16mf2(...) __riscv_vsoxseg8ei32_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_i16mf2_m(...) __riscv_vsoxseg8ei32_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_i16mf4(...) __riscv_vsoxseg8ei32_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_i16mf4_m(...) __riscv_vsoxseg8ei32_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_i32m1(...) __riscv_vsoxseg8ei32_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_i32m1_m(...) __riscv_vsoxseg8ei32_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_i32mf2(...) __riscv_vsoxseg8ei32_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_i32mf2_m(...) __riscv_vsoxseg8ei32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_i64m1(...) __riscv_vsoxseg8ei32_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_i64m1_m(...) __riscv_vsoxseg8ei32_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_i8m1(...) __riscv_vsoxseg8ei32_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_i8m1_m(...) __riscv_vsoxseg8ei32_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_i8mf2(...) __riscv_vsoxseg8ei32_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_i8mf2_m(...) __riscv_vsoxseg8ei32_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_i8mf4(...) __riscv_vsoxseg8ei32_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_i8mf4_m(...) __riscv_vsoxseg8ei32_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_i8mf8(...) __riscv_vsoxseg8ei32_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_i8mf8_m(...) __riscv_vsoxseg8ei32_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_u16m1(...) __riscv_vsoxseg8ei32_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_u16m1_m(...) __riscv_vsoxseg8ei32_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_u16mf2(...) __riscv_vsoxseg8ei32_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_u16mf2_m(...) __riscv_vsoxseg8ei32_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_u16mf4(...) __riscv_vsoxseg8ei32_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_u16mf4_m(...) __riscv_vsoxseg8ei32_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_u32m1(...) __riscv_vsoxseg8ei32_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_u32m1_m(...) __riscv_vsoxseg8ei32_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_u32mf2(...) __riscv_vsoxseg8ei32_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_u32mf2_m(...) __riscv_vsoxseg8ei32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_u64m1(...) __riscv_vsoxseg8ei32_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_u64m1_m(...) __riscv_vsoxseg8ei32_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_u8m1(...) __riscv_vsoxseg8ei32_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_u8m1_m(...) __riscv_vsoxseg8ei32_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_u8mf2(...) __riscv_vsoxseg8ei32_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_u8mf2_m(...) __riscv_vsoxseg8ei32_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_u8mf4(...) __riscv_vsoxseg8ei32_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_u8mf4_m(...) __riscv_vsoxseg8ei32_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_u8mf8(...) __riscv_vsoxseg8ei32_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg8ei32_v_u8mf8_m(...) __riscv_vsoxseg8ei32_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_f16m1(...) __riscv_vsoxseg8ei64_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_f16m1_m(...) __riscv_vsoxseg8ei64_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_f16mf2(...) __riscv_vsoxseg8ei64_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_f16mf2_m(...) __riscv_vsoxseg8ei64_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_f16mf4(...) __riscv_vsoxseg8ei64_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_f16mf4_m(...) __riscv_vsoxseg8ei64_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_f32m1(...) __riscv_vsoxseg8ei64_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_f32m1_m(...) __riscv_vsoxseg8ei64_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_f32mf2(...) __riscv_vsoxseg8ei64_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_f32mf2_m(...) __riscv_vsoxseg8ei64_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_f64m1(...) __riscv_vsoxseg8ei64_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_f64m1_m(...) __riscv_vsoxseg8ei64_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_i16m1(...) __riscv_vsoxseg8ei64_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_i16m1_m(...) __riscv_vsoxseg8ei64_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_i16mf2(...) __riscv_vsoxseg8ei64_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_i16mf2_m(...) __riscv_vsoxseg8ei64_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_i16mf4(...) __riscv_vsoxseg8ei64_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_i16mf4_m(...) __riscv_vsoxseg8ei64_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_i32m1(...) __riscv_vsoxseg8ei64_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_i32m1_m(...) __riscv_vsoxseg8ei64_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_i32mf2(...) __riscv_vsoxseg8ei64_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_i32mf2_m(...) __riscv_vsoxseg8ei64_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_i64m1(...) __riscv_vsoxseg8ei64_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_i64m1_m(...) __riscv_vsoxseg8ei64_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_i8m1(...) __riscv_vsoxseg8ei64_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_i8m1_m(...) __riscv_vsoxseg8ei64_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_i8mf2(...) __riscv_vsoxseg8ei64_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_i8mf2_m(...) __riscv_vsoxseg8ei64_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_i8mf4(...) __riscv_vsoxseg8ei64_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_i8mf4_m(...) __riscv_vsoxseg8ei64_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_i8mf8(...) __riscv_vsoxseg8ei64_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_i8mf8_m(...) __riscv_vsoxseg8ei64_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_u16m1(...) __riscv_vsoxseg8ei64_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_u16m1_m(...) __riscv_vsoxseg8ei64_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_u16mf2(...) __riscv_vsoxseg8ei64_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_u16mf2_m(...) __riscv_vsoxseg8ei64_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_u16mf4(...) __riscv_vsoxseg8ei64_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_u16mf4_m(...) __riscv_vsoxseg8ei64_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_u32m1(...) __riscv_vsoxseg8ei64_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_u32m1_m(...) __riscv_vsoxseg8ei64_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_u32mf2(...) __riscv_vsoxseg8ei64_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_u32mf2_m(...) __riscv_vsoxseg8ei64_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_u64m1(...) __riscv_vsoxseg8ei64_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_u64m1_m(...) __riscv_vsoxseg8ei64_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_u8m1(...) __riscv_vsoxseg8ei64_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_u8m1_m(...) __riscv_vsoxseg8ei64_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_u8mf2(...) __riscv_vsoxseg8ei64_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_u8mf2_m(...) __riscv_vsoxseg8ei64_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_u8mf4(...) __riscv_vsoxseg8ei64_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_u8mf4_m(...) __riscv_vsoxseg8ei64_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_u8mf8(...) __riscv_vsoxseg8ei64_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg8ei64_v_u8mf8_m(...) __riscv_vsoxseg8ei64_v_u8mf8_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_f16m1(...) __riscv_vsoxseg8ei8_v_f16m1(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_f16m1_m(...) __riscv_vsoxseg8ei8_v_f16m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_f16mf2(...) __riscv_vsoxseg8ei8_v_f16mf2(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_f16mf2_m(...) __riscv_vsoxseg8ei8_v_f16mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_f16mf4(...) __riscv_vsoxseg8ei8_v_f16mf4(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_f16mf4_m(...) __riscv_vsoxseg8ei8_v_f16mf4_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_f32m1(...) __riscv_vsoxseg8ei8_v_f32m1(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_f32m1_m(...) __riscv_vsoxseg8ei8_v_f32m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_f32mf2(...) __riscv_vsoxseg8ei8_v_f32mf2(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_f32mf2_m(...) __riscv_vsoxseg8ei8_v_f32mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_f64m1(...) __riscv_vsoxseg8ei8_v_f64m1(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_f64m1_m(...) __riscv_vsoxseg8ei8_v_f64m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_i16m1(...) __riscv_vsoxseg8ei8_v_i16m1(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_i16m1_m(...) __riscv_vsoxseg8ei8_v_i16m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_i16mf2(...) __riscv_vsoxseg8ei8_v_i16mf2(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_i16mf2_m(...) __riscv_vsoxseg8ei8_v_i16mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_i16mf4(...) __riscv_vsoxseg8ei8_v_i16mf4(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_i16mf4_m(...) __riscv_vsoxseg8ei8_v_i16mf4_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_i32m1(...) __riscv_vsoxseg8ei8_v_i32m1(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_i32m1_m(...) __riscv_vsoxseg8ei8_v_i32m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_i32mf2(...) __riscv_vsoxseg8ei8_v_i32mf2(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_i32mf2_m(...) __riscv_vsoxseg8ei8_v_i32mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_i64m1(...) __riscv_vsoxseg8ei8_v_i64m1(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_i64m1_m(...) __riscv_vsoxseg8ei8_v_i64m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_i8m1(...) __riscv_vsoxseg8ei8_v_i8m1(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_i8m1_m(...) __riscv_vsoxseg8ei8_v_i8m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_i8mf2(...) __riscv_vsoxseg8ei8_v_i8mf2(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_i8mf2_m(...) __riscv_vsoxseg8ei8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_i8mf4(...) __riscv_vsoxseg8ei8_v_i8mf4(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_i8mf4_m(...) __riscv_vsoxseg8ei8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_i8mf8(...) __riscv_vsoxseg8ei8_v_i8mf8(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_i8mf8_m(...) __riscv_vsoxseg8ei8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_u16m1(...) __riscv_vsoxseg8ei8_v_u16m1(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_u16m1_m(...) __riscv_vsoxseg8ei8_v_u16m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_u16mf2(...) __riscv_vsoxseg8ei8_v_u16mf2(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_u16mf2_m(...) __riscv_vsoxseg8ei8_v_u16mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_u16mf4(...) __riscv_vsoxseg8ei8_v_u16mf4(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_u16mf4_m(...) __riscv_vsoxseg8ei8_v_u16mf4_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_u32m1(...) __riscv_vsoxseg8ei8_v_u32m1(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_u32m1_m(...) __riscv_vsoxseg8ei8_v_u32m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_u32mf2(...) __riscv_vsoxseg8ei8_v_u32mf2(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_u32mf2_m(...) __riscv_vsoxseg8ei8_v_u32mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_u64m1(...) __riscv_vsoxseg8ei8_v_u64m1(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_u64m1_m(...) __riscv_vsoxseg8ei8_v_u64m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_u8m1(...) __riscv_vsoxseg8ei8_v_u8m1(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_u8m1_m(...) __riscv_vsoxseg8ei8_v_u8m1_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_u8mf2(...) __riscv_vsoxseg8ei8_v_u8mf2(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_u8mf2_m(...) __riscv_vsoxseg8ei8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_u8mf4(...) __riscv_vsoxseg8ei8_v_u8mf4(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_u8mf4_m(...) __riscv_vsoxseg8ei8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_u8mf8(...) __riscv_vsoxseg8ei8_v_u8mf8(__VA_ARGS__) |
| #define | vsoxseg8ei8_v_u8mf8_m(...) __riscv_vsoxseg8ei8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsra_vv_i16m1(...) __riscv_vsra_vv_i16m1(__VA_ARGS__) |
| #define | vsra_vv_i16m1_m(...) __riscv_vsra_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vsra_vv_i16m2(...) __riscv_vsra_vv_i16m2(__VA_ARGS__) |
| #define | vsra_vv_i16m2_m(...) __riscv_vsra_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vsra_vv_i16m4(...) __riscv_vsra_vv_i16m4(__VA_ARGS__) |
| #define | vsra_vv_i16m4_m(...) __riscv_vsra_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vsra_vv_i16m8(...) __riscv_vsra_vv_i16m8(__VA_ARGS__) |
| #define | vsra_vv_i16m8_m(...) __riscv_vsra_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vsra_vv_i16mf2(...) __riscv_vsra_vv_i16mf2(__VA_ARGS__) |
| #define | vsra_vv_i16mf2_m(...) __riscv_vsra_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vsra_vv_i16mf4(...) __riscv_vsra_vv_i16mf4(__VA_ARGS__) |
| #define | vsra_vv_i16mf4_m(...) __riscv_vsra_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vsra_vv_i32m1(...) __riscv_vsra_vv_i32m1(__VA_ARGS__) |
| #define | vsra_vv_i32m1_m(...) __riscv_vsra_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vsra_vv_i32m2(...) __riscv_vsra_vv_i32m2(__VA_ARGS__) |
| #define | vsra_vv_i32m2_m(...) __riscv_vsra_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vsra_vv_i32m4(...) __riscv_vsra_vv_i32m4(__VA_ARGS__) |
| #define | vsra_vv_i32m4_m(...) __riscv_vsra_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vsra_vv_i32m8(...) __riscv_vsra_vv_i32m8(__VA_ARGS__) |
| #define | vsra_vv_i32m8_m(...) __riscv_vsra_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vsra_vv_i32mf2(...) __riscv_vsra_vv_i32mf2(__VA_ARGS__) |
| #define | vsra_vv_i32mf2_m(...) __riscv_vsra_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vsra_vv_i64m1(...) __riscv_vsra_vv_i64m1(__VA_ARGS__) |
| #define | vsra_vv_i64m1_m(...) __riscv_vsra_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vsra_vv_i64m2(...) __riscv_vsra_vv_i64m2(__VA_ARGS__) |
| #define | vsra_vv_i64m2_m(...) __riscv_vsra_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vsra_vv_i64m4(...) __riscv_vsra_vv_i64m4(__VA_ARGS__) |
| #define | vsra_vv_i64m4_m(...) __riscv_vsra_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vsra_vv_i64m8(...) __riscv_vsra_vv_i64m8(__VA_ARGS__) |
| #define | vsra_vv_i64m8_m(...) __riscv_vsra_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vsra_vv_i8m1(...) __riscv_vsra_vv_i8m1(__VA_ARGS__) |
| #define | vsra_vv_i8m1_m(...) __riscv_vsra_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vsra_vv_i8m2(...) __riscv_vsra_vv_i8m2(__VA_ARGS__) |
| #define | vsra_vv_i8m2_m(...) __riscv_vsra_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vsra_vv_i8m4(...) __riscv_vsra_vv_i8m4(__VA_ARGS__) |
| #define | vsra_vv_i8m4_m(...) __riscv_vsra_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vsra_vv_i8m8(...) __riscv_vsra_vv_i8m8(__VA_ARGS__) |
| #define | vsra_vv_i8m8_m(...) __riscv_vsra_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vsra_vv_i8mf2(...) __riscv_vsra_vv_i8mf2(__VA_ARGS__) |
| #define | vsra_vv_i8mf2_m(...) __riscv_vsra_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vsra_vv_i8mf4(...) __riscv_vsra_vv_i8mf4(__VA_ARGS__) |
| #define | vsra_vv_i8mf4_m(...) __riscv_vsra_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vsra_vv_i8mf8(...) __riscv_vsra_vv_i8mf8(__VA_ARGS__) |
| #define | vsra_vv_i8mf8_m(...) __riscv_vsra_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vsra_vx_i16m1(...) __riscv_vsra_vx_i16m1(__VA_ARGS__) |
| #define | vsra_vx_i16m1_m(...) __riscv_vsra_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vsra_vx_i16m2(...) __riscv_vsra_vx_i16m2(__VA_ARGS__) |
| #define | vsra_vx_i16m2_m(...) __riscv_vsra_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vsra_vx_i16m4(...) __riscv_vsra_vx_i16m4(__VA_ARGS__) |
| #define | vsra_vx_i16m4_m(...) __riscv_vsra_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vsra_vx_i16m8(...) __riscv_vsra_vx_i16m8(__VA_ARGS__) |
| #define | vsra_vx_i16m8_m(...) __riscv_vsra_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vsra_vx_i16mf2(...) __riscv_vsra_vx_i16mf2(__VA_ARGS__) |
| #define | vsra_vx_i16mf2_m(...) __riscv_vsra_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vsra_vx_i16mf4(...) __riscv_vsra_vx_i16mf4(__VA_ARGS__) |
| #define | vsra_vx_i16mf4_m(...) __riscv_vsra_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vsra_vx_i32m1(...) __riscv_vsra_vx_i32m1(__VA_ARGS__) |
| #define | vsra_vx_i32m1_m(...) __riscv_vsra_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vsra_vx_i32m2(...) __riscv_vsra_vx_i32m2(__VA_ARGS__) |
| #define | vsra_vx_i32m2_m(...) __riscv_vsra_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vsra_vx_i32m4(...) __riscv_vsra_vx_i32m4(__VA_ARGS__) |
| #define | vsra_vx_i32m4_m(...) __riscv_vsra_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vsra_vx_i32m8(...) __riscv_vsra_vx_i32m8(__VA_ARGS__) |
| #define | vsra_vx_i32m8_m(...) __riscv_vsra_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vsra_vx_i32mf2(...) __riscv_vsra_vx_i32mf2(__VA_ARGS__) |
| #define | vsra_vx_i32mf2_m(...) __riscv_vsra_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vsra_vx_i64m1(...) __riscv_vsra_vx_i64m1(__VA_ARGS__) |
| #define | vsra_vx_i64m1_m(...) __riscv_vsra_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vsra_vx_i64m2(...) __riscv_vsra_vx_i64m2(__VA_ARGS__) |
| #define | vsra_vx_i64m2_m(...) __riscv_vsra_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vsra_vx_i64m4(...) __riscv_vsra_vx_i64m4(__VA_ARGS__) |
| #define | vsra_vx_i64m4_m(...) __riscv_vsra_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vsra_vx_i64m8(...) __riscv_vsra_vx_i64m8(__VA_ARGS__) |
| #define | vsra_vx_i64m8_m(...) __riscv_vsra_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vsra_vx_i8m1(...) __riscv_vsra_vx_i8m1(__VA_ARGS__) |
| #define | vsra_vx_i8m1_m(...) __riscv_vsra_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vsra_vx_i8m2(...) __riscv_vsra_vx_i8m2(__VA_ARGS__) |
| #define | vsra_vx_i8m2_m(...) __riscv_vsra_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vsra_vx_i8m4(...) __riscv_vsra_vx_i8m4(__VA_ARGS__) |
| #define | vsra_vx_i8m4_m(...) __riscv_vsra_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vsra_vx_i8m8(...) __riscv_vsra_vx_i8m8(__VA_ARGS__) |
| #define | vsra_vx_i8m8_m(...) __riscv_vsra_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vsra_vx_i8mf2(...) __riscv_vsra_vx_i8mf2(__VA_ARGS__) |
| #define | vsra_vx_i8mf2_m(...) __riscv_vsra_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vsra_vx_i8mf4(...) __riscv_vsra_vx_i8mf4(__VA_ARGS__) |
| #define | vsra_vx_i8mf4_m(...) __riscv_vsra_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vsra_vx_i8mf8(...) __riscv_vsra_vx_i8mf8(__VA_ARGS__) |
| #define | vsra_vx_i8mf8_m(...) __riscv_vsra_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u16m1(...) __riscv_vsrl_vv_u16m1(__VA_ARGS__) |
| #define | vsrl_vv_u16m1_m(...) __riscv_vsrl_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u16m2(...) __riscv_vsrl_vv_u16m2(__VA_ARGS__) |
| #define | vsrl_vv_u16m2_m(...) __riscv_vsrl_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u16m4(...) __riscv_vsrl_vv_u16m4(__VA_ARGS__) |
| #define | vsrl_vv_u16m4_m(...) __riscv_vsrl_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u16m8(...) __riscv_vsrl_vv_u16m8(__VA_ARGS__) |
| #define | vsrl_vv_u16m8_m(...) __riscv_vsrl_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u16mf2(...) __riscv_vsrl_vv_u16mf2(__VA_ARGS__) |
| #define | vsrl_vv_u16mf2_m(...) __riscv_vsrl_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u16mf4(...) __riscv_vsrl_vv_u16mf4(__VA_ARGS__) |
| #define | vsrl_vv_u16mf4_m(...) __riscv_vsrl_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u32m1(...) __riscv_vsrl_vv_u32m1(__VA_ARGS__) |
| #define | vsrl_vv_u32m1_m(...) __riscv_vsrl_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u32m2(...) __riscv_vsrl_vv_u32m2(__VA_ARGS__) |
| #define | vsrl_vv_u32m2_m(...) __riscv_vsrl_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u32m4(...) __riscv_vsrl_vv_u32m4(__VA_ARGS__) |
| #define | vsrl_vv_u32m4_m(...) __riscv_vsrl_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u32m8(...) __riscv_vsrl_vv_u32m8(__VA_ARGS__) |
| #define | vsrl_vv_u32m8_m(...) __riscv_vsrl_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u32mf2(...) __riscv_vsrl_vv_u32mf2(__VA_ARGS__) |
| #define | vsrl_vv_u32mf2_m(...) __riscv_vsrl_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u64m1(...) __riscv_vsrl_vv_u64m1(__VA_ARGS__) |
| #define | vsrl_vv_u64m1_m(...) __riscv_vsrl_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u64m2(...) __riscv_vsrl_vv_u64m2(__VA_ARGS__) |
| #define | vsrl_vv_u64m2_m(...) __riscv_vsrl_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u64m4(...) __riscv_vsrl_vv_u64m4(__VA_ARGS__) |
| #define | vsrl_vv_u64m4_m(...) __riscv_vsrl_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u64m8(...) __riscv_vsrl_vv_u64m8(__VA_ARGS__) |
| #define | vsrl_vv_u64m8_m(...) __riscv_vsrl_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u8m1(...) __riscv_vsrl_vv_u8m1(__VA_ARGS__) |
| #define | vsrl_vv_u8m1_m(...) __riscv_vsrl_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u8m2(...) __riscv_vsrl_vv_u8m2(__VA_ARGS__) |
| #define | vsrl_vv_u8m2_m(...) __riscv_vsrl_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u8m4(...) __riscv_vsrl_vv_u8m4(__VA_ARGS__) |
| #define | vsrl_vv_u8m4_m(...) __riscv_vsrl_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u8m8(...) __riscv_vsrl_vv_u8m8(__VA_ARGS__) |
| #define | vsrl_vv_u8m8_m(...) __riscv_vsrl_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u8mf2(...) __riscv_vsrl_vv_u8mf2(__VA_ARGS__) |
| #define | vsrl_vv_u8mf2_m(...) __riscv_vsrl_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u8mf4(...) __riscv_vsrl_vv_u8mf4(__VA_ARGS__) |
| #define | vsrl_vv_u8mf4_m(...) __riscv_vsrl_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vsrl_vv_u8mf8(...) __riscv_vsrl_vv_u8mf8(__VA_ARGS__) |
| #define | vsrl_vv_u8mf8_m(...) __riscv_vsrl_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u16m1(...) __riscv_vsrl_vx_u16m1(__VA_ARGS__) |
| #define | vsrl_vx_u16m1_m(...) __riscv_vsrl_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u16m2(...) __riscv_vsrl_vx_u16m2(__VA_ARGS__) |
| #define | vsrl_vx_u16m2_m(...) __riscv_vsrl_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u16m4(...) __riscv_vsrl_vx_u16m4(__VA_ARGS__) |
| #define | vsrl_vx_u16m4_m(...) __riscv_vsrl_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u16m8(...) __riscv_vsrl_vx_u16m8(__VA_ARGS__) |
| #define | vsrl_vx_u16m8_m(...) __riscv_vsrl_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u16mf2(...) __riscv_vsrl_vx_u16mf2(__VA_ARGS__) |
| #define | vsrl_vx_u16mf2_m(...) __riscv_vsrl_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u16mf4(...) __riscv_vsrl_vx_u16mf4(__VA_ARGS__) |
| #define | vsrl_vx_u16mf4_m(...) __riscv_vsrl_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u32m1(...) __riscv_vsrl_vx_u32m1(__VA_ARGS__) |
| #define | vsrl_vx_u32m1_m(...) __riscv_vsrl_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u32m2(...) __riscv_vsrl_vx_u32m2(__VA_ARGS__) |
| #define | vsrl_vx_u32m2_m(...) __riscv_vsrl_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u32m4(...) __riscv_vsrl_vx_u32m4(__VA_ARGS__) |
| #define | vsrl_vx_u32m4_m(...) __riscv_vsrl_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u32m8(...) __riscv_vsrl_vx_u32m8(__VA_ARGS__) |
| #define | vsrl_vx_u32m8_m(...) __riscv_vsrl_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u32mf2(...) __riscv_vsrl_vx_u32mf2(__VA_ARGS__) |
| #define | vsrl_vx_u32mf2_m(...) __riscv_vsrl_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u64m1(...) __riscv_vsrl_vx_u64m1(__VA_ARGS__) |
| #define | vsrl_vx_u64m1_m(...) __riscv_vsrl_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u64m2(...) __riscv_vsrl_vx_u64m2(__VA_ARGS__) |
| #define | vsrl_vx_u64m2_m(...) __riscv_vsrl_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u64m4(...) __riscv_vsrl_vx_u64m4(__VA_ARGS__) |
| #define | vsrl_vx_u64m4_m(...) __riscv_vsrl_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u64m8(...) __riscv_vsrl_vx_u64m8(__VA_ARGS__) |
| #define | vsrl_vx_u64m8_m(...) __riscv_vsrl_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u8m1(...) __riscv_vsrl_vx_u8m1(__VA_ARGS__) |
| #define | vsrl_vx_u8m1_m(...) __riscv_vsrl_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u8m2(...) __riscv_vsrl_vx_u8m2(__VA_ARGS__) |
| #define | vsrl_vx_u8m2_m(...) __riscv_vsrl_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u8m4(...) __riscv_vsrl_vx_u8m4(__VA_ARGS__) |
| #define | vsrl_vx_u8m4_m(...) __riscv_vsrl_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u8m8(...) __riscv_vsrl_vx_u8m8(__VA_ARGS__) |
| #define | vsrl_vx_u8m8_m(...) __riscv_vsrl_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u8mf2(...) __riscv_vsrl_vx_u8mf2(__VA_ARGS__) |
| #define | vsrl_vx_u8mf2_m(...) __riscv_vsrl_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u8mf4(...) __riscv_vsrl_vx_u8mf4(__VA_ARGS__) |
| #define | vsrl_vx_u8mf4_m(...) __riscv_vsrl_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vsrl_vx_u8mf8(...) __riscv_vsrl_vx_u8mf8(__VA_ARGS__) |
| #define | vsrl_vx_u8mf8_m(...) __riscv_vsrl_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vsse16_v_f16m1(...) __riscv_vsse16_v_f16m1(__VA_ARGS__) |
| #define | vsse16_v_f16m1_m(...) __riscv_vsse16_v_f16m1_m(__VA_ARGS__) |
| #define | vsse16_v_f16m2(...) __riscv_vsse16_v_f16m2(__VA_ARGS__) |
| #define | vsse16_v_f16m2_m(...) __riscv_vsse16_v_f16m2_m(__VA_ARGS__) |
| #define | vsse16_v_f16m4(...) __riscv_vsse16_v_f16m4(__VA_ARGS__) |
| #define | vsse16_v_f16m4_m(...) __riscv_vsse16_v_f16m4_m(__VA_ARGS__) |
| #define | vsse16_v_f16m8(...) __riscv_vsse16_v_f16m8(__VA_ARGS__) |
| #define | vsse16_v_f16m8_m(...) __riscv_vsse16_v_f16m8_m(__VA_ARGS__) |
| #define | vsse16_v_f16mf2(...) __riscv_vsse16_v_f16mf2(__VA_ARGS__) |
| #define | vsse16_v_f16mf2_m(...) __riscv_vsse16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsse16_v_f16mf4(...) __riscv_vsse16_v_f16mf4(__VA_ARGS__) |
| #define | vsse16_v_f16mf4_m(...) __riscv_vsse16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsse16_v_i16m1(...) __riscv_vsse16_v_i16m1(__VA_ARGS__) |
| #define | vsse16_v_i16m1_m(...) __riscv_vsse16_v_i16m1_m(__VA_ARGS__) |
| #define | vsse16_v_i16m2(...) __riscv_vsse16_v_i16m2(__VA_ARGS__) |
| #define | vsse16_v_i16m2_m(...) __riscv_vsse16_v_i16m2_m(__VA_ARGS__) |
| #define | vsse16_v_i16m4(...) __riscv_vsse16_v_i16m4(__VA_ARGS__) |
| #define | vsse16_v_i16m4_m(...) __riscv_vsse16_v_i16m4_m(__VA_ARGS__) |
| #define | vsse16_v_i16m8(...) __riscv_vsse16_v_i16m8(__VA_ARGS__) |
| #define | vsse16_v_i16m8_m(...) __riscv_vsse16_v_i16m8_m(__VA_ARGS__) |
| #define | vsse16_v_i16mf2(...) __riscv_vsse16_v_i16mf2(__VA_ARGS__) |
| #define | vsse16_v_i16mf2_m(...) __riscv_vsse16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsse16_v_i16mf4(...) __riscv_vsse16_v_i16mf4(__VA_ARGS__) |
| #define | vsse16_v_i16mf4_m(...) __riscv_vsse16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsse16_v_u16m1(...) __riscv_vsse16_v_u16m1(__VA_ARGS__) |
| #define | vsse16_v_u16m1_m(...) __riscv_vsse16_v_u16m1_m(__VA_ARGS__) |
| #define | vsse16_v_u16m2(...) __riscv_vsse16_v_u16m2(__VA_ARGS__) |
| #define | vsse16_v_u16m2_m(...) __riscv_vsse16_v_u16m2_m(__VA_ARGS__) |
| #define | vsse16_v_u16m4(...) __riscv_vsse16_v_u16m4(__VA_ARGS__) |
| #define | vsse16_v_u16m4_m(...) __riscv_vsse16_v_u16m4_m(__VA_ARGS__) |
| #define | vsse16_v_u16m8(...) __riscv_vsse16_v_u16m8(__VA_ARGS__) |
| #define | vsse16_v_u16m8_m(...) __riscv_vsse16_v_u16m8_m(__VA_ARGS__) |
| #define | vsse16_v_u16mf2(...) __riscv_vsse16_v_u16mf2(__VA_ARGS__) |
| #define | vsse16_v_u16mf2_m(...) __riscv_vsse16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsse16_v_u16mf4(...) __riscv_vsse16_v_u16mf4(__VA_ARGS__) |
| #define | vsse16_v_u16mf4_m(...) __riscv_vsse16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsse32_v_f32m1(...) __riscv_vsse32_v_f32m1(__VA_ARGS__) |
| #define | vsse32_v_f32m1_m(...) __riscv_vsse32_v_f32m1_m(__VA_ARGS__) |
| #define | vsse32_v_f32m2(...) __riscv_vsse32_v_f32m2(__VA_ARGS__) |
| #define | vsse32_v_f32m2_m(...) __riscv_vsse32_v_f32m2_m(__VA_ARGS__) |
| #define | vsse32_v_f32m4(...) __riscv_vsse32_v_f32m4(__VA_ARGS__) |
| #define | vsse32_v_f32m4_m(...) __riscv_vsse32_v_f32m4_m(__VA_ARGS__) |
| #define | vsse32_v_f32m8(...) __riscv_vsse32_v_f32m8(__VA_ARGS__) |
| #define | vsse32_v_f32m8_m(...) __riscv_vsse32_v_f32m8_m(__VA_ARGS__) |
| #define | vsse32_v_f32mf2(...) __riscv_vsse32_v_f32mf2(__VA_ARGS__) |
| #define | vsse32_v_f32mf2_m(...) __riscv_vsse32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsse32_v_i32m1(...) __riscv_vsse32_v_i32m1(__VA_ARGS__) |
| #define | vsse32_v_i32m1_m(...) __riscv_vsse32_v_i32m1_m(__VA_ARGS__) |
| #define | vsse32_v_i32m2(...) __riscv_vsse32_v_i32m2(__VA_ARGS__) |
| #define | vsse32_v_i32m2_m(...) __riscv_vsse32_v_i32m2_m(__VA_ARGS__) |
| #define | vsse32_v_i32m4(...) __riscv_vsse32_v_i32m4(__VA_ARGS__) |
| #define | vsse32_v_i32m4_m(...) __riscv_vsse32_v_i32m4_m(__VA_ARGS__) |
| #define | vsse32_v_i32m8(...) __riscv_vsse32_v_i32m8(__VA_ARGS__) |
| #define | vsse32_v_i32m8_m(...) __riscv_vsse32_v_i32m8_m(__VA_ARGS__) |
| #define | vsse32_v_i32mf2(...) __riscv_vsse32_v_i32mf2(__VA_ARGS__) |
| #define | vsse32_v_i32mf2_m(...) __riscv_vsse32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsse32_v_u32m1(...) __riscv_vsse32_v_u32m1(__VA_ARGS__) |
| #define | vsse32_v_u32m1_m(...) __riscv_vsse32_v_u32m1_m(__VA_ARGS__) |
| #define | vsse32_v_u32m2(...) __riscv_vsse32_v_u32m2(__VA_ARGS__) |
| #define | vsse32_v_u32m2_m(...) __riscv_vsse32_v_u32m2_m(__VA_ARGS__) |
| #define | vsse32_v_u32m4(...) __riscv_vsse32_v_u32m4(__VA_ARGS__) |
| #define | vsse32_v_u32m4_m(...) __riscv_vsse32_v_u32m4_m(__VA_ARGS__) |
| #define | vsse32_v_u32m8(...) __riscv_vsse32_v_u32m8(__VA_ARGS__) |
| #define | vsse32_v_u32m8_m(...) __riscv_vsse32_v_u32m8_m(__VA_ARGS__) |
| #define | vsse32_v_u32mf2(...) __riscv_vsse32_v_u32mf2(__VA_ARGS__) |
| #define | vsse32_v_u32mf2_m(...) __riscv_vsse32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsse64_v_f64m1(...) __riscv_vsse64_v_f64m1(__VA_ARGS__) |
| #define | vsse64_v_f64m1_m(...) __riscv_vsse64_v_f64m1_m(__VA_ARGS__) |
| #define | vsse64_v_f64m2(...) __riscv_vsse64_v_f64m2(__VA_ARGS__) |
| #define | vsse64_v_f64m2_m(...) __riscv_vsse64_v_f64m2_m(__VA_ARGS__) |
| #define | vsse64_v_f64m4(...) __riscv_vsse64_v_f64m4(__VA_ARGS__) |
| #define | vsse64_v_f64m4_m(...) __riscv_vsse64_v_f64m4_m(__VA_ARGS__) |
| #define | vsse64_v_f64m8(...) __riscv_vsse64_v_f64m8(__VA_ARGS__) |
| #define | vsse64_v_f64m8_m(...) __riscv_vsse64_v_f64m8_m(__VA_ARGS__) |
| #define | vsse64_v_i64m1(...) __riscv_vsse64_v_i64m1(__VA_ARGS__) |
| #define | vsse64_v_i64m1_m(...) __riscv_vsse64_v_i64m1_m(__VA_ARGS__) |
| #define | vsse64_v_i64m2(...) __riscv_vsse64_v_i64m2(__VA_ARGS__) |
| #define | vsse64_v_i64m2_m(...) __riscv_vsse64_v_i64m2_m(__VA_ARGS__) |
| #define | vsse64_v_i64m4(...) __riscv_vsse64_v_i64m4(__VA_ARGS__) |
| #define | vsse64_v_i64m4_m(...) __riscv_vsse64_v_i64m4_m(__VA_ARGS__) |
| #define | vsse64_v_i64m8(...) __riscv_vsse64_v_i64m8(__VA_ARGS__) |
| #define | vsse64_v_i64m8_m(...) __riscv_vsse64_v_i64m8_m(__VA_ARGS__) |
| #define | vsse64_v_u64m1(...) __riscv_vsse64_v_u64m1(__VA_ARGS__) |
| #define | vsse64_v_u64m1_m(...) __riscv_vsse64_v_u64m1_m(__VA_ARGS__) |
| #define | vsse64_v_u64m2(...) __riscv_vsse64_v_u64m2(__VA_ARGS__) |
| #define | vsse64_v_u64m2_m(...) __riscv_vsse64_v_u64m2_m(__VA_ARGS__) |
| #define | vsse64_v_u64m4(...) __riscv_vsse64_v_u64m4(__VA_ARGS__) |
| #define | vsse64_v_u64m4_m(...) __riscv_vsse64_v_u64m4_m(__VA_ARGS__) |
| #define | vsse64_v_u64m8(...) __riscv_vsse64_v_u64m8(__VA_ARGS__) |
| #define | vsse64_v_u64m8_m(...) __riscv_vsse64_v_u64m8_m(__VA_ARGS__) |
| #define | vsse8_v_i8m1(...) __riscv_vsse8_v_i8m1(__VA_ARGS__) |
| #define | vsse8_v_i8m1_m(...) __riscv_vsse8_v_i8m1_m(__VA_ARGS__) |
| #define | vsse8_v_i8m2(...) __riscv_vsse8_v_i8m2(__VA_ARGS__) |
| #define | vsse8_v_i8m2_m(...) __riscv_vsse8_v_i8m2_m(__VA_ARGS__) |
| #define | vsse8_v_i8m4(...) __riscv_vsse8_v_i8m4(__VA_ARGS__) |
| #define | vsse8_v_i8m4_m(...) __riscv_vsse8_v_i8m4_m(__VA_ARGS__) |
| #define | vsse8_v_i8m8(...) __riscv_vsse8_v_i8m8(__VA_ARGS__) |
| #define | vsse8_v_i8m8_m(...) __riscv_vsse8_v_i8m8_m(__VA_ARGS__) |
| #define | vsse8_v_i8mf2(...) __riscv_vsse8_v_i8mf2(__VA_ARGS__) |
| #define | vsse8_v_i8mf2_m(...) __riscv_vsse8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsse8_v_i8mf4(...) __riscv_vsse8_v_i8mf4(__VA_ARGS__) |
| #define | vsse8_v_i8mf4_m(...) __riscv_vsse8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsse8_v_i8mf8(...) __riscv_vsse8_v_i8mf8(__VA_ARGS__) |
| #define | vsse8_v_i8mf8_m(...) __riscv_vsse8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsse8_v_u8m1(...) __riscv_vsse8_v_u8m1(__VA_ARGS__) |
| #define | vsse8_v_u8m1_m(...) __riscv_vsse8_v_u8m1_m(__VA_ARGS__) |
| #define | vsse8_v_u8m2(...) __riscv_vsse8_v_u8m2(__VA_ARGS__) |
| #define | vsse8_v_u8m2_m(...) __riscv_vsse8_v_u8m2_m(__VA_ARGS__) |
| #define | vsse8_v_u8m4(...) __riscv_vsse8_v_u8m4(__VA_ARGS__) |
| #define | vsse8_v_u8m4_m(...) __riscv_vsse8_v_u8m4_m(__VA_ARGS__) |
| #define | vsse8_v_u8m8(...) __riscv_vsse8_v_u8m8(__VA_ARGS__) |
| #define | vsse8_v_u8m8_m(...) __riscv_vsse8_v_u8m8_m(__VA_ARGS__) |
| #define | vsse8_v_u8mf2(...) __riscv_vsse8_v_u8mf2(__VA_ARGS__) |
| #define | vsse8_v_u8mf2_m(...) __riscv_vsse8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsse8_v_u8mf4(...) __riscv_vsse8_v_u8mf4(__VA_ARGS__) |
| #define | vsse8_v_u8mf4_m(...) __riscv_vsse8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsse8_v_u8mf8(...) __riscv_vsse8_v_u8mf8(__VA_ARGS__) |
| #define | vsse8_v_u8mf8_m(...) __riscv_vsse8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsseg2e16_v_f16m1(...) __riscv_vsseg2e16_v_f16m1(__VA_ARGS__) |
| #define | vsseg2e16_v_f16m1_m(...) __riscv_vsseg2e16_v_f16m1_m(__VA_ARGS__) |
| #define | vsseg2e16_v_f16m2(...) __riscv_vsseg2e16_v_f16m2(__VA_ARGS__) |
| #define | vsseg2e16_v_f16m2_m(...) __riscv_vsseg2e16_v_f16m2_m(__VA_ARGS__) |
| #define | vsseg2e16_v_f16m4(...) __riscv_vsseg2e16_v_f16m4(__VA_ARGS__) |
| #define | vsseg2e16_v_f16m4_m(...) __riscv_vsseg2e16_v_f16m4_m(__VA_ARGS__) |
| #define | vsseg2e16_v_f16mf2(...) __riscv_vsseg2e16_v_f16mf2(__VA_ARGS__) |
| #define | vsseg2e16_v_f16mf2_m(...) __riscv_vsseg2e16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsseg2e16_v_f16mf4(...) __riscv_vsseg2e16_v_f16mf4(__VA_ARGS__) |
| #define | vsseg2e16_v_f16mf4_m(...) __riscv_vsseg2e16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsseg2e16_v_i16m1(...) __riscv_vsseg2e16_v_i16m1(__VA_ARGS__) |
| #define | vsseg2e16_v_i16m1_m(...) __riscv_vsseg2e16_v_i16m1_m(__VA_ARGS__) |
| #define | vsseg2e16_v_i16m2(...) __riscv_vsseg2e16_v_i16m2(__VA_ARGS__) |
| #define | vsseg2e16_v_i16m2_m(...) __riscv_vsseg2e16_v_i16m2_m(__VA_ARGS__) |
| #define | vsseg2e16_v_i16m4(...) __riscv_vsseg2e16_v_i16m4(__VA_ARGS__) |
| #define | vsseg2e16_v_i16m4_m(...) __riscv_vsseg2e16_v_i16m4_m(__VA_ARGS__) |
| #define | vsseg2e16_v_i16mf2(...) __riscv_vsseg2e16_v_i16mf2(__VA_ARGS__) |
| #define | vsseg2e16_v_i16mf2_m(...) __riscv_vsseg2e16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsseg2e16_v_i16mf4(...) __riscv_vsseg2e16_v_i16mf4(__VA_ARGS__) |
| #define | vsseg2e16_v_i16mf4_m(...) __riscv_vsseg2e16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsseg2e16_v_u16m1(...) __riscv_vsseg2e16_v_u16m1(__VA_ARGS__) |
| #define | vsseg2e16_v_u16m1_m(...) __riscv_vsseg2e16_v_u16m1_m(__VA_ARGS__) |
| #define | vsseg2e16_v_u16m2(...) __riscv_vsseg2e16_v_u16m2(__VA_ARGS__) |
| #define | vsseg2e16_v_u16m2_m(...) __riscv_vsseg2e16_v_u16m2_m(__VA_ARGS__) |
| #define | vsseg2e16_v_u16m4(...) __riscv_vsseg2e16_v_u16m4(__VA_ARGS__) |
| #define | vsseg2e16_v_u16m4_m(...) __riscv_vsseg2e16_v_u16m4_m(__VA_ARGS__) |
| #define | vsseg2e16_v_u16mf2(...) __riscv_vsseg2e16_v_u16mf2(__VA_ARGS__) |
| #define | vsseg2e16_v_u16mf2_m(...) __riscv_vsseg2e16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsseg2e16_v_u16mf4(...) __riscv_vsseg2e16_v_u16mf4(__VA_ARGS__) |
| #define | vsseg2e16_v_u16mf4_m(...) __riscv_vsseg2e16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsseg2e32_v_f32m1(...) __riscv_vsseg2e32_v_f32m1(__VA_ARGS__) |
| #define | vsseg2e32_v_f32m1_m(...) __riscv_vsseg2e32_v_f32m1_m(__VA_ARGS__) |
| #define | vsseg2e32_v_f32m2(...) __riscv_vsseg2e32_v_f32m2(__VA_ARGS__) |
| #define | vsseg2e32_v_f32m2_m(...) __riscv_vsseg2e32_v_f32m2_m(__VA_ARGS__) |
| #define | vsseg2e32_v_f32m4(...) __riscv_vsseg2e32_v_f32m4(__VA_ARGS__) |
| #define | vsseg2e32_v_f32m4_m(...) __riscv_vsseg2e32_v_f32m4_m(__VA_ARGS__) |
| #define | vsseg2e32_v_f32mf2(...) __riscv_vsseg2e32_v_f32mf2(__VA_ARGS__) |
| #define | vsseg2e32_v_f32mf2_m(...) __riscv_vsseg2e32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsseg2e32_v_i32m1(...) __riscv_vsseg2e32_v_i32m1(__VA_ARGS__) |
| #define | vsseg2e32_v_i32m1_m(...) __riscv_vsseg2e32_v_i32m1_m(__VA_ARGS__) |
| #define | vsseg2e32_v_i32m2(...) __riscv_vsseg2e32_v_i32m2(__VA_ARGS__) |
| #define | vsseg2e32_v_i32m2_m(...) __riscv_vsseg2e32_v_i32m2_m(__VA_ARGS__) |
| #define | vsseg2e32_v_i32m4(...) __riscv_vsseg2e32_v_i32m4(__VA_ARGS__) |
| #define | vsseg2e32_v_i32m4_m(...) __riscv_vsseg2e32_v_i32m4_m(__VA_ARGS__) |
| #define | vsseg2e32_v_i32mf2(...) __riscv_vsseg2e32_v_i32mf2(__VA_ARGS__) |
| #define | vsseg2e32_v_i32mf2_m(...) __riscv_vsseg2e32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsseg2e32_v_u32m1(...) __riscv_vsseg2e32_v_u32m1(__VA_ARGS__) |
| #define | vsseg2e32_v_u32m1_m(...) __riscv_vsseg2e32_v_u32m1_m(__VA_ARGS__) |
| #define | vsseg2e32_v_u32m2(...) __riscv_vsseg2e32_v_u32m2(__VA_ARGS__) |
| #define | vsseg2e32_v_u32m2_m(...) __riscv_vsseg2e32_v_u32m2_m(__VA_ARGS__) |
| #define | vsseg2e32_v_u32m4(...) __riscv_vsseg2e32_v_u32m4(__VA_ARGS__) |
| #define | vsseg2e32_v_u32m4_m(...) __riscv_vsseg2e32_v_u32m4_m(__VA_ARGS__) |
| #define | vsseg2e32_v_u32mf2(...) __riscv_vsseg2e32_v_u32mf2(__VA_ARGS__) |
| #define | vsseg2e32_v_u32mf2_m(...) __riscv_vsseg2e32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsseg2e64_v_f64m1(...) __riscv_vsseg2e64_v_f64m1(__VA_ARGS__) |
| #define | vsseg2e64_v_f64m1_m(...) __riscv_vsseg2e64_v_f64m1_m(__VA_ARGS__) |
| #define | vsseg2e64_v_f64m2(...) __riscv_vsseg2e64_v_f64m2(__VA_ARGS__) |
| #define | vsseg2e64_v_f64m2_m(...) __riscv_vsseg2e64_v_f64m2_m(__VA_ARGS__) |
| #define | vsseg2e64_v_f64m4(...) __riscv_vsseg2e64_v_f64m4(__VA_ARGS__) |
| #define | vsseg2e64_v_f64m4_m(...) __riscv_vsseg2e64_v_f64m4_m(__VA_ARGS__) |
| #define | vsseg2e64_v_i64m1(...) __riscv_vsseg2e64_v_i64m1(__VA_ARGS__) |
| #define | vsseg2e64_v_i64m1_m(...) __riscv_vsseg2e64_v_i64m1_m(__VA_ARGS__) |
| #define | vsseg2e64_v_i64m2(...) __riscv_vsseg2e64_v_i64m2(__VA_ARGS__) |
| #define | vsseg2e64_v_i64m2_m(...) __riscv_vsseg2e64_v_i64m2_m(__VA_ARGS__) |
| #define | vsseg2e64_v_i64m4(...) __riscv_vsseg2e64_v_i64m4(__VA_ARGS__) |
| #define | vsseg2e64_v_i64m4_m(...) __riscv_vsseg2e64_v_i64m4_m(__VA_ARGS__) |
| #define | vsseg2e64_v_u64m1(...) __riscv_vsseg2e64_v_u64m1(__VA_ARGS__) |
| #define | vsseg2e64_v_u64m1_m(...) __riscv_vsseg2e64_v_u64m1_m(__VA_ARGS__) |
| #define | vsseg2e64_v_u64m2(...) __riscv_vsseg2e64_v_u64m2(__VA_ARGS__) |
| #define | vsseg2e64_v_u64m2_m(...) __riscv_vsseg2e64_v_u64m2_m(__VA_ARGS__) |
| #define | vsseg2e64_v_u64m4(...) __riscv_vsseg2e64_v_u64m4(__VA_ARGS__) |
| #define | vsseg2e64_v_u64m4_m(...) __riscv_vsseg2e64_v_u64m4_m(__VA_ARGS__) |
| #define | vsseg2e8_v_i8m1(...) __riscv_vsseg2e8_v_i8m1(__VA_ARGS__) |
| #define | vsseg2e8_v_i8m1_m(...) __riscv_vsseg2e8_v_i8m1_m(__VA_ARGS__) |
| #define | vsseg2e8_v_i8m2(...) __riscv_vsseg2e8_v_i8m2(__VA_ARGS__) |
| #define | vsseg2e8_v_i8m2_m(...) __riscv_vsseg2e8_v_i8m2_m(__VA_ARGS__) |
| #define | vsseg2e8_v_i8m4(...) __riscv_vsseg2e8_v_i8m4(__VA_ARGS__) |
| #define | vsseg2e8_v_i8m4_m(...) __riscv_vsseg2e8_v_i8m4_m(__VA_ARGS__) |
| #define | vsseg2e8_v_i8mf2(...) __riscv_vsseg2e8_v_i8mf2(__VA_ARGS__) |
| #define | vsseg2e8_v_i8mf2_m(...) __riscv_vsseg2e8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsseg2e8_v_i8mf4(...) __riscv_vsseg2e8_v_i8mf4(__VA_ARGS__) |
| #define | vsseg2e8_v_i8mf4_m(...) __riscv_vsseg2e8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsseg2e8_v_i8mf8(...) __riscv_vsseg2e8_v_i8mf8(__VA_ARGS__) |
| #define | vsseg2e8_v_i8mf8_m(...) __riscv_vsseg2e8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsseg2e8_v_u8m1(...) __riscv_vsseg2e8_v_u8m1(__VA_ARGS__) |
| #define | vsseg2e8_v_u8m1_m(...) __riscv_vsseg2e8_v_u8m1_m(__VA_ARGS__) |
| #define | vsseg2e8_v_u8m2(...) __riscv_vsseg2e8_v_u8m2(__VA_ARGS__) |
| #define | vsseg2e8_v_u8m2_m(...) __riscv_vsseg2e8_v_u8m2_m(__VA_ARGS__) |
| #define | vsseg2e8_v_u8m4(...) __riscv_vsseg2e8_v_u8m4(__VA_ARGS__) |
| #define | vsseg2e8_v_u8m4_m(...) __riscv_vsseg2e8_v_u8m4_m(__VA_ARGS__) |
| #define | vsseg2e8_v_u8mf2(...) __riscv_vsseg2e8_v_u8mf2(__VA_ARGS__) |
| #define | vsseg2e8_v_u8mf2_m(...) __riscv_vsseg2e8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsseg2e8_v_u8mf4(...) __riscv_vsseg2e8_v_u8mf4(__VA_ARGS__) |
| #define | vsseg2e8_v_u8mf4_m(...) __riscv_vsseg2e8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsseg2e8_v_u8mf8(...) __riscv_vsseg2e8_v_u8mf8(__VA_ARGS__) |
| #define | vsseg2e8_v_u8mf8_m(...) __riscv_vsseg2e8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsseg3e16_v_f16m1(...) __riscv_vsseg3e16_v_f16m1(__VA_ARGS__) |
| #define | vsseg3e16_v_f16m1_m(...) __riscv_vsseg3e16_v_f16m1_m(__VA_ARGS__) |
| #define | vsseg3e16_v_f16m2(...) __riscv_vsseg3e16_v_f16m2(__VA_ARGS__) |
| #define | vsseg3e16_v_f16m2_m(...) __riscv_vsseg3e16_v_f16m2_m(__VA_ARGS__) |
| #define | vsseg3e16_v_f16mf2(...) __riscv_vsseg3e16_v_f16mf2(__VA_ARGS__) |
| #define | vsseg3e16_v_f16mf2_m(...) __riscv_vsseg3e16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsseg3e16_v_f16mf4(...) __riscv_vsseg3e16_v_f16mf4(__VA_ARGS__) |
| #define | vsseg3e16_v_f16mf4_m(...) __riscv_vsseg3e16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsseg3e16_v_i16m1(...) __riscv_vsseg3e16_v_i16m1(__VA_ARGS__) |
| #define | vsseg3e16_v_i16m1_m(...) __riscv_vsseg3e16_v_i16m1_m(__VA_ARGS__) |
| #define | vsseg3e16_v_i16m2(...) __riscv_vsseg3e16_v_i16m2(__VA_ARGS__) |
| #define | vsseg3e16_v_i16m2_m(...) __riscv_vsseg3e16_v_i16m2_m(__VA_ARGS__) |
| #define | vsseg3e16_v_i16mf2(...) __riscv_vsseg3e16_v_i16mf2(__VA_ARGS__) |
| #define | vsseg3e16_v_i16mf2_m(...) __riscv_vsseg3e16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsseg3e16_v_i16mf4(...) __riscv_vsseg3e16_v_i16mf4(__VA_ARGS__) |
| #define | vsseg3e16_v_i16mf4_m(...) __riscv_vsseg3e16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsseg3e16_v_u16m1(...) __riscv_vsseg3e16_v_u16m1(__VA_ARGS__) |
| #define | vsseg3e16_v_u16m1_m(...) __riscv_vsseg3e16_v_u16m1_m(__VA_ARGS__) |
| #define | vsseg3e16_v_u16m2(...) __riscv_vsseg3e16_v_u16m2(__VA_ARGS__) |
| #define | vsseg3e16_v_u16m2_m(...) __riscv_vsseg3e16_v_u16m2_m(__VA_ARGS__) |
| #define | vsseg3e16_v_u16mf2(...) __riscv_vsseg3e16_v_u16mf2(__VA_ARGS__) |
| #define | vsseg3e16_v_u16mf2_m(...) __riscv_vsseg3e16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsseg3e16_v_u16mf4(...) __riscv_vsseg3e16_v_u16mf4(__VA_ARGS__) |
| #define | vsseg3e16_v_u16mf4_m(...) __riscv_vsseg3e16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsseg3e32_v_f32m1(...) __riscv_vsseg3e32_v_f32m1(__VA_ARGS__) |
| #define | vsseg3e32_v_f32m1_m(...) __riscv_vsseg3e32_v_f32m1_m(__VA_ARGS__) |
| #define | vsseg3e32_v_f32m2(...) __riscv_vsseg3e32_v_f32m2(__VA_ARGS__) |
| #define | vsseg3e32_v_f32m2_m(...) __riscv_vsseg3e32_v_f32m2_m(__VA_ARGS__) |
| #define | vsseg3e32_v_f32mf2(...) __riscv_vsseg3e32_v_f32mf2(__VA_ARGS__) |
| #define | vsseg3e32_v_f32mf2_m(...) __riscv_vsseg3e32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsseg3e32_v_i32m1(...) __riscv_vsseg3e32_v_i32m1(__VA_ARGS__) |
| #define | vsseg3e32_v_i32m1_m(...) __riscv_vsseg3e32_v_i32m1_m(__VA_ARGS__) |
| #define | vsseg3e32_v_i32m2(...) __riscv_vsseg3e32_v_i32m2(__VA_ARGS__) |
| #define | vsseg3e32_v_i32m2_m(...) __riscv_vsseg3e32_v_i32m2_m(__VA_ARGS__) |
| #define | vsseg3e32_v_i32mf2(...) __riscv_vsseg3e32_v_i32mf2(__VA_ARGS__) |
| #define | vsseg3e32_v_i32mf2_m(...) __riscv_vsseg3e32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsseg3e32_v_u32m1(...) __riscv_vsseg3e32_v_u32m1(__VA_ARGS__) |
| #define | vsseg3e32_v_u32m1_m(...) __riscv_vsseg3e32_v_u32m1_m(__VA_ARGS__) |
| #define | vsseg3e32_v_u32m2(...) __riscv_vsseg3e32_v_u32m2(__VA_ARGS__) |
| #define | vsseg3e32_v_u32m2_m(...) __riscv_vsseg3e32_v_u32m2_m(__VA_ARGS__) |
| #define | vsseg3e32_v_u32mf2(...) __riscv_vsseg3e32_v_u32mf2(__VA_ARGS__) |
| #define | vsseg3e32_v_u32mf2_m(...) __riscv_vsseg3e32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsseg3e64_v_f64m1(...) __riscv_vsseg3e64_v_f64m1(__VA_ARGS__) |
| #define | vsseg3e64_v_f64m1_m(...) __riscv_vsseg3e64_v_f64m1_m(__VA_ARGS__) |
| #define | vsseg3e64_v_f64m2(...) __riscv_vsseg3e64_v_f64m2(__VA_ARGS__) |
| #define | vsseg3e64_v_f64m2_m(...) __riscv_vsseg3e64_v_f64m2_m(__VA_ARGS__) |
| #define | vsseg3e64_v_i64m1(...) __riscv_vsseg3e64_v_i64m1(__VA_ARGS__) |
| #define | vsseg3e64_v_i64m1_m(...) __riscv_vsseg3e64_v_i64m1_m(__VA_ARGS__) |
| #define | vsseg3e64_v_i64m2(...) __riscv_vsseg3e64_v_i64m2(__VA_ARGS__) |
| #define | vsseg3e64_v_i64m2_m(...) __riscv_vsseg3e64_v_i64m2_m(__VA_ARGS__) |
| #define | vsseg3e64_v_u64m1(...) __riscv_vsseg3e64_v_u64m1(__VA_ARGS__) |
| #define | vsseg3e64_v_u64m1_m(...) __riscv_vsseg3e64_v_u64m1_m(__VA_ARGS__) |
| #define | vsseg3e64_v_u64m2(...) __riscv_vsseg3e64_v_u64m2(__VA_ARGS__) |
| #define | vsseg3e64_v_u64m2_m(...) __riscv_vsseg3e64_v_u64m2_m(__VA_ARGS__) |
| #define | vsseg3e8_v_i8m1(...) __riscv_vsseg3e8_v_i8m1(__VA_ARGS__) |
| #define | vsseg3e8_v_i8m1_m(...) __riscv_vsseg3e8_v_i8m1_m(__VA_ARGS__) |
| #define | vsseg3e8_v_i8m2(...) __riscv_vsseg3e8_v_i8m2(__VA_ARGS__) |
| #define | vsseg3e8_v_i8m2_m(...) __riscv_vsseg3e8_v_i8m2_m(__VA_ARGS__) |
| #define | vsseg3e8_v_i8mf2(...) __riscv_vsseg3e8_v_i8mf2(__VA_ARGS__) |
| #define | vsseg3e8_v_i8mf2_m(...) __riscv_vsseg3e8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsseg3e8_v_i8mf4(...) __riscv_vsseg3e8_v_i8mf4(__VA_ARGS__) |
| #define | vsseg3e8_v_i8mf4_m(...) __riscv_vsseg3e8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsseg3e8_v_i8mf8(...) __riscv_vsseg3e8_v_i8mf8(__VA_ARGS__) |
| #define | vsseg3e8_v_i8mf8_m(...) __riscv_vsseg3e8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsseg3e8_v_u8m1(...) __riscv_vsseg3e8_v_u8m1(__VA_ARGS__) |
| #define | vsseg3e8_v_u8m1_m(...) __riscv_vsseg3e8_v_u8m1_m(__VA_ARGS__) |
| #define | vsseg3e8_v_u8m2(...) __riscv_vsseg3e8_v_u8m2(__VA_ARGS__) |
| #define | vsseg3e8_v_u8m2_m(...) __riscv_vsseg3e8_v_u8m2_m(__VA_ARGS__) |
| #define | vsseg3e8_v_u8mf2(...) __riscv_vsseg3e8_v_u8mf2(__VA_ARGS__) |
| #define | vsseg3e8_v_u8mf2_m(...) __riscv_vsseg3e8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsseg3e8_v_u8mf4(...) __riscv_vsseg3e8_v_u8mf4(__VA_ARGS__) |
| #define | vsseg3e8_v_u8mf4_m(...) __riscv_vsseg3e8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsseg3e8_v_u8mf8(...) __riscv_vsseg3e8_v_u8mf8(__VA_ARGS__) |
| #define | vsseg3e8_v_u8mf8_m(...) __riscv_vsseg3e8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsseg4e16_v_f16m1(...) __riscv_vsseg4e16_v_f16m1(__VA_ARGS__) |
| #define | vsseg4e16_v_f16m1_m(...) __riscv_vsseg4e16_v_f16m1_m(__VA_ARGS__) |
| #define | vsseg4e16_v_f16m2(...) __riscv_vsseg4e16_v_f16m2(__VA_ARGS__) |
| #define | vsseg4e16_v_f16m2_m(...) __riscv_vsseg4e16_v_f16m2_m(__VA_ARGS__) |
| #define | vsseg4e16_v_f16mf2(...) __riscv_vsseg4e16_v_f16mf2(__VA_ARGS__) |
| #define | vsseg4e16_v_f16mf2_m(...) __riscv_vsseg4e16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsseg4e16_v_f16mf4(...) __riscv_vsseg4e16_v_f16mf4(__VA_ARGS__) |
| #define | vsseg4e16_v_f16mf4_m(...) __riscv_vsseg4e16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsseg4e16_v_i16m1(...) __riscv_vsseg4e16_v_i16m1(__VA_ARGS__) |
| #define | vsseg4e16_v_i16m1_m(...) __riscv_vsseg4e16_v_i16m1_m(__VA_ARGS__) |
| #define | vsseg4e16_v_i16m2(...) __riscv_vsseg4e16_v_i16m2(__VA_ARGS__) |
| #define | vsseg4e16_v_i16m2_m(...) __riscv_vsseg4e16_v_i16m2_m(__VA_ARGS__) |
| #define | vsseg4e16_v_i16mf2(...) __riscv_vsseg4e16_v_i16mf2(__VA_ARGS__) |
| #define | vsseg4e16_v_i16mf2_m(...) __riscv_vsseg4e16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsseg4e16_v_i16mf4(...) __riscv_vsseg4e16_v_i16mf4(__VA_ARGS__) |
| #define | vsseg4e16_v_i16mf4_m(...) __riscv_vsseg4e16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsseg4e16_v_u16m1(...) __riscv_vsseg4e16_v_u16m1(__VA_ARGS__) |
| #define | vsseg4e16_v_u16m1_m(...) __riscv_vsseg4e16_v_u16m1_m(__VA_ARGS__) |
| #define | vsseg4e16_v_u16m2(...) __riscv_vsseg4e16_v_u16m2(__VA_ARGS__) |
| #define | vsseg4e16_v_u16m2_m(...) __riscv_vsseg4e16_v_u16m2_m(__VA_ARGS__) |
| #define | vsseg4e16_v_u16mf2(...) __riscv_vsseg4e16_v_u16mf2(__VA_ARGS__) |
| #define | vsseg4e16_v_u16mf2_m(...) __riscv_vsseg4e16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsseg4e16_v_u16mf4(...) __riscv_vsseg4e16_v_u16mf4(__VA_ARGS__) |
| #define | vsseg4e16_v_u16mf4_m(...) __riscv_vsseg4e16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsseg4e32_v_f32m1(...) __riscv_vsseg4e32_v_f32m1(__VA_ARGS__) |
| #define | vsseg4e32_v_f32m1_m(...) __riscv_vsseg4e32_v_f32m1_m(__VA_ARGS__) |
| #define | vsseg4e32_v_f32m2(...) __riscv_vsseg4e32_v_f32m2(__VA_ARGS__) |
| #define | vsseg4e32_v_f32m2_m(...) __riscv_vsseg4e32_v_f32m2_m(__VA_ARGS__) |
| #define | vsseg4e32_v_f32mf2(...) __riscv_vsseg4e32_v_f32mf2(__VA_ARGS__) |
| #define | vsseg4e32_v_f32mf2_m(...) __riscv_vsseg4e32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsseg4e32_v_i32m1(...) __riscv_vsseg4e32_v_i32m1(__VA_ARGS__) |
| #define | vsseg4e32_v_i32m1_m(...) __riscv_vsseg4e32_v_i32m1_m(__VA_ARGS__) |
| #define | vsseg4e32_v_i32m2(...) __riscv_vsseg4e32_v_i32m2(__VA_ARGS__) |
| #define | vsseg4e32_v_i32m2_m(...) __riscv_vsseg4e32_v_i32m2_m(__VA_ARGS__) |
| #define | vsseg4e32_v_i32mf2(...) __riscv_vsseg4e32_v_i32mf2(__VA_ARGS__) |
| #define | vsseg4e32_v_i32mf2_m(...) __riscv_vsseg4e32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsseg4e32_v_u32m1(...) __riscv_vsseg4e32_v_u32m1(__VA_ARGS__) |
| #define | vsseg4e32_v_u32m1_m(...) __riscv_vsseg4e32_v_u32m1_m(__VA_ARGS__) |
| #define | vsseg4e32_v_u32m2(...) __riscv_vsseg4e32_v_u32m2(__VA_ARGS__) |
| #define | vsseg4e32_v_u32m2_m(...) __riscv_vsseg4e32_v_u32m2_m(__VA_ARGS__) |
| #define | vsseg4e32_v_u32mf2(...) __riscv_vsseg4e32_v_u32mf2(__VA_ARGS__) |
| #define | vsseg4e32_v_u32mf2_m(...) __riscv_vsseg4e32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsseg4e64_v_f64m1(...) __riscv_vsseg4e64_v_f64m1(__VA_ARGS__) |
| #define | vsseg4e64_v_f64m1_m(...) __riscv_vsseg4e64_v_f64m1_m(__VA_ARGS__) |
| #define | vsseg4e64_v_f64m2(...) __riscv_vsseg4e64_v_f64m2(__VA_ARGS__) |
| #define | vsseg4e64_v_f64m2_m(...) __riscv_vsseg4e64_v_f64m2_m(__VA_ARGS__) |
| #define | vsseg4e64_v_i64m1(...) __riscv_vsseg4e64_v_i64m1(__VA_ARGS__) |
| #define | vsseg4e64_v_i64m1_m(...) __riscv_vsseg4e64_v_i64m1_m(__VA_ARGS__) |
| #define | vsseg4e64_v_i64m2(...) __riscv_vsseg4e64_v_i64m2(__VA_ARGS__) |
| #define | vsseg4e64_v_i64m2_m(...) __riscv_vsseg4e64_v_i64m2_m(__VA_ARGS__) |
| #define | vsseg4e64_v_u64m1(...) __riscv_vsseg4e64_v_u64m1(__VA_ARGS__) |
| #define | vsseg4e64_v_u64m1_m(...) __riscv_vsseg4e64_v_u64m1_m(__VA_ARGS__) |
| #define | vsseg4e64_v_u64m2(...) __riscv_vsseg4e64_v_u64m2(__VA_ARGS__) |
| #define | vsseg4e64_v_u64m2_m(...) __riscv_vsseg4e64_v_u64m2_m(__VA_ARGS__) |
| #define | vsseg4e8_v_i8m1(...) __riscv_vsseg4e8_v_i8m1(__VA_ARGS__) |
| #define | vsseg4e8_v_i8m1_m(...) __riscv_vsseg4e8_v_i8m1_m(__VA_ARGS__) |
| #define | vsseg4e8_v_i8m2(...) __riscv_vsseg4e8_v_i8m2(__VA_ARGS__) |
| #define | vsseg4e8_v_i8m2_m(...) __riscv_vsseg4e8_v_i8m2_m(__VA_ARGS__) |
| #define | vsseg4e8_v_i8mf2(...) __riscv_vsseg4e8_v_i8mf2(__VA_ARGS__) |
| #define | vsseg4e8_v_i8mf2_m(...) __riscv_vsseg4e8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsseg4e8_v_i8mf4(...) __riscv_vsseg4e8_v_i8mf4(__VA_ARGS__) |
| #define | vsseg4e8_v_i8mf4_m(...) __riscv_vsseg4e8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsseg4e8_v_i8mf8(...) __riscv_vsseg4e8_v_i8mf8(__VA_ARGS__) |
| #define | vsseg4e8_v_i8mf8_m(...) __riscv_vsseg4e8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsseg4e8_v_u8m1(...) __riscv_vsseg4e8_v_u8m1(__VA_ARGS__) |
| #define | vsseg4e8_v_u8m1_m(...) __riscv_vsseg4e8_v_u8m1_m(__VA_ARGS__) |
| #define | vsseg4e8_v_u8m2(...) __riscv_vsseg4e8_v_u8m2(__VA_ARGS__) |
| #define | vsseg4e8_v_u8m2_m(...) __riscv_vsseg4e8_v_u8m2_m(__VA_ARGS__) |
| #define | vsseg4e8_v_u8mf2(...) __riscv_vsseg4e8_v_u8mf2(__VA_ARGS__) |
| #define | vsseg4e8_v_u8mf2_m(...) __riscv_vsseg4e8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsseg4e8_v_u8mf4(...) __riscv_vsseg4e8_v_u8mf4(__VA_ARGS__) |
| #define | vsseg4e8_v_u8mf4_m(...) __riscv_vsseg4e8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsseg4e8_v_u8mf8(...) __riscv_vsseg4e8_v_u8mf8(__VA_ARGS__) |
| #define | vsseg4e8_v_u8mf8_m(...) __riscv_vsseg4e8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsseg5e16_v_f16m1(...) __riscv_vsseg5e16_v_f16m1(__VA_ARGS__) |
| #define | vsseg5e16_v_f16m1_m(...) __riscv_vsseg5e16_v_f16m1_m(__VA_ARGS__) |
| #define | vsseg5e16_v_f16mf2(...) __riscv_vsseg5e16_v_f16mf2(__VA_ARGS__) |
| #define | vsseg5e16_v_f16mf2_m(...) __riscv_vsseg5e16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsseg5e16_v_f16mf4(...) __riscv_vsseg5e16_v_f16mf4(__VA_ARGS__) |
| #define | vsseg5e16_v_f16mf4_m(...) __riscv_vsseg5e16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsseg5e16_v_i16m1(...) __riscv_vsseg5e16_v_i16m1(__VA_ARGS__) |
| #define | vsseg5e16_v_i16m1_m(...) __riscv_vsseg5e16_v_i16m1_m(__VA_ARGS__) |
| #define | vsseg5e16_v_i16mf2(...) __riscv_vsseg5e16_v_i16mf2(__VA_ARGS__) |
| #define | vsseg5e16_v_i16mf2_m(...) __riscv_vsseg5e16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsseg5e16_v_i16mf4(...) __riscv_vsseg5e16_v_i16mf4(__VA_ARGS__) |
| #define | vsseg5e16_v_i16mf4_m(...) __riscv_vsseg5e16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsseg5e16_v_u16m1(...) __riscv_vsseg5e16_v_u16m1(__VA_ARGS__) |
| #define | vsseg5e16_v_u16m1_m(...) __riscv_vsseg5e16_v_u16m1_m(__VA_ARGS__) |
| #define | vsseg5e16_v_u16mf2(...) __riscv_vsseg5e16_v_u16mf2(__VA_ARGS__) |
| #define | vsseg5e16_v_u16mf2_m(...) __riscv_vsseg5e16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsseg5e16_v_u16mf4(...) __riscv_vsseg5e16_v_u16mf4(__VA_ARGS__) |
| #define | vsseg5e16_v_u16mf4_m(...) __riscv_vsseg5e16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsseg5e32_v_f32m1(...) __riscv_vsseg5e32_v_f32m1(__VA_ARGS__) |
| #define | vsseg5e32_v_f32m1_m(...) __riscv_vsseg5e32_v_f32m1_m(__VA_ARGS__) |
| #define | vsseg5e32_v_f32mf2(...) __riscv_vsseg5e32_v_f32mf2(__VA_ARGS__) |
| #define | vsseg5e32_v_f32mf2_m(...) __riscv_vsseg5e32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsseg5e32_v_i32m1(...) __riscv_vsseg5e32_v_i32m1(__VA_ARGS__) |
| #define | vsseg5e32_v_i32m1_m(...) __riscv_vsseg5e32_v_i32m1_m(__VA_ARGS__) |
| #define | vsseg5e32_v_i32mf2(...) __riscv_vsseg5e32_v_i32mf2(__VA_ARGS__) |
| #define | vsseg5e32_v_i32mf2_m(...) __riscv_vsseg5e32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsseg5e32_v_u32m1(...) __riscv_vsseg5e32_v_u32m1(__VA_ARGS__) |
| #define | vsseg5e32_v_u32m1_m(...) __riscv_vsseg5e32_v_u32m1_m(__VA_ARGS__) |
| #define | vsseg5e32_v_u32mf2(...) __riscv_vsseg5e32_v_u32mf2(__VA_ARGS__) |
| #define | vsseg5e32_v_u32mf2_m(...) __riscv_vsseg5e32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsseg5e64_v_f64m1(...) __riscv_vsseg5e64_v_f64m1(__VA_ARGS__) |
| #define | vsseg5e64_v_f64m1_m(...) __riscv_vsseg5e64_v_f64m1_m(__VA_ARGS__) |
| #define | vsseg5e64_v_i64m1(...) __riscv_vsseg5e64_v_i64m1(__VA_ARGS__) |
| #define | vsseg5e64_v_i64m1_m(...) __riscv_vsseg5e64_v_i64m1_m(__VA_ARGS__) |
| #define | vsseg5e64_v_u64m1(...) __riscv_vsseg5e64_v_u64m1(__VA_ARGS__) |
| #define | vsseg5e64_v_u64m1_m(...) __riscv_vsseg5e64_v_u64m1_m(__VA_ARGS__) |
| #define | vsseg5e8_v_i8m1(...) __riscv_vsseg5e8_v_i8m1(__VA_ARGS__) |
| #define | vsseg5e8_v_i8m1_m(...) __riscv_vsseg5e8_v_i8m1_m(__VA_ARGS__) |
| #define | vsseg5e8_v_i8mf2(...) __riscv_vsseg5e8_v_i8mf2(__VA_ARGS__) |
| #define | vsseg5e8_v_i8mf2_m(...) __riscv_vsseg5e8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsseg5e8_v_i8mf4(...) __riscv_vsseg5e8_v_i8mf4(__VA_ARGS__) |
| #define | vsseg5e8_v_i8mf4_m(...) __riscv_vsseg5e8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsseg5e8_v_i8mf8(...) __riscv_vsseg5e8_v_i8mf8(__VA_ARGS__) |
| #define | vsseg5e8_v_i8mf8_m(...) __riscv_vsseg5e8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsseg5e8_v_u8m1(...) __riscv_vsseg5e8_v_u8m1(__VA_ARGS__) |
| #define | vsseg5e8_v_u8m1_m(...) __riscv_vsseg5e8_v_u8m1_m(__VA_ARGS__) |
| #define | vsseg5e8_v_u8mf2(...) __riscv_vsseg5e8_v_u8mf2(__VA_ARGS__) |
| #define | vsseg5e8_v_u8mf2_m(...) __riscv_vsseg5e8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsseg5e8_v_u8mf4(...) __riscv_vsseg5e8_v_u8mf4(__VA_ARGS__) |
| #define | vsseg5e8_v_u8mf4_m(...) __riscv_vsseg5e8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsseg5e8_v_u8mf8(...) __riscv_vsseg5e8_v_u8mf8(__VA_ARGS__) |
| #define | vsseg5e8_v_u8mf8_m(...) __riscv_vsseg5e8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsseg6e16_v_f16m1(...) __riscv_vsseg6e16_v_f16m1(__VA_ARGS__) |
| #define | vsseg6e16_v_f16m1_m(...) __riscv_vsseg6e16_v_f16m1_m(__VA_ARGS__) |
| #define | vsseg6e16_v_f16mf2(...) __riscv_vsseg6e16_v_f16mf2(__VA_ARGS__) |
| #define | vsseg6e16_v_f16mf2_m(...) __riscv_vsseg6e16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsseg6e16_v_f16mf4(...) __riscv_vsseg6e16_v_f16mf4(__VA_ARGS__) |
| #define | vsseg6e16_v_f16mf4_m(...) __riscv_vsseg6e16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsseg6e16_v_i16m1(...) __riscv_vsseg6e16_v_i16m1(__VA_ARGS__) |
| #define | vsseg6e16_v_i16m1_m(...) __riscv_vsseg6e16_v_i16m1_m(__VA_ARGS__) |
| #define | vsseg6e16_v_i16mf2(...) __riscv_vsseg6e16_v_i16mf2(__VA_ARGS__) |
| #define | vsseg6e16_v_i16mf2_m(...) __riscv_vsseg6e16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsseg6e16_v_i16mf4(...) __riscv_vsseg6e16_v_i16mf4(__VA_ARGS__) |
| #define | vsseg6e16_v_i16mf4_m(...) __riscv_vsseg6e16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsseg6e16_v_u16m1(...) __riscv_vsseg6e16_v_u16m1(__VA_ARGS__) |
| #define | vsseg6e16_v_u16m1_m(...) __riscv_vsseg6e16_v_u16m1_m(__VA_ARGS__) |
| #define | vsseg6e16_v_u16mf2(...) __riscv_vsseg6e16_v_u16mf2(__VA_ARGS__) |
| #define | vsseg6e16_v_u16mf2_m(...) __riscv_vsseg6e16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsseg6e16_v_u16mf4(...) __riscv_vsseg6e16_v_u16mf4(__VA_ARGS__) |
| #define | vsseg6e16_v_u16mf4_m(...) __riscv_vsseg6e16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsseg6e32_v_f32m1(...) __riscv_vsseg6e32_v_f32m1(__VA_ARGS__) |
| #define | vsseg6e32_v_f32m1_m(...) __riscv_vsseg6e32_v_f32m1_m(__VA_ARGS__) |
| #define | vsseg6e32_v_f32mf2(...) __riscv_vsseg6e32_v_f32mf2(__VA_ARGS__) |
| #define | vsseg6e32_v_f32mf2_m(...) __riscv_vsseg6e32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsseg6e32_v_i32m1(...) __riscv_vsseg6e32_v_i32m1(__VA_ARGS__) |
| #define | vsseg6e32_v_i32m1_m(...) __riscv_vsseg6e32_v_i32m1_m(__VA_ARGS__) |
| #define | vsseg6e32_v_i32mf2(...) __riscv_vsseg6e32_v_i32mf2(__VA_ARGS__) |
| #define | vsseg6e32_v_i32mf2_m(...) __riscv_vsseg6e32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsseg6e32_v_u32m1(...) __riscv_vsseg6e32_v_u32m1(__VA_ARGS__) |
| #define | vsseg6e32_v_u32m1_m(...) __riscv_vsseg6e32_v_u32m1_m(__VA_ARGS__) |
| #define | vsseg6e32_v_u32mf2(...) __riscv_vsseg6e32_v_u32mf2(__VA_ARGS__) |
| #define | vsseg6e32_v_u32mf2_m(...) __riscv_vsseg6e32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsseg6e64_v_f64m1(...) __riscv_vsseg6e64_v_f64m1(__VA_ARGS__) |
| #define | vsseg6e64_v_f64m1_m(...) __riscv_vsseg6e64_v_f64m1_m(__VA_ARGS__) |
| #define | vsseg6e64_v_i64m1(...) __riscv_vsseg6e64_v_i64m1(__VA_ARGS__) |
| #define | vsseg6e64_v_i64m1_m(...) __riscv_vsseg6e64_v_i64m1_m(__VA_ARGS__) |
| #define | vsseg6e64_v_u64m1(...) __riscv_vsseg6e64_v_u64m1(__VA_ARGS__) |
| #define | vsseg6e64_v_u64m1_m(...) __riscv_vsseg6e64_v_u64m1_m(__VA_ARGS__) |
| #define | vsseg6e8_v_i8m1(...) __riscv_vsseg6e8_v_i8m1(__VA_ARGS__) |
| #define | vsseg6e8_v_i8m1_m(...) __riscv_vsseg6e8_v_i8m1_m(__VA_ARGS__) |
| #define | vsseg6e8_v_i8mf2(...) __riscv_vsseg6e8_v_i8mf2(__VA_ARGS__) |
| #define | vsseg6e8_v_i8mf2_m(...) __riscv_vsseg6e8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsseg6e8_v_i8mf4(...) __riscv_vsseg6e8_v_i8mf4(__VA_ARGS__) |
| #define | vsseg6e8_v_i8mf4_m(...) __riscv_vsseg6e8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsseg6e8_v_i8mf8(...) __riscv_vsseg6e8_v_i8mf8(__VA_ARGS__) |
| #define | vsseg6e8_v_i8mf8_m(...) __riscv_vsseg6e8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsseg6e8_v_u8m1(...) __riscv_vsseg6e8_v_u8m1(__VA_ARGS__) |
| #define | vsseg6e8_v_u8m1_m(...) __riscv_vsseg6e8_v_u8m1_m(__VA_ARGS__) |
| #define | vsseg6e8_v_u8mf2(...) __riscv_vsseg6e8_v_u8mf2(__VA_ARGS__) |
| #define | vsseg6e8_v_u8mf2_m(...) __riscv_vsseg6e8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsseg6e8_v_u8mf4(...) __riscv_vsseg6e8_v_u8mf4(__VA_ARGS__) |
| #define | vsseg6e8_v_u8mf4_m(...) __riscv_vsseg6e8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsseg6e8_v_u8mf8(...) __riscv_vsseg6e8_v_u8mf8(__VA_ARGS__) |
| #define | vsseg6e8_v_u8mf8_m(...) __riscv_vsseg6e8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsseg7e16_v_f16m1(...) __riscv_vsseg7e16_v_f16m1(__VA_ARGS__) |
| #define | vsseg7e16_v_f16m1_m(...) __riscv_vsseg7e16_v_f16m1_m(__VA_ARGS__) |
| #define | vsseg7e16_v_f16mf2(...) __riscv_vsseg7e16_v_f16mf2(__VA_ARGS__) |
| #define | vsseg7e16_v_f16mf2_m(...) __riscv_vsseg7e16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsseg7e16_v_f16mf4(...) __riscv_vsseg7e16_v_f16mf4(__VA_ARGS__) |
| #define | vsseg7e16_v_f16mf4_m(...) __riscv_vsseg7e16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsseg7e16_v_i16m1(...) __riscv_vsseg7e16_v_i16m1(__VA_ARGS__) |
| #define | vsseg7e16_v_i16m1_m(...) __riscv_vsseg7e16_v_i16m1_m(__VA_ARGS__) |
| #define | vsseg7e16_v_i16mf2(...) __riscv_vsseg7e16_v_i16mf2(__VA_ARGS__) |
| #define | vsseg7e16_v_i16mf2_m(...) __riscv_vsseg7e16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsseg7e16_v_i16mf4(...) __riscv_vsseg7e16_v_i16mf4(__VA_ARGS__) |
| #define | vsseg7e16_v_i16mf4_m(...) __riscv_vsseg7e16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsseg7e16_v_u16m1(...) __riscv_vsseg7e16_v_u16m1(__VA_ARGS__) |
| #define | vsseg7e16_v_u16m1_m(...) __riscv_vsseg7e16_v_u16m1_m(__VA_ARGS__) |
| #define | vsseg7e16_v_u16mf2(...) __riscv_vsseg7e16_v_u16mf2(__VA_ARGS__) |
| #define | vsseg7e16_v_u16mf2_m(...) __riscv_vsseg7e16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsseg7e16_v_u16mf4(...) __riscv_vsseg7e16_v_u16mf4(__VA_ARGS__) |
| #define | vsseg7e16_v_u16mf4_m(...) __riscv_vsseg7e16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsseg7e32_v_f32m1(...) __riscv_vsseg7e32_v_f32m1(__VA_ARGS__) |
| #define | vsseg7e32_v_f32m1_m(...) __riscv_vsseg7e32_v_f32m1_m(__VA_ARGS__) |
| #define | vsseg7e32_v_f32mf2(...) __riscv_vsseg7e32_v_f32mf2(__VA_ARGS__) |
| #define | vsseg7e32_v_f32mf2_m(...) __riscv_vsseg7e32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsseg7e32_v_i32m1(...) __riscv_vsseg7e32_v_i32m1(__VA_ARGS__) |
| #define | vsseg7e32_v_i32m1_m(...) __riscv_vsseg7e32_v_i32m1_m(__VA_ARGS__) |
| #define | vsseg7e32_v_i32mf2(...) __riscv_vsseg7e32_v_i32mf2(__VA_ARGS__) |
| #define | vsseg7e32_v_i32mf2_m(...) __riscv_vsseg7e32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsseg7e32_v_u32m1(...) __riscv_vsseg7e32_v_u32m1(__VA_ARGS__) |
| #define | vsseg7e32_v_u32m1_m(...) __riscv_vsseg7e32_v_u32m1_m(__VA_ARGS__) |
| #define | vsseg7e32_v_u32mf2(...) __riscv_vsseg7e32_v_u32mf2(__VA_ARGS__) |
| #define | vsseg7e32_v_u32mf2_m(...) __riscv_vsseg7e32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsseg7e64_v_f64m1(...) __riscv_vsseg7e64_v_f64m1(__VA_ARGS__) |
| #define | vsseg7e64_v_f64m1_m(...) __riscv_vsseg7e64_v_f64m1_m(__VA_ARGS__) |
| #define | vsseg7e64_v_i64m1(...) __riscv_vsseg7e64_v_i64m1(__VA_ARGS__) |
| #define | vsseg7e64_v_i64m1_m(...) __riscv_vsseg7e64_v_i64m1_m(__VA_ARGS__) |
| #define | vsseg7e64_v_u64m1(...) __riscv_vsseg7e64_v_u64m1(__VA_ARGS__) |
| #define | vsseg7e64_v_u64m1_m(...) __riscv_vsseg7e64_v_u64m1_m(__VA_ARGS__) |
| #define | vsseg7e8_v_i8m1(...) __riscv_vsseg7e8_v_i8m1(__VA_ARGS__) |
| #define | vsseg7e8_v_i8m1_m(...) __riscv_vsseg7e8_v_i8m1_m(__VA_ARGS__) |
| #define | vsseg7e8_v_i8mf2(...) __riscv_vsseg7e8_v_i8mf2(__VA_ARGS__) |
| #define | vsseg7e8_v_i8mf2_m(...) __riscv_vsseg7e8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsseg7e8_v_i8mf4(...) __riscv_vsseg7e8_v_i8mf4(__VA_ARGS__) |
| #define | vsseg7e8_v_i8mf4_m(...) __riscv_vsseg7e8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsseg7e8_v_i8mf8(...) __riscv_vsseg7e8_v_i8mf8(__VA_ARGS__) |
| #define | vsseg7e8_v_i8mf8_m(...) __riscv_vsseg7e8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsseg7e8_v_u8m1(...) __riscv_vsseg7e8_v_u8m1(__VA_ARGS__) |
| #define | vsseg7e8_v_u8m1_m(...) __riscv_vsseg7e8_v_u8m1_m(__VA_ARGS__) |
| #define | vsseg7e8_v_u8mf2(...) __riscv_vsseg7e8_v_u8mf2(__VA_ARGS__) |
| #define | vsseg7e8_v_u8mf2_m(...) __riscv_vsseg7e8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsseg7e8_v_u8mf4(...) __riscv_vsseg7e8_v_u8mf4(__VA_ARGS__) |
| #define | vsseg7e8_v_u8mf4_m(...) __riscv_vsseg7e8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsseg7e8_v_u8mf8(...) __riscv_vsseg7e8_v_u8mf8(__VA_ARGS__) |
| #define | vsseg7e8_v_u8mf8_m(...) __riscv_vsseg7e8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsseg8e16_v_f16m1(...) __riscv_vsseg8e16_v_f16m1(__VA_ARGS__) |
| #define | vsseg8e16_v_f16m1_m(...) __riscv_vsseg8e16_v_f16m1_m(__VA_ARGS__) |
| #define | vsseg8e16_v_f16mf2(...) __riscv_vsseg8e16_v_f16mf2(__VA_ARGS__) |
| #define | vsseg8e16_v_f16mf2_m(...) __riscv_vsseg8e16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsseg8e16_v_f16mf4(...) __riscv_vsseg8e16_v_f16mf4(__VA_ARGS__) |
| #define | vsseg8e16_v_f16mf4_m(...) __riscv_vsseg8e16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsseg8e16_v_i16m1(...) __riscv_vsseg8e16_v_i16m1(__VA_ARGS__) |
| #define | vsseg8e16_v_i16m1_m(...) __riscv_vsseg8e16_v_i16m1_m(__VA_ARGS__) |
| #define | vsseg8e16_v_i16mf2(...) __riscv_vsseg8e16_v_i16mf2(__VA_ARGS__) |
| #define | vsseg8e16_v_i16mf2_m(...) __riscv_vsseg8e16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsseg8e16_v_i16mf4(...) __riscv_vsseg8e16_v_i16mf4(__VA_ARGS__) |
| #define | vsseg8e16_v_i16mf4_m(...) __riscv_vsseg8e16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsseg8e16_v_u16m1(...) __riscv_vsseg8e16_v_u16m1(__VA_ARGS__) |
| #define | vsseg8e16_v_u16m1_m(...) __riscv_vsseg8e16_v_u16m1_m(__VA_ARGS__) |
| #define | vsseg8e16_v_u16mf2(...) __riscv_vsseg8e16_v_u16mf2(__VA_ARGS__) |
| #define | vsseg8e16_v_u16mf2_m(...) __riscv_vsseg8e16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsseg8e16_v_u16mf4(...) __riscv_vsseg8e16_v_u16mf4(__VA_ARGS__) |
| #define | vsseg8e16_v_u16mf4_m(...) __riscv_vsseg8e16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsseg8e32_v_f32m1(...) __riscv_vsseg8e32_v_f32m1(__VA_ARGS__) |
| #define | vsseg8e32_v_f32m1_m(...) __riscv_vsseg8e32_v_f32m1_m(__VA_ARGS__) |
| #define | vsseg8e32_v_f32mf2(...) __riscv_vsseg8e32_v_f32mf2(__VA_ARGS__) |
| #define | vsseg8e32_v_f32mf2_m(...) __riscv_vsseg8e32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsseg8e32_v_i32m1(...) __riscv_vsseg8e32_v_i32m1(__VA_ARGS__) |
| #define | vsseg8e32_v_i32m1_m(...) __riscv_vsseg8e32_v_i32m1_m(__VA_ARGS__) |
| #define | vsseg8e32_v_i32mf2(...) __riscv_vsseg8e32_v_i32mf2(__VA_ARGS__) |
| #define | vsseg8e32_v_i32mf2_m(...) __riscv_vsseg8e32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsseg8e32_v_u32m1(...) __riscv_vsseg8e32_v_u32m1(__VA_ARGS__) |
| #define | vsseg8e32_v_u32m1_m(...) __riscv_vsseg8e32_v_u32m1_m(__VA_ARGS__) |
| #define | vsseg8e32_v_u32mf2(...) __riscv_vsseg8e32_v_u32mf2(__VA_ARGS__) |
| #define | vsseg8e32_v_u32mf2_m(...) __riscv_vsseg8e32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsseg8e64_v_f64m1(...) __riscv_vsseg8e64_v_f64m1(__VA_ARGS__) |
| #define | vsseg8e64_v_f64m1_m(...) __riscv_vsseg8e64_v_f64m1_m(__VA_ARGS__) |
| #define | vsseg8e64_v_i64m1(...) __riscv_vsseg8e64_v_i64m1(__VA_ARGS__) |
| #define | vsseg8e64_v_i64m1_m(...) __riscv_vsseg8e64_v_i64m1_m(__VA_ARGS__) |
| #define | vsseg8e64_v_u64m1(...) __riscv_vsseg8e64_v_u64m1(__VA_ARGS__) |
| #define | vsseg8e64_v_u64m1_m(...) __riscv_vsseg8e64_v_u64m1_m(__VA_ARGS__) |
| #define | vsseg8e8_v_i8m1(...) __riscv_vsseg8e8_v_i8m1(__VA_ARGS__) |
| #define | vsseg8e8_v_i8m1_m(...) __riscv_vsseg8e8_v_i8m1_m(__VA_ARGS__) |
| #define | vsseg8e8_v_i8mf2(...) __riscv_vsseg8e8_v_i8mf2(__VA_ARGS__) |
| #define | vsseg8e8_v_i8mf2_m(...) __riscv_vsseg8e8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsseg8e8_v_i8mf4(...) __riscv_vsseg8e8_v_i8mf4(__VA_ARGS__) |
| #define | vsseg8e8_v_i8mf4_m(...) __riscv_vsseg8e8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsseg8e8_v_i8mf8(...) __riscv_vsseg8e8_v_i8mf8(__VA_ARGS__) |
| #define | vsseg8e8_v_i8mf8_m(...) __riscv_vsseg8e8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsseg8e8_v_u8m1(...) __riscv_vsseg8e8_v_u8m1(__VA_ARGS__) |
| #define | vsseg8e8_v_u8m1_m(...) __riscv_vsseg8e8_v_u8m1_m(__VA_ARGS__) |
| #define | vsseg8e8_v_u8mf2(...) __riscv_vsseg8e8_v_u8mf2(__VA_ARGS__) |
| #define | vsseg8e8_v_u8mf2_m(...) __riscv_vsseg8e8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsseg8e8_v_u8mf4(...) __riscv_vsseg8e8_v_u8mf4(__VA_ARGS__) |
| #define | vsseg8e8_v_u8mf4_m(...) __riscv_vsseg8e8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsseg8e8_v_u8mf8(...) __riscv_vsseg8e8_v_u8mf8(__VA_ARGS__) |
| #define | vsseg8e8_v_u8mf8_m(...) __riscv_vsseg8e8_v_u8mf8_m(__VA_ARGS__) |
| #define | vssra_vv_i16m1(...) __riscv_vssra_vv_i16m1(__VA_ARGS__) |
| #define | vssra_vv_i16m1_m(...) __riscv_vssra_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vssra_vv_i16m2(...) __riscv_vssra_vv_i16m2(__VA_ARGS__) |
| #define | vssra_vv_i16m2_m(...) __riscv_vssra_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vssra_vv_i16m4(...) __riscv_vssra_vv_i16m4(__VA_ARGS__) |
| #define | vssra_vv_i16m4_m(...) __riscv_vssra_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vssra_vv_i16m8(...) __riscv_vssra_vv_i16m8(__VA_ARGS__) |
| #define | vssra_vv_i16m8_m(...) __riscv_vssra_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vssra_vv_i16mf2(...) __riscv_vssra_vv_i16mf2(__VA_ARGS__) |
| #define | vssra_vv_i16mf2_m(...) __riscv_vssra_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vssra_vv_i16mf4(...) __riscv_vssra_vv_i16mf4(__VA_ARGS__) |
| #define | vssra_vv_i16mf4_m(...) __riscv_vssra_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vssra_vv_i32m1(...) __riscv_vssra_vv_i32m1(__VA_ARGS__) |
| #define | vssra_vv_i32m1_m(...) __riscv_vssra_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vssra_vv_i32m2(...) __riscv_vssra_vv_i32m2(__VA_ARGS__) |
| #define | vssra_vv_i32m2_m(...) __riscv_vssra_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vssra_vv_i32m4(...) __riscv_vssra_vv_i32m4(__VA_ARGS__) |
| #define | vssra_vv_i32m4_m(...) __riscv_vssra_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vssra_vv_i32m8(...) __riscv_vssra_vv_i32m8(__VA_ARGS__) |
| #define | vssra_vv_i32m8_m(...) __riscv_vssra_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vssra_vv_i32mf2(...) __riscv_vssra_vv_i32mf2(__VA_ARGS__) |
| #define | vssra_vv_i32mf2_m(...) __riscv_vssra_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vssra_vv_i64m1(...) __riscv_vssra_vv_i64m1(__VA_ARGS__) |
| #define | vssra_vv_i64m1_m(...) __riscv_vssra_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vssra_vv_i64m2(...) __riscv_vssra_vv_i64m2(__VA_ARGS__) |
| #define | vssra_vv_i64m2_m(...) __riscv_vssra_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vssra_vv_i64m4(...) __riscv_vssra_vv_i64m4(__VA_ARGS__) |
| #define | vssra_vv_i64m4_m(...) __riscv_vssra_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vssra_vv_i64m8(...) __riscv_vssra_vv_i64m8(__VA_ARGS__) |
| #define | vssra_vv_i64m8_m(...) __riscv_vssra_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vssra_vv_i8m1(...) __riscv_vssra_vv_i8m1(__VA_ARGS__) |
| #define | vssra_vv_i8m1_m(...) __riscv_vssra_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vssra_vv_i8m2(...) __riscv_vssra_vv_i8m2(__VA_ARGS__) |
| #define | vssra_vv_i8m2_m(...) __riscv_vssra_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vssra_vv_i8m4(...) __riscv_vssra_vv_i8m4(__VA_ARGS__) |
| #define | vssra_vv_i8m4_m(...) __riscv_vssra_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vssra_vv_i8m8(...) __riscv_vssra_vv_i8m8(__VA_ARGS__) |
| #define | vssra_vv_i8m8_m(...) __riscv_vssra_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vssra_vv_i8mf2(...) __riscv_vssra_vv_i8mf2(__VA_ARGS__) |
| #define | vssra_vv_i8mf2_m(...) __riscv_vssra_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vssra_vv_i8mf4(...) __riscv_vssra_vv_i8mf4(__VA_ARGS__) |
| #define | vssra_vv_i8mf4_m(...) __riscv_vssra_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vssra_vv_i8mf8(...) __riscv_vssra_vv_i8mf8(__VA_ARGS__) |
| #define | vssra_vv_i8mf8_m(...) __riscv_vssra_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vssra_vx_i16m1(...) __riscv_vssra_vx_i16m1(__VA_ARGS__) |
| #define | vssra_vx_i16m1_m(...) __riscv_vssra_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vssra_vx_i16m2(...) __riscv_vssra_vx_i16m2(__VA_ARGS__) |
| #define | vssra_vx_i16m2_m(...) __riscv_vssra_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vssra_vx_i16m4(...) __riscv_vssra_vx_i16m4(__VA_ARGS__) |
| #define | vssra_vx_i16m4_m(...) __riscv_vssra_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vssra_vx_i16m8(...) __riscv_vssra_vx_i16m8(__VA_ARGS__) |
| #define | vssra_vx_i16m8_m(...) __riscv_vssra_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vssra_vx_i16mf2(...) __riscv_vssra_vx_i16mf2(__VA_ARGS__) |
| #define | vssra_vx_i16mf2_m(...) __riscv_vssra_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vssra_vx_i16mf4(...) __riscv_vssra_vx_i16mf4(__VA_ARGS__) |
| #define | vssra_vx_i16mf4_m(...) __riscv_vssra_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vssra_vx_i32m1(...) __riscv_vssra_vx_i32m1(__VA_ARGS__) |
| #define | vssra_vx_i32m1_m(...) __riscv_vssra_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vssra_vx_i32m2(...) __riscv_vssra_vx_i32m2(__VA_ARGS__) |
| #define | vssra_vx_i32m2_m(...) __riscv_vssra_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vssra_vx_i32m4(...) __riscv_vssra_vx_i32m4(__VA_ARGS__) |
| #define | vssra_vx_i32m4_m(...) __riscv_vssra_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vssra_vx_i32m8(...) __riscv_vssra_vx_i32m8(__VA_ARGS__) |
| #define | vssra_vx_i32m8_m(...) __riscv_vssra_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vssra_vx_i32mf2(...) __riscv_vssra_vx_i32mf2(__VA_ARGS__) |
| #define | vssra_vx_i32mf2_m(...) __riscv_vssra_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vssra_vx_i64m1(...) __riscv_vssra_vx_i64m1(__VA_ARGS__) |
| #define | vssra_vx_i64m1_m(...) __riscv_vssra_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vssra_vx_i64m2(...) __riscv_vssra_vx_i64m2(__VA_ARGS__) |
| #define | vssra_vx_i64m2_m(...) __riscv_vssra_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vssra_vx_i64m4(...) __riscv_vssra_vx_i64m4(__VA_ARGS__) |
| #define | vssra_vx_i64m4_m(...) __riscv_vssra_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vssra_vx_i64m8(...) __riscv_vssra_vx_i64m8(__VA_ARGS__) |
| #define | vssra_vx_i64m8_m(...) __riscv_vssra_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vssra_vx_i8m1(...) __riscv_vssra_vx_i8m1(__VA_ARGS__) |
| #define | vssra_vx_i8m1_m(...) __riscv_vssra_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vssra_vx_i8m2(...) __riscv_vssra_vx_i8m2(__VA_ARGS__) |
| #define | vssra_vx_i8m2_m(...) __riscv_vssra_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vssra_vx_i8m4(...) __riscv_vssra_vx_i8m4(__VA_ARGS__) |
| #define | vssra_vx_i8m4_m(...) __riscv_vssra_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vssra_vx_i8m8(...) __riscv_vssra_vx_i8m8(__VA_ARGS__) |
| #define | vssra_vx_i8m8_m(...) __riscv_vssra_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vssra_vx_i8mf2(...) __riscv_vssra_vx_i8mf2(__VA_ARGS__) |
| #define | vssra_vx_i8mf2_m(...) __riscv_vssra_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vssra_vx_i8mf4(...) __riscv_vssra_vx_i8mf4(__VA_ARGS__) |
| #define | vssra_vx_i8mf4_m(...) __riscv_vssra_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vssra_vx_i8mf8(...) __riscv_vssra_vx_i8mf8(__VA_ARGS__) |
| #define | vssra_vx_i8mf8_m(...) __riscv_vssra_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u16m1(...) __riscv_vssrl_vv_u16m1(__VA_ARGS__) |
| #define | vssrl_vv_u16m1_m(...) __riscv_vssrl_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u16m2(...) __riscv_vssrl_vv_u16m2(__VA_ARGS__) |
| #define | vssrl_vv_u16m2_m(...) __riscv_vssrl_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u16m4(...) __riscv_vssrl_vv_u16m4(__VA_ARGS__) |
| #define | vssrl_vv_u16m4_m(...) __riscv_vssrl_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u16m8(...) __riscv_vssrl_vv_u16m8(__VA_ARGS__) |
| #define | vssrl_vv_u16m8_m(...) __riscv_vssrl_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u16mf2(...) __riscv_vssrl_vv_u16mf2(__VA_ARGS__) |
| #define | vssrl_vv_u16mf2_m(...) __riscv_vssrl_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u16mf4(...) __riscv_vssrl_vv_u16mf4(__VA_ARGS__) |
| #define | vssrl_vv_u16mf4_m(...) __riscv_vssrl_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u32m1(...) __riscv_vssrl_vv_u32m1(__VA_ARGS__) |
| #define | vssrl_vv_u32m1_m(...) __riscv_vssrl_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u32m2(...) __riscv_vssrl_vv_u32m2(__VA_ARGS__) |
| #define | vssrl_vv_u32m2_m(...) __riscv_vssrl_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u32m4(...) __riscv_vssrl_vv_u32m4(__VA_ARGS__) |
| #define | vssrl_vv_u32m4_m(...) __riscv_vssrl_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u32m8(...) __riscv_vssrl_vv_u32m8(__VA_ARGS__) |
| #define | vssrl_vv_u32m8_m(...) __riscv_vssrl_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u32mf2(...) __riscv_vssrl_vv_u32mf2(__VA_ARGS__) |
| #define | vssrl_vv_u32mf2_m(...) __riscv_vssrl_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u64m1(...) __riscv_vssrl_vv_u64m1(__VA_ARGS__) |
| #define | vssrl_vv_u64m1_m(...) __riscv_vssrl_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u64m2(...) __riscv_vssrl_vv_u64m2(__VA_ARGS__) |
| #define | vssrl_vv_u64m2_m(...) __riscv_vssrl_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u64m4(...) __riscv_vssrl_vv_u64m4(__VA_ARGS__) |
| #define | vssrl_vv_u64m4_m(...) __riscv_vssrl_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u64m8(...) __riscv_vssrl_vv_u64m8(__VA_ARGS__) |
| #define | vssrl_vv_u64m8_m(...) __riscv_vssrl_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u8m1(...) __riscv_vssrl_vv_u8m1(__VA_ARGS__) |
| #define | vssrl_vv_u8m1_m(...) __riscv_vssrl_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u8m2(...) __riscv_vssrl_vv_u8m2(__VA_ARGS__) |
| #define | vssrl_vv_u8m2_m(...) __riscv_vssrl_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u8m4(...) __riscv_vssrl_vv_u8m4(__VA_ARGS__) |
| #define | vssrl_vv_u8m4_m(...) __riscv_vssrl_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u8m8(...) __riscv_vssrl_vv_u8m8(__VA_ARGS__) |
| #define | vssrl_vv_u8m8_m(...) __riscv_vssrl_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u8mf2(...) __riscv_vssrl_vv_u8mf2(__VA_ARGS__) |
| #define | vssrl_vv_u8mf2_m(...) __riscv_vssrl_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u8mf4(...) __riscv_vssrl_vv_u8mf4(__VA_ARGS__) |
| #define | vssrl_vv_u8mf4_m(...) __riscv_vssrl_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vssrl_vv_u8mf8(...) __riscv_vssrl_vv_u8mf8(__VA_ARGS__) |
| #define | vssrl_vv_u8mf8_m(...) __riscv_vssrl_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u16m1(...) __riscv_vssrl_vx_u16m1(__VA_ARGS__) |
| #define | vssrl_vx_u16m1_m(...) __riscv_vssrl_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u16m2(...) __riscv_vssrl_vx_u16m2(__VA_ARGS__) |
| #define | vssrl_vx_u16m2_m(...) __riscv_vssrl_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u16m4(...) __riscv_vssrl_vx_u16m4(__VA_ARGS__) |
| #define | vssrl_vx_u16m4_m(...) __riscv_vssrl_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u16m8(...) __riscv_vssrl_vx_u16m8(__VA_ARGS__) |
| #define | vssrl_vx_u16m8_m(...) __riscv_vssrl_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u16mf2(...) __riscv_vssrl_vx_u16mf2(__VA_ARGS__) |
| #define | vssrl_vx_u16mf2_m(...) __riscv_vssrl_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u16mf4(...) __riscv_vssrl_vx_u16mf4(__VA_ARGS__) |
| #define | vssrl_vx_u16mf4_m(...) __riscv_vssrl_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u32m1(...) __riscv_vssrl_vx_u32m1(__VA_ARGS__) |
| #define | vssrl_vx_u32m1_m(...) __riscv_vssrl_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u32m2(...) __riscv_vssrl_vx_u32m2(__VA_ARGS__) |
| #define | vssrl_vx_u32m2_m(...) __riscv_vssrl_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u32m4(...) __riscv_vssrl_vx_u32m4(__VA_ARGS__) |
| #define | vssrl_vx_u32m4_m(...) __riscv_vssrl_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u32m8(...) __riscv_vssrl_vx_u32m8(__VA_ARGS__) |
| #define | vssrl_vx_u32m8_m(...) __riscv_vssrl_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u32mf2(...) __riscv_vssrl_vx_u32mf2(__VA_ARGS__) |
| #define | vssrl_vx_u32mf2_m(...) __riscv_vssrl_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u64m1(...) __riscv_vssrl_vx_u64m1(__VA_ARGS__) |
| #define | vssrl_vx_u64m1_m(...) __riscv_vssrl_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u64m2(...) __riscv_vssrl_vx_u64m2(__VA_ARGS__) |
| #define | vssrl_vx_u64m2_m(...) __riscv_vssrl_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u64m4(...) __riscv_vssrl_vx_u64m4(__VA_ARGS__) |
| #define | vssrl_vx_u64m4_m(...) __riscv_vssrl_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u64m8(...) __riscv_vssrl_vx_u64m8(__VA_ARGS__) |
| #define | vssrl_vx_u64m8_m(...) __riscv_vssrl_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u8m1(...) __riscv_vssrl_vx_u8m1(__VA_ARGS__) |
| #define | vssrl_vx_u8m1_m(...) __riscv_vssrl_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u8m2(...) __riscv_vssrl_vx_u8m2(__VA_ARGS__) |
| #define | vssrl_vx_u8m2_m(...) __riscv_vssrl_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u8m4(...) __riscv_vssrl_vx_u8m4(__VA_ARGS__) |
| #define | vssrl_vx_u8m4_m(...) __riscv_vssrl_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u8m8(...) __riscv_vssrl_vx_u8m8(__VA_ARGS__) |
| #define | vssrl_vx_u8m8_m(...) __riscv_vssrl_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u8mf2(...) __riscv_vssrl_vx_u8mf2(__VA_ARGS__) |
| #define | vssrl_vx_u8mf2_m(...) __riscv_vssrl_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u8mf4(...) __riscv_vssrl_vx_u8mf4(__VA_ARGS__) |
| #define | vssrl_vx_u8mf4_m(...) __riscv_vssrl_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vssrl_vx_u8mf8(...) __riscv_vssrl_vx_u8mf8(__VA_ARGS__) |
| #define | vssrl_vx_u8mf8_m(...) __riscv_vssrl_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vssseg2e16_v_f16m1(...) __riscv_vssseg2e16_v_f16m1(__VA_ARGS__) |
| #define | vssseg2e16_v_f16m1_m(...) __riscv_vssseg2e16_v_f16m1_m(__VA_ARGS__) |
| #define | vssseg2e16_v_f16m2(...) __riscv_vssseg2e16_v_f16m2(__VA_ARGS__) |
| #define | vssseg2e16_v_f16m2_m(...) __riscv_vssseg2e16_v_f16m2_m(__VA_ARGS__) |
| #define | vssseg2e16_v_f16m4(...) __riscv_vssseg2e16_v_f16m4(__VA_ARGS__) |
| #define | vssseg2e16_v_f16m4_m(...) __riscv_vssseg2e16_v_f16m4_m(__VA_ARGS__) |
| #define | vssseg2e16_v_f16mf2(...) __riscv_vssseg2e16_v_f16mf2(__VA_ARGS__) |
| #define | vssseg2e16_v_f16mf2_m(...) __riscv_vssseg2e16_v_f16mf2_m(__VA_ARGS__) |
| #define | vssseg2e16_v_f16mf4(...) __riscv_vssseg2e16_v_f16mf4(__VA_ARGS__) |
| #define | vssseg2e16_v_f16mf4_m(...) __riscv_vssseg2e16_v_f16mf4_m(__VA_ARGS__) |
| #define | vssseg2e16_v_i16m1(...) __riscv_vssseg2e16_v_i16m1(__VA_ARGS__) |
| #define | vssseg2e16_v_i16m1_m(...) __riscv_vssseg2e16_v_i16m1_m(__VA_ARGS__) |
| #define | vssseg2e16_v_i16m2(...) __riscv_vssseg2e16_v_i16m2(__VA_ARGS__) |
| #define | vssseg2e16_v_i16m2_m(...) __riscv_vssseg2e16_v_i16m2_m(__VA_ARGS__) |
| #define | vssseg2e16_v_i16m4(...) __riscv_vssseg2e16_v_i16m4(__VA_ARGS__) |
| #define | vssseg2e16_v_i16m4_m(...) __riscv_vssseg2e16_v_i16m4_m(__VA_ARGS__) |
| #define | vssseg2e16_v_i16mf2(...) __riscv_vssseg2e16_v_i16mf2(__VA_ARGS__) |
| #define | vssseg2e16_v_i16mf2_m(...) __riscv_vssseg2e16_v_i16mf2_m(__VA_ARGS__) |
| #define | vssseg2e16_v_i16mf4(...) __riscv_vssseg2e16_v_i16mf4(__VA_ARGS__) |
| #define | vssseg2e16_v_i16mf4_m(...) __riscv_vssseg2e16_v_i16mf4_m(__VA_ARGS__) |
| #define | vssseg2e16_v_u16m1(...) __riscv_vssseg2e16_v_u16m1(__VA_ARGS__) |
| #define | vssseg2e16_v_u16m1_m(...) __riscv_vssseg2e16_v_u16m1_m(__VA_ARGS__) |
| #define | vssseg2e16_v_u16m2(...) __riscv_vssseg2e16_v_u16m2(__VA_ARGS__) |
| #define | vssseg2e16_v_u16m2_m(...) __riscv_vssseg2e16_v_u16m2_m(__VA_ARGS__) |
| #define | vssseg2e16_v_u16m4(...) __riscv_vssseg2e16_v_u16m4(__VA_ARGS__) |
| #define | vssseg2e16_v_u16m4_m(...) __riscv_vssseg2e16_v_u16m4_m(__VA_ARGS__) |
| #define | vssseg2e16_v_u16mf2(...) __riscv_vssseg2e16_v_u16mf2(__VA_ARGS__) |
| #define | vssseg2e16_v_u16mf2_m(...) __riscv_vssseg2e16_v_u16mf2_m(__VA_ARGS__) |
| #define | vssseg2e16_v_u16mf4(...) __riscv_vssseg2e16_v_u16mf4(__VA_ARGS__) |
| #define | vssseg2e16_v_u16mf4_m(...) __riscv_vssseg2e16_v_u16mf4_m(__VA_ARGS__) |
| #define | vssseg2e32_v_f32m1(...) __riscv_vssseg2e32_v_f32m1(__VA_ARGS__) |
| #define | vssseg2e32_v_f32m1_m(...) __riscv_vssseg2e32_v_f32m1_m(__VA_ARGS__) |
| #define | vssseg2e32_v_f32m2(...) __riscv_vssseg2e32_v_f32m2(__VA_ARGS__) |
| #define | vssseg2e32_v_f32m2_m(...) __riscv_vssseg2e32_v_f32m2_m(__VA_ARGS__) |
| #define | vssseg2e32_v_f32m4(...) __riscv_vssseg2e32_v_f32m4(__VA_ARGS__) |
| #define | vssseg2e32_v_f32m4_m(...) __riscv_vssseg2e32_v_f32m4_m(__VA_ARGS__) |
| #define | vssseg2e32_v_f32mf2(...) __riscv_vssseg2e32_v_f32mf2(__VA_ARGS__) |
| #define | vssseg2e32_v_f32mf2_m(...) __riscv_vssseg2e32_v_f32mf2_m(__VA_ARGS__) |
| #define | vssseg2e32_v_i32m1(...) __riscv_vssseg2e32_v_i32m1(__VA_ARGS__) |
| #define | vssseg2e32_v_i32m1_m(...) __riscv_vssseg2e32_v_i32m1_m(__VA_ARGS__) |
| #define | vssseg2e32_v_i32m2(...) __riscv_vssseg2e32_v_i32m2(__VA_ARGS__) |
| #define | vssseg2e32_v_i32m2_m(...) __riscv_vssseg2e32_v_i32m2_m(__VA_ARGS__) |
| #define | vssseg2e32_v_i32m4(...) __riscv_vssseg2e32_v_i32m4(__VA_ARGS__) |
| #define | vssseg2e32_v_i32m4_m(...) __riscv_vssseg2e32_v_i32m4_m(__VA_ARGS__) |
| #define | vssseg2e32_v_i32mf2(...) __riscv_vssseg2e32_v_i32mf2(__VA_ARGS__) |
| #define | vssseg2e32_v_i32mf2_m(...) __riscv_vssseg2e32_v_i32mf2_m(__VA_ARGS__) |
| #define | vssseg2e32_v_u32m1(...) __riscv_vssseg2e32_v_u32m1(__VA_ARGS__) |
| #define | vssseg2e32_v_u32m1_m(...) __riscv_vssseg2e32_v_u32m1_m(__VA_ARGS__) |
| #define | vssseg2e32_v_u32m2(...) __riscv_vssseg2e32_v_u32m2(__VA_ARGS__) |
| #define | vssseg2e32_v_u32m2_m(...) __riscv_vssseg2e32_v_u32m2_m(__VA_ARGS__) |
| #define | vssseg2e32_v_u32m4(...) __riscv_vssseg2e32_v_u32m4(__VA_ARGS__) |
| #define | vssseg2e32_v_u32m4_m(...) __riscv_vssseg2e32_v_u32m4_m(__VA_ARGS__) |
| #define | vssseg2e32_v_u32mf2(...) __riscv_vssseg2e32_v_u32mf2(__VA_ARGS__) |
| #define | vssseg2e32_v_u32mf2_m(...) __riscv_vssseg2e32_v_u32mf2_m(__VA_ARGS__) |
| #define | vssseg2e64_v_f64m1(...) __riscv_vssseg2e64_v_f64m1(__VA_ARGS__) |
| #define | vssseg2e64_v_f64m1_m(...) __riscv_vssseg2e64_v_f64m1_m(__VA_ARGS__) |
| #define | vssseg2e64_v_f64m2(...) __riscv_vssseg2e64_v_f64m2(__VA_ARGS__) |
| #define | vssseg2e64_v_f64m2_m(...) __riscv_vssseg2e64_v_f64m2_m(__VA_ARGS__) |
| #define | vssseg2e64_v_f64m4(...) __riscv_vssseg2e64_v_f64m4(__VA_ARGS__) |
| #define | vssseg2e64_v_f64m4_m(...) __riscv_vssseg2e64_v_f64m4_m(__VA_ARGS__) |
| #define | vssseg2e64_v_i64m1(...) __riscv_vssseg2e64_v_i64m1(__VA_ARGS__) |
| #define | vssseg2e64_v_i64m1_m(...) __riscv_vssseg2e64_v_i64m1_m(__VA_ARGS__) |
| #define | vssseg2e64_v_i64m2(...) __riscv_vssseg2e64_v_i64m2(__VA_ARGS__) |
| #define | vssseg2e64_v_i64m2_m(...) __riscv_vssseg2e64_v_i64m2_m(__VA_ARGS__) |
| #define | vssseg2e64_v_i64m4(...) __riscv_vssseg2e64_v_i64m4(__VA_ARGS__) |
| #define | vssseg2e64_v_i64m4_m(...) __riscv_vssseg2e64_v_i64m4_m(__VA_ARGS__) |
| #define | vssseg2e64_v_u64m1(...) __riscv_vssseg2e64_v_u64m1(__VA_ARGS__) |
| #define | vssseg2e64_v_u64m1_m(...) __riscv_vssseg2e64_v_u64m1_m(__VA_ARGS__) |
| #define | vssseg2e64_v_u64m2(...) __riscv_vssseg2e64_v_u64m2(__VA_ARGS__) |
| #define | vssseg2e64_v_u64m2_m(...) __riscv_vssseg2e64_v_u64m2_m(__VA_ARGS__) |
| #define | vssseg2e64_v_u64m4(...) __riscv_vssseg2e64_v_u64m4(__VA_ARGS__) |
| #define | vssseg2e64_v_u64m4_m(...) __riscv_vssseg2e64_v_u64m4_m(__VA_ARGS__) |
| #define | vssseg2e8_v_i8m1(...) __riscv_vssseg2e8_v_i8m1(__VA_ARGS__) |
| #define | vssseg2e8_v_i8m1_m(...) __riscv_vssseg2e8_v_i8m1_m(__VA_ARGS__) |
| #define | vssseg2e8_v_i8m2(...) __riscv_vssseg2e8_v_i8m2(__VA_ARGS__) |
| #define | vssseg2e8_v_i8m2_m(...) __riscv_vssseg2e8_v_i8m2_m(__VA_ARGS__) |
| #define | vssseg2e8_v_i8m4(...) __riscv_vssseg2e8_v_i8m4(__VA_ARGS__) |
| #define | vssseg2e8_v_i8m4_m(...) __riscv_vssseg2e8_v_i8m4_m(__VA_ARGS__) |
| #define | vssseg2e8_v_i8mf2(...) __riscv_vssseg2e8_v_i8mf2(__VA_ARGS__) |
| #define | vssseg2e8_v_i8mf2_m(...) __riscv_vssseg2e8_v_i8mf2_m(__VA_ARGS__) |
| #define | vssseg2e8_v_i8mf4(...) __riscv_vssseg2e8_v_i8mf4(__VA_ARGS__) |
| #define | vssseg2e8_v_i8mf4_m(...) __riscv_vssseg2e8_v_i8mf4_m(__VA_ARGS__) |
| #define | vssseg2e8_v_i8mf8(...) __riscv_vssseg2e8_v_i8mf8(__VA_ARGS__) |
| #define | vssseg2e8_v_i8mf8_m(...) __riscv_vssseg2e8_v_i8mf8_m(__VA_ARGS__) |
| #define | vssseg2e8_v_u8m1(...) __riscv_vssseg2e8_v_u8m1(__VA_ARGS__) |
| #define | vssseg2e8_v_u8m1_m(...) __riscv_vssseg2e8_v_u8m1_m(__VA_ARGS__) |
| #define | vssseg2e8_v_u8m2(...) __riscv_vssseg2e8_v_u8m2(__VA_ARGS__) |
| #define | vssseg2e8_v_u8m2_m(...) __riscv_vssseg2e8_v_u8m2_m(__VA_ARGS__) |
| #define | vssseg2e8_v_u8m4(...) __riscv_vssseg2e8_v_u8m4(__VA_ARGS__) |
| #define | vssseg2e8_v_u8m4_m(...) __riscv_vssseg2e8_v_u8m4_m(__VA_ARGS__) |
| #define | vssseg2e8_v_u8mf2(...) __riscv_vssseg2e8_v_u8mf2(__VA_ARGS__) |
| #define | vssseg2e8_v_u8mf2_m(...) __riscv_vssseg2e8_v_u8mf2_m(__VA_ARGS__) |
| #define | vssseg2e8_v_u8mf4(...) __riscv_vssseg2e8_v_u8mf4(__VA_ARGS__) |
| #define | vssseg2e8_v_u8mf4_m(...) __riscv_vssseg2e8_v_u8mf4_m(__VA_ARGS__) |
| #define | vssseg2e8_v_u8mf8(...) __riscv_vssseg2e8_v_u8mf8(__VA_ARGS__) |
| #define | vssseg2e8_v_u8mf8_m(...) __riscv_vssseg2e8_v_u8mf8_m(__VA_ARGS__) |
| #define | vssseg3e16_v_f16m1(...) __riscv_vssseg3e16_v_f16m1(__VA_ARGS__) |
| #define | vssseg3e16_v_f16m1_m(...) __riscv_vssseg3e16_v_f16m1_m(__VA_ARGS__) |
| #define | vssseg3e16_v_f16m2(...) __riscv_vssseg3e16_v_f16m2(__VA_ARGS__) |
| #define | vssseg3e16_v_f16m2_m(...) __riscv_vssseg3e16_v_f16m2_m(__VA_ARGS__) |
| #define | vssseg3e16_v_f16mf2(...) __riscv_vssseg3e16_v_f16mf2(__VA_ARGS__) |
| #define | vssseg3e16_v_f16mf2_m(...) __riscv_vssseg3e16_v_f16mf2_m(__VA_ARGS__) |
| #define | vssseg3e16_v_f16mf4(...) __riscv_vssseg3e16_v_f16mf4(__VA_ARGS__) |
| #define | vssseg3e16_v_f16mf4_m(...) __riscv_vssseg3e16_v_f16mf4_m(__VA_ARGS__) |
| #define | vssseg3e16_v_i16m1(...) __riscv_vssseg3e16_v_i16m1(__VA_ARGS__) |
| #define | vssseg3e16_v_i16m1_m(...) __riscv_vssseg3e16_v_i16m1_m(__VA_ARGS__) |
| #define | vssseg3e16_v_i16m2(...) __riscv_vssseg3e16_v_i16m2(__VA_ARGS__) |
| #define | vssseg3e16_v_i16m2_m(...) __riscv_vssseg3e16_v_i16m2_m(__VA_ARGS__) |
| #define | vssseg3e16_v_i16mf2(...) __riscv_vssseg3e16_v_i16mf2(__VA_ARGS__) |
| #define | vssseg3e16_v_i16mf2_m(...) __riscv_vssseg3e16_v_i16mf2_m(__VA_ARGS__) |
| #define | vssseg3e16_v_i16mf4(...) __riscv_vssseg3e16_v_i16mf4(__VA_ARGS__) |
| #define | vssseg3e16_v_i16mf4_m(...) __riscv_vssseg3e16_v_i16mf4_m(__VA_ARGS__) |
| #define | vssseg3e16_v_u16m1(...) __riscv_vssseg3e16_v_u16m1(__VA_ARGS__) |
| #define | vssseg3e16_v_u16m1_m(...) __riscv_vssseg3e16_v_u16m1_m(__VA_ARGS__) |
| #define | vssseg3e16_v_u16m2(...) __riscv_vssseg3e16_v_u16m2(__VA_ARGS__) |
| #define | vssseg3e16_v_u16m2_m(...) __riscv_vssseg3e16_v_u16m2_m(__VA_ARGS__) |
| #define | vssseg3e16_v_u16mf2(...) __riscv_vssseg3e16_v_u16mf2(__VA_ARGS__) |
| #define | vssseg3e16_v_u16mf2_m(...) __riscv_vssseg3e16_v_u16mf2_m(__VA_ARGS__) |
| #define | vssseg3e16_v_u16mf4(...) __riscv_vssseg3e16_v_u16mf4(__VA_ARGS__) |
| #define | vssseg3e16_v_u16mf4_m(...) __riscv_vssseg3e16_v_u16mf4_m(__VA_ARGS__) |
| #define | vssseg3e32_v_f32m1(...) __riscv_vssseg3e32_v_f32m1(__VA_ARGS__) |
| #define | vssseg3e32_v_f32m1_m(...) __riscv_vssseg3e32_v_f32m1_m(__VA_ARGS__) |
| #define | vssseg3e32_v_f32m2(...) __riscv_vssseg3e32_v_f32m2(__VA_ARGS__) |
| #define | vssseg3e32_v_f32m2_m(...) __riscv_vssseg3e32_v_f32m2_m(__VA_ARGS__) |
| #define | vssseg3e32_v_f32mf2(...) __riscv_vssseg3e32_v_f32mf2(__VA_ARGS__) |
| #define | vssseg3e32_v_f32mf2_m(...) __riscv_vssseg3e32_v_f32mf2_m(__VA_ARGS__) |
| #define | vssseg3e32_v_i32m1(...) __riscv_vssseg3e32_v_i32m1(__VA_ARGS__) |
| #define | vssseg3e32_v_i32m1_m(...) __riscv_vssseg3e32_v_i32m1_m(__VA_ARGS__) |
| #define | vssseg3e32_v_i32m2(...) __riscv_vssseg3e32_v_i32m2(__VA_ARGS__) |
| #define | vssseg3e32_v_i32m2_m(...) __riscv_vssseg3e32_v_i32m2_m(__VA_ARGS__) |
| #define | vssseg3e32_v_i32mf2(...) __riscv_vssseg3e32_v_i32mf2(__VA_ARGS__) |
| #define | vssseg3e32_v_i32mf2_m(...) __riscv_vssseg3e32_v_i32mf2_m(__VA_ARGS__) |
| #define | vssseg3e32_v_u32m1(...) __riscv_vssseg3e32_v_u32m1(__VA_ARGS__) |
| #define | vssseg3e32_v_u32m1_m(...) __riscv_vssseg3e32_v_u32m1_m(__VA_ARGS__) |
| #define | vssseg3e32_v_u32m2(...) __riscv_vssseg3e32_v_u32m2(__VA_ARGS__) |
| #define | vssseg3e32_v_u32m2_m(...) __riscv_vssseg3e32_v_u32m2_m(__VA_ARGS__) |
| #define | vssseg3e32_v_u32mf2(...) __riscv_vssseg3e32_v_u32mf2(__VA_ARGS__) |
| #define | vssseg3e32_v_u32mf2_m(...) __riscv_vssseg3e32_v_u32mf2_m(__VA_ARGS__) |
| #define | vssseg3e64_v_f64m1(...) __riscv_vssseg3e64_v_f64m1(__VA_ARGS__) |
| #define | vssseg3e64_v_f64m1_m(...) __riscv_vssseg3e64_v_f64m1_m(__VA_ARGS__) |
| #define | vssseg3e64_v_f64m2(...) __riscv_vssseg3e64_v_f64m2(__VA_ARGS__) |
| #define | vssseg3e64_v_f64m2_m(...) __riscv_vssseg3e64_v_f64m2_m(__VA_ARGS__) |
| #define | vssseg3e64_v_i64m1(...) __riscv_vssseg3e64_v_i64m1(__VA_ARGS__) |
| #define | vssseg3e64_v_i64m1_m(...) __riscv_vssseg3e64_v_i64m1_m(__VA_ARGS__) |
| #define | vssseg3e64_v_i64m2(...) __riscv_vssseg3e64_v_i64m2(__VA_ARGS__) |
| #define | vssseg3e64_v_i64m2_m(...) __riscv_vssseg3e64_v_i64m2_m(__VA_ARGS__) |
| #define | vssseg3e64_v_u64m1(...) __riscv_vssseg3e64_v_u64m1(__VA_ARGS__) |
| #define | vssseg3e64_v_u64m1_m(...) __riscv_vssseg3e64_v_u64m1_m(__VA_ARGS__) |
| #define | vssseg3e64_v_u64m2(...) __riscv_vssseg3e64_v_u64m2(__VA_ARGS__) |
| #define | vssseg3e64_v_u64m2_m(...) __riscv_vssseg3e64_v_u64m2_m(__VA_ARGS__) |
| #define | vssseg3e8_v_i8m1(...) __riscv_vssseg3e8_v_i8m1(__VA_ARGS__) |
| #define | vssseg3e8_v_i8m1_m(...) __riscv_vssseg3e8_v_i8m1_m(__VA_ARGS__) |
| #define | vssseg3e8_v_i8m2(...) __riscv_vssseg3e8_v_i8m2(__VA_ARGS__) |
| #define | vssseg3e8_v_i8m2_m(...) __riscv_vssseg3e8_v_i8m2_m(__VA_ARGS__) |
| #define | vssseg3e8_v_i8mf2(...) __riscv_vssseg3e8_v_i8mf2(__VA_ARGS__) |
| #define | vssseg3e8_v_i8mf2_m(...) __riscv_vssseg3e8_v_i8mf2_m(__VA_ARGS__) |
| #define | vssseg3e8_v_i8mf4(...) __riscv_vssseg3e8_v_i8mf4(__VA_ARGS__) |
| #define | vssseg3e8_v_i8mf4_m(...) __riscv_vssseg3e8_v_i8mf4_m(__VA_ARGS__) |
| #define | vssseg3e8_v_i8mf8(...) __riscv_vssseg3e8_v_i8mf8(__VA_ARGS__) |
| #define | vssseg3e8_v_i8mf8_m(...) __riscv_vssseg3e8_v_i8mf8_m(__VA_ARGS__) |
| #define | vssseg3e8_v_u8m1(...) __riscv_vssseg3e8_v_u8m1(__VA_ARGS__) |
| #define | vssseg3e8_v_u8m1_m(...) __riscv_vssseg3e8_v_u8m1_m(__VA_ARGS__) |
| #define | vssseg3e8_v_u8m2(...) __riscv_vssseg3e8_v_u8m2(__VA_ARGS__) |
| #define | vssseg3e8_v_u8m2_m(...) __riscv_vssseg3e8_v_u8m2_m(__VA_ARGS__) |
| #define | vssseg3e8_v_u8mf2(...) __riscv_vssseg3e8_v_u8mf2(__VA_ARGS__) |
| #define | vssseg3e8_v_u8mf2_m(...) __riscv_vssseg3e8_v_u8mf2_m(__VA_ARGS__) |
| #define | vssseg3e8_v_u8mf4(...) __riscv_vssseg3e8_v_u8mf4(__VA_ARGS__) |
| #define | vssseg3e8_v_u8mf4_m(...) __riscv_vssseg3e8_v_u8mf4_m(__VA_ARGS__) |
| #define | vssseg3e8_v_u8mf8(...) __riscv_vssseg3e8_v_u8mf8(__VA_ARGS__) |
| #define | vssseg3e8_v_u8mf8_m(...) __riscv_vssseg3e8_v_u8mf8_m(__VA_ARGS__) |
| #define | vssseg4e16_v_f16m1(...) __riscv_vssseg4e16_v_f16m1(__VA_ARGS__) |
| #define | vssseg4e16_v_f16m1_m(...) __riscv_vssseg4e16_v_f16m1_m(__VA_ARGS__) |
| #define | vssseg4e16_v_f16m2(...) __riscv_vssseg4e16_v_f16m2(__VA_ARGS__) |
| #define | vssseg4e16_v_f16m2_m(...) __riscv_vssseg4e16_v_f16m2_m(__VA_ARGS__) |
| #define | vssseg4e16_v_f16mf2(...) __riscv_vssseg4e16_v_f16mf2(__VA_ARGS__) |
| #define | vssseg4e16_v_f16mf2_m(...) __riscv_vssseg4e16_v_f16mf2_m(__VA_ARGS__) |
| #define | vssseg4e16_v_f16mf4(...) __riscv_vssseg4e16_v_f16mf4(__VA_ARGS__) |
| #define | vssseg4e16_v_f16mf4_m(...) __riscv_vssseg4e16_v_f16mf4_m(__VA_ARGS__) |
| #define | vssseg4e16_v_i16m1(...) __riscv_vssseg4e16_v_i16m1(__VA_ARGS__) |
| #define | vssseg4e16_v_i16m1_m(...) __riscv_vssseg4e16_v_i16m1_m(__VA_ARGS__) |
| #define | vssseg4e16_v_i16m2(...) __riscv_vssseg4e16_v_i16m2(__VA_ARGS__) |
| #define | vssseg4e16_v_i16m2_m(...) __riscv_vssseg4e16_v_i16m2_m(__VA_ARGS__) |
| #define | vssseg4e16_v_i16mf2(...) __riscv_vssseg4e16_v_i16mf2(__VA_ARGS__) |
| #define | vssseg4e16_v_i16mf2_m(...) __riscv_vssseg4e16_v_i16mf2_m(__VA_ARGS__) |
| #define | vssseg4e16_v_i16mf4(...) __riscv_vssseg4e16_v_i16mf4(__VA_ARGS__) |
| #define | vssseg4e16_v_i16mf4_m(...) __riscv_vssseg4e16_v_i16mf4_m(__VA_ARGS__) |
| #define | vssseg4e16_v_u16m1(...) __riscv_vssseg4e16_v_u16m1(__VA_ARGS__) |
| #define | vssseg4e16_v_u16m1_m(...) __riscv_vssseg4e16_v_u16m1_m(__VA_ARGS__) |
| #define | vssseg4e16_v_u16m2(...) __riscv_vssseg4e16_v_u16m2(__VA_ARGS__) |
| #define | vssseg4e16_v_u16m2_m(...) __riscv_vssseg4e16_v_u16m2_m(__VA_ARGS__) |
| #define | vssseg4e16_v_u16mf2(...) __riscv_vssseg4e16_v_u16mf2(__VA_ARGS__) |
| #define | vssseg4e16_v_u16mf2_m(...) __riscv_vssseg4e16_v_u16mf2_m(__VA_ARGS__) |
| #define | vssseg4e16_v_u16mf4(...) __riscv_vssseg4e16_v_u16mf4(__VA_ARGS__) |
| #define | vssseg4e16_v_u16mf4_m(...) __riscv_vssseg4e16_v_u16mf4_m(__VA_ARGS__) |
| #define | vssseg4e32_v_f32m1(...) __riscv_vssseg4e32_v_f32m1(__VA_ARGS__) |
| #define | vssseg4e32_v_f32m1_m(...) __riscv_vssseg4e32_v_f32m1_m(__VA_ARGS__) |
| #define | vssseg4e32_v_f32m2(...) __riscv_vssseg4e32_v_f32m2(__VA_ARGS__) |
| #define | vssseg4e32_v_f32m2_m(...) __riscv_vssseg4e32_v_f32m2_m(__VA_ARGS__) |
| #define | vssseg4e32_v_f32mf2(...) __riscv_vssseg4e32_v_f32mf2(__VA_ARGS__) |
| #define | vssseg4e32_v_f32mf2_m(...) __riscv_vssseg4e32_v_f32mf2_m(__VA_ARGS__) |
| #define | vssseg4e32_v_i32m1(...) __riscv_vssseg4e32_v_i32m1(__VA_ARGS__) |
| #define | vssseg4e32_v_i32m1_m(...) __riscv_vssseg4e32_v_i32m1_m(__VA_ARGS__) |
| #define | vssseg4e32_v_i32m2(...) __riscv_vssseg4e32_v_i32m2(__VA_ARGS__) |
| #define | vssseg4e32_v_i32m2_m(...) __riscv_vssseg4e32_v_i32m2_m(__VA_ARGS__) |
| #define | vssseg4e32_v_i32mf2(...) __riscv_vssseg4e32_v_i32mf2(__VA_ARGS__) |
| #define | vssseg4e32_v_i32mf2_m(...) __riscv_vssseg4e32_v_i32mf2_m(__VA_ARGS__) |
| #define | vssseg4e32_v_u32m1(...) __riscv_vssseg4e32_v_u32m1(__VA_ARGS__) |
| #define | vssseg4e32_v_u32m1_m(...) __riscv_vssseg4e32_v_u32m1_m(__VA_ARGS__) |
| #define | vssseg4e32_v_u32m2(...) __riscv_vssseg4e32_v_u32m2(__VA_ARGS__) |
| #define | vssseg4e32_v_u32m2_m(...) __riscv_vssseg4e32_v_u32m2_m(__VA_ARGS__) |
| #define | vssseg4e32_v_u32mf2(...) __riscv_vssseg4e32_v_u32mf2(__VA_ARGS__) |
| #define | vssseg4e32_v_u32mf2_m(...) __riscv_vssseg4e32_v_u32mf2_m(__VA_ARGS__) |
| #define | vssseg4e64_v_f64m1(...) __riscv_vssseg4e64_v_f64m1(__VA_ARGS__) |
| #define | vssseg4e64_v_f64m1_m(...) __riscv_vssseg4e64_v_f64m1_m(__VA_ARGS__) |
| #define | vssseg4e64_v_f64m2(...) __riscv_vssseg4e64_v_f64m2(__VA_ARGS__) |
| #define | vssseg4e64_v_f64m2_m(...) __riscv_vssseg4e64_v_f64m2_m(__VA_ARGS__) |
| #define | vssseg4e64_v_i64m1(...) __riscv_vssseg4e64_v_i64m1(__VA_ARGS__) |
| #define | vssseg4e64_v_i64m1_m(...) __riscv_vssseg4e64_v_i64m1_m(__VA_ARGS__) |
| #define | vssseg4e64_v_i64m2(...) __riscv_vssseg4e64_v_i64m2(__VA_ARGS__) |
| #define | vssseg4e64_v_i64m2_m(...) __riscv_vssseg4e64_v_i64m2_m(__VA_ARGS__) |
| #define | vssseg4e64_v_u64m1(...) __riscv_vssseg4e64_v_u64m1(__VA_ARGS__) |
| #define | vssseg4e64_v_u64m1_m(...) __riscv_vssseg4e64_v_u64m1_m(__VA_ARGS__) |
| #define | vssseg4e64_v_u64m2(...) __riscv_vssseg4e64_v_u64m2(__VA_ARGS__) |
| #define | vssseg4e64_v_u64m2_m(...) __riscv_vssseg4e64_v_u64m2_m(__VA_ARGS__) |
| #define | vssseg4e8_v_i8m1(...) __riscv_vssseg4e8_v_i8m1(__VA_ARGS__) |
| #define | vssseg4e8_v_i8m1_m(...) __riscv_vssseg4e8_v_i8m1_m(__VA_ARGS__) |
| #define | vssseg4e8_v_i8m2(...) __riscv_vssseg4e8_v_i8m2(__VA_ARGS__) |
| #define | vssseg4e8_v_i8m2_m(...) __riscv_vssseg4e8_v_i8m2_m(__VA_ARGS__) |
| #define | vssseg4e8_v_i8mf2(...) __riscv_vssseg4e8_v_i8mf2(__VA_ARGS__) |
| #define | vssseg4e8_v_i8mf2_m(...) __riscv_vssseg4e8_v_i8mf2_m(__VA_ARGS__) |
| #define | vssseg4e8_v_i8mf4(...) __riscv_vssseg4e8_v_i8mf4(__VA_ARGS__) |
| #define | vssseg4e8_v_i8mf4_m(...) __riscv_vssseg4e8_v_i8mf4_m(__VA_ARGS__) |
| #define | vssseg4e8_v_i8mf8(...) __riscv_vssseg4e8_v_i8mf8(__VA_ARGS__) |
| #define | vssseg4e8_v_i8mf8_m(...) __riscv_vssseg4e8_v_i8mf8_m(__VA_ARGS__) |
| #define | vssseg4e8_v_u8m1(...) __riscv_vssseg4e8_v_u8m1(__VA_ARGS__) |
| #define | vssseg4e8_v_u8m1_m(...) __riscv_vssseg4e8_v_u8m1_m(__VA_ARGS__) |
| #define | vssseg4e8_v_u8m2(...) __riscv_vssseg4e8_v_u8m2(__VA_ARGS__) |
| #define | vssseg4e8_v_u8m2_m(...) __riscv_vssseg4e8_v_u8m2_m(__VA_ARGS__) |
| #define | vssseg4e8_v_u8mf2(...) __riscv_vssseg4e8_v_u8mf2(__VA_ARGS__) |
| #define | vssseg4e8_v_u8mf2_m(...) __riscv_vssseg4e8_v_u8mf2_m(__VA_ARGS__) |
| #define | vssseg4e8_v_u8mf4(...) __riscv_vssseg4e8_v_u8mf4(__VA_ARGS__) |
| #define | vssseg4e8_v_u8mf4_m(...) __riscv_vssseg4e8_v_u8mf4_m(__VA_ARGS__) |
| #define | vssseg4e8_v_u8mf8(...) __riscv_vssseg4e8_v_u8mf8(__VA_ARGS__) |
| #define | vssseg4e8_v_u8mf8_m(...) __riscv_vssseg4e8_v_u8mf8_m(__VA_ARGS__) |
| #define | vssseg5e16_v_f16m1(...) __riscv_vssseg5e16_v_f16m1(__VA_ARGS__) |
| #define | vssseg5e16_v_f16m1_m(...) __riscv_vssseg5e16_v_f16m1_m(__VA_ARGS__) |
| #define | vssseg5e16_v_f16mf2(...) __riscv_vssseg5e16_v_f16mf2(__VA_ARGS__) |
| #define | vssseg5e16_v_f16mf2_m(...) __riscv_vssseg5e16_v_f16mf2_m(__VA_ARGS__) |
| #define | vssseg5e16_v_f16mf4(...) __riscv_vssseg5e16_v_f16mf4(__VA_ARGS__) |
| #define | vssseg5e16_v_f16mf4_m(...) __riscv_vssseg5e16_v_f16mf4_m(__VA_ARGS__) |
| #define | vssseg5e16_v_i16m1(...) __riscv_vssseg5e16_v_i16m1(__VA_ARGS__) |
| #define | vssseg5e16_v_i16m1_m(...) __riscv_vssseg5e16_v_i16m1_m(__VA_ARGS__) |
| #define | vssseg5e16_v_i16mf2(...) __riscv_vssseg5e16_v_i16mf2(__VA_ARGS__) |
| #define | vssseg5e16_v_i16mf2_m(...) __riscv_vssseg5e16_v_i16mf2_m(__VA_ARGS__) |
| #define | vssseg5e16_v_i16mf4(...) __riscv_vssseg5e16_v_i16mf4(__VA_ARGS__) |
| #define | vssseg5e16_v_i16mf4_m(...) __riscv_vssseg5e16_v_i16mf4_m(__VA_ARGS__) |
| #define | vssseg5e16_v_u16m1(...) __riscv_vssseg5e16_v_u16m1(__VA_ARGS__) |
| #define | vssseg5e16_v_u16m1_m(...) __riscv_vssseg5e16_v_u16m1_m(__VA_ARGS__) |
| #define | vssseg5e16_v_u16mf2(...) __riscv_vssseg5e16_v_u16mf2(__VA_ARGS__) |
| #define | vssseg5e16_v_u16mf2_m(...) __riscv_vssseg5e16_v_u16mf2_m(__VA_ARGS__) |
| #define | vssseg5e16_v_u16mf4(...) __riscv_vssseg5e16_v_u16mf4(__VA_ARGS__) |
| #define | vssseg5e16_v_u16mf4_m(...) __riscv_vssseg5e16_v_u16mf4_m(__VA_ARGS__) |
| #define | vssseg5e32_v_f32m1(...) __riscv_vssseg5e32_v_f32m1(__VA_ARGS__) |
| #define | vssseg5e32_v_f32m1_m(...) __riscv_vssseg5e32_v_f32m1_m(__VA_ARGS__) |
| #define | vssseg5e32_v_f32mf2(...) __riscv_vssseg5e32_v_f32mf2(__VA_ARGS__) |
| #define | vssseg5e32_v_f32mf2_m(...) __riscv_vssseg5e32_v_f32mf2_m(__VA_ARGS__) |
| #define | vssseg5e32_v_i32m1(...) __riscv_vssseg5e32_v_i32m1(__VA_ARGS__) |
| #define | vssseg5e32_v_i32m1_m(...) __riscv_vssseg5e32_v_i32m1_m(__VA_ARGS__) |
| #define | vssseg5e32_v_i32mf2(...) __riscv_vssseg5e32_v_i32mf2(__VA_ARGS__) |
| #define | vssseg5e32_v_i32mf2_m(...) __riscv_vssseg5e32_v_i32mf2_m(__VA_ARGS__) |
| #define | vssseg5e32_v_u32m1(...) __riscv_vssseg5e32_v_u32m1(__VA_ARGS__) |
| #define | vssseg5e32_v_u32m1_m(...) __riscv_vssseg5e32_v_u32m1_m(__VA_ARGS__) |
| #define | vssseg5e32_v_u32mf2(...) __riscv_vssseg5e32_v_u32mf2(__VA_ARGS__) |
| #define | vssseg5e32_v_u32mf2_m(...) __riscv_vssseg5e32_v_u32mf2_m(__VA_ARGS__) |
| #define | vssseg5e64_v_f64m1(...) __riscv_vssseg5e64_v_f64m1(__VA_ARGS__) |
| #define | vssseg5e64_v_f64m1_m(...) __riscv_vssseg5e64_v_f64m1_m(__VA_ARGS__) |
| #define | vssseg5e64_v_i64m1(...) __riscv_vssseg5e64_v_i64m1(__VA_ARGS__) |
| #define | vssseg5e64_v_i64m1_m(...) __riscv_vssseg5e64_v_i64m1_m(__VA_ARGS__) |
| #define | vssseg5e64_v_u64m1(...) __riscv_vssseg5e64_v_u64m1(__VA_ARGS__) |
| #define | vssseg5e64_v_u64m1_m(...) __riscv_vssseg5e64_v_u64m1_m(__VA_ARGS__) |
| #define | vssseg5e8_v_i8m1(...) __riscv_vssseg5e8_v_i8m1(__VA_ARGS__) |
| #define | vssseg5e8_v_i8m1_m(...) __riscv_vssseg5e8_v_i8m1_m(__VA_ARGS__) |
| #define | vssseg5e8_v_i8mf2(...) __riscv_vssseg5e8_v_i8mf2(__VA_ARGS__) |
| #define | vssseg5e8_v_i8mf2_m(...) __riscv_vssseg5e8_v_i8mf2_m(__VA_ARGS__) |
| #define | vssseg5e8_v_i8mf4(...) __riscv_vssseg5e8_v_i8mf4(__VA_ARGS__) |
| #define | vssseg5e8_v_i8mf4_m(...) __riscv_vssseg5e8_v_i8mf4_m(__VA_ARGS__) |
| #define | vssseg5e8_v_i8mf8(...) __riscv_vssseg5e8_v_i8mf8(__VA_ARGS__) |
| #define | vssseg5e8_v_i8mf8_m(...) __riscv_vssseg5e8_v_i8mf8_m(__VA_ARGS__) |
| #define | vssseg5e8_v_u8m1(...) __riscv_vssseg5e8_v_u8m1(__VA_ARGS__) |
| #define | vssseg5e8_v_u8m1_m(...) __riscv_vssseg5e8_v_u8m1_m(__VA_ARGS__) |
| #define | vssseg5e8_v_u8mf2(...) __riscv_vssseg5e8_v_u8mf2(__VA_ARGS__) |
| #define | vssseg5e8_v_u8mf2_m(...) __riscv_vssseg5e8_v_u8mf2_m(__VA_ARGS__) |
| #define | vssseg5e8_v_u8mf4(...) __riscv_vssseg5e8_v_u8mf4(__VA_ARGS__) |
| #define | vssseg5e8_v_u8mf4_m(...) __riscv_vssseg5e8_v_u8mf4_m(__VA_ARGS__) |
| #define | vssseg5e8_v_u8mf8(...) __riscv_vssseg5e8_v_u8mf8(__VA_ARGS__) |
| #define | vssseg5e8_v_u8mf8_m(...) __riscv_vssseg5e8_v_u8mf8_m(__VA_ARGS__) |
| #define | vssseg6e16_v_f16m1(...) __riscv_vssseg6e16_v_f16m1(__VA_ARGS__) |
| #define | vssseg6e16_v_f16m1_m(...) __riscv_vssseg6e16_v_f16m1_m(__VA_ARGS__) |
| #define | vssseg6e16_v_f16mf2(...) __riscv_vssseg6e16_v_f16mf2(__VA_ARGS__) |
| #define | vssseg6e16_v_f16mf2_m(...) __riscv_vssseg6e16_v_f16mf2_m(__VA_ARGS__) |
| #define | vssseg6e16_v_f16mf4(...) __riscv_vssseg6e16_v_f16mf4(__VA_ARGS__) |
| #define | vssseg6e16_v_f16mf4_m(...) __riscv_vssseg6e16_v_f16mf4_m(__VA_ARGS__) |
| #define | vssseg6e16_v_i16m1(...) __riscv_vssseg6e16_v_i16m1(__VA_ARGS__) |
| #define | vssseg6e16_v_i16m1_m(...) __riscv_vssseg6e16_v_i16m1_m(__VA_ARGS__) |
| #define | vssseg6e16_v_i16mf2(...) __riscv_vssseg6e16_v_i16mf2(__VA_ARGS__) |
| #define | vssseg6e16_v_i16mf2_m(...) __riscv_vssseg6e16_v_i16mf2_m(__VA_ARGS__) |
| #define | vssseg6e16_v_i16mf4(...) __riscv_vssseg6e16_v_i16mf4(__VA_ARGS__) |
| #define | vssseg6e16_v_i16mf4_m(...) __riscv_vssseg6e16_v_i16mf4_m(__VA_ARGS__) |
| #define | vssseg6e16_v_u16m1(...) __riscv_vssseg6e16_v_u16m1(__VA_ARGS__) |
| #define | vssseg6e16_v_u16m1_m(...) __riscv_vssseg6e16_v_u16m1_m(__VA_ARGS__) |
| #define | vssseg6e16_v_u16mf2(...) __riscv_vssseg6e16_v_u16mf2(__VA_ARGS__) |
| #define | vssseg6e16_v_u16mf2_m(...) __riscv_vssseg6e16_v_u16mf2_m(__VA_ARGS__) |
| #define | vssseg6e16_v_u16mf4(...) __riscv_vssseg6e16_v_u16mf4(__VA_ARGS__) |
| #define | vssseg6e16_v_u16mf4_m(...) __riscv_vssseg6e16_v_u16mf4_m(__VA_ARGS__) |
| #define | vssseg6e32_v_f32m1(...) __riscv_vssseg6e32_v_f32m1(__VA_ARGS__) |
| #define | vssseg6e32_v_f32m1_m(...) __riscv_vssseg6e32_v_f32m1_m(__VA_ARGS__) |
| #define | vssseg6e32_v_f32mf2(...) __riscv_vssseg6e32_v_f32mf2(__VA_ARGS__) |
| #define | vssseg6e32_v_f32mf2_m(...) __riscv_vssseg6e32_v_f32mf2_m(__VA_ARGS__) |
| #define | vssseg6e32_v_i32m1(...) __riscv_vssseg6e32_v_i32m1(__VA_ARGS__) |
| #define | vssseg6e32_v_i32m1_m(...) __riscv_vssseg6e32_v_i32m1_m(__VA_ARGS__) |
| #define | vssseg6e32_v_i32mf2(...) __riscv_vssseg6e32_v_i32mf2(__VA_ARGS__) |
| #define | vssseg6e32_v_i32mf2_m(...) __riscv_vssseg6e32_v_i32mf2_m(__VA_ARGS__) |
| #define | vssseg6e32_v_u32m1(...) __riscv_vssseg6e32_v_u32m1(__VA_ARGS__) |
| #define | vssseg6e32_v_u32m1_m(...) __riscv_vssseg6e32_v_u32m1_m(__VA_ARGS__) |
| #define | vssseg6e32_v_u32mf2(...) __riscv_vssseg6e32_v_u32mf2(__VA_ARGS__) |
| #define | vssseg6e32_v_u32mf2_m(...) __riscv_vssseg6e32_v_u32mf2_m(__VA_ARGS__) |
| #define | vssseg6e64_v_f64m1(...) __riscv_vssseg6e64_v_f64m1(__VA_ARGS__) |
| #define | vssseg6e64_v_f64m1_m(...) __riscv_vssseg6e64_v_f64m1_m(__VA_ARGS__) |
| #define | vssseg6e64_v_i64m1(...) __riscv_vssseg6e64_v_i64m1(__VA_ARGS__) |
| #define | vssseg6e64_v_i64m1_m(...) __riscv_vssseg6e64_v_i64m1_m(__VA_ARGS__) |
| #define | vssseg6e64_v_u64m1(...) __riscv_vssseg6e64_v_u64m1(__VA_ARGS__) |
| #define | vssseg6e64_v_u64m1_m(...) __riscv_vssseg6e64_v_u64m1_m(__VA_ARGS__) |
| #define | vssseg6e8_v_i8m1(...) __riscv_vssseg6e8_v_i8m1(__VA_ARGS__) |
| #define | vssseg6e8_v_i8m1_m(...) __riscv_vssseg6e8_v_i8m1_m(__VA_ARGS__) |
| #define | vssseg6e8_v_i8mf2(...) __riscv_vssseg6e8_v_i8mf2(__VA_ARGS__) |
| #define | vssseg6e8_v_i8mf2_m(...) __riscv_vssseg6e8_v_i8mf2_m(__VA_ARGS__) |
| #define | vssseg6e8_v_i8mf4(...) __riscv_vssseg6e8_v_i8mf4(__VA_ARGS__) |
| #define | vssseg6e8_v_i8mf4_m(...) __riscv_vssseg6e8_v_i8mf4_m(__VA_ARGS__) |
| #define | vssseg6e8_v_i8mf8(...) __riscv_vssseg6e8_v_i8mf8(__VA_ARGS__) |
| #define | vssseg6e8_v_i8mf8_m(...) __riscv_vssseg6e8_v_i8mf8_m(__VA_ARGS__) |
| #define | vssseg6e8_v_u8m1(...) __riscv_vssseg6e8_v_u8m1(__VA_ARGS__) |
| #define | vssseg6e8_v_u8m1_m(...) __riscv_vssseg6e8_v_u8m1_m(__VA_ARGS__) |
| #define | vssseg6e8_v_u8mf2(...) __riscv_vssseg6e8_v_u8mf2(__VA_ARGS__) |
| #define | vssseg6e8_v_u8mf2_m(...) __riscv_vssseg6e8_v_u8mf2_m(__VA_ARGS__) |
| #define | vssseg6e8_v_u8mf4(...) __riscv_vssseg6e8_v_u8mf4(__VA_ARGS__) |
| #define | vssseg6e8_v_u8mf4_m(...) __riscv_vssseg6e8_v_u8mf4_m(__VA_ARGS__) |
| #define | vssseg6e8_v_u8mf8(...) __riscv_vssseg6e8_v_u8mf8(__VA_ARGS__) |
| #define | vssseg6e8_v_u8mf8_m(...) __riscv_vssseg6e8_v_u8mf8_m(__VA_ARGS__) |
| #define | vssseg7e16_v_f16m1(...) __riscv_vssseg7e16_v_f16m1(__VA_ARGS__) |
| #define | vssseg7e16_v_f16m1_m(...) __riscv_vssseg7e16_v_f16m1_m(__VA_ARGS__) |
| #define | vssseg7e16_v_f16mf2(...) __riscv_vssseg7e16_v_f16mf2(__VA_ARGS__) |
| #define | vssseg7e16_v_f16mf2_m(...) __riscv_vssseg7e16_v_f16mf2_m(__VA_ARGS__) |
| #define | vssseg7e16_v_f16mf4(...) __riscv_vssseg7e16_v_f16mf4(__VA_ARGS__) |
| #define | vssseg7e16_v_f16mf4_m(...) __riscv_vssseg7e16_v_f16mf4_m(__VA_ARGS__) |
| #define | vssseg7e16_v_i16m1(...) __riscv_vssseg7e16_v_i16m1(__VA_ARGS__) |
| #define | vssseg7e16_v_i16m1_m(...) __riscv_vssseg7e16_v_i16m1_m(__VA_ARGS__) |
| #define | vssseg7e16_v_i16mf2(...) __riscv_vssseg7e16_v_i16mf2(__VA_ARGS__) |
| #define | vssseg7e16_v_i16mf2_m(...) __riscv_vssseg7e16_v_i16mf2_m(__VA_ARGS__) |
| #define | vssseg7e16_v_i16mf4(...) __riscv_vssseg7e16_v_i16mf4(__VA_ARGS__) |
| #define | vssseg7e16_v_i16mf4_m(...) __riscv_vssseg7e16_v_i16mf4_m(__VA_ARGS__) |
| #define | vssseg7e16_v_u16m1(...) __riscv_vssseg7e16_v_u16m1(__VA_ARGS__) |
| #define | vssseg7e16_v_u16m1_m(...) __riscv_vssseg7e16_v_u16m1_m(__VA_ARGS__) |
| #define | vssseg7e16_v_u16mf2(...) __riscv_vssseg7e16_v_u16mf2(__VA_ARGS__) |
| #define | vssseg7e16_v_u16mf2_m(...) __riscv_vssseg7e16_v_u16mf2_m(__VA_ARGS__) |
| #define | vssseg7e16_v_u16mf4(...) __riscv_vssseg7e16_v_u16mf4(__VA_ARGS__) |
| #define | vssseg7e16_v_u16mf4_m(...) __riscv_vssseg7e16_v_u16mf4_m(__VA_ARGS__) |
| #define | vssseg7e32_v_f32m1(...) __riscv_vssseg7e32_v_f32m1(__VA_ARGS__) |
| #define | vssseg7e32_v_f32m1_m(...) __riscv_vssseg7e32_v_f32m1_m(__VA_ARGS__) |
| #define | vssseg7e32_v_f32mf2(...) __riscv_vssseg7e32_v_f32mf2(__VA_ARGS__) |
| #define | vssseg7e32_v_f32mf2_m(...) __riscv_vssseg7e32_v_f32mf2_m(__VA_ARGS__) |
| #define | vssseg7e32_v_i32m1(...) __riscv_vssseg7e32_v_i32m1(__VA_ARGS__) |
| #define | vssseg7e32_v_i32m1_m(...) __riscv_vssseg7e32_v_i32m1_m(__VA_ARGS__) |
| #define | vssseg7e32_v_i32mf2(...) __riscv_vssseg7e32_v_i32mf2(__VA_ARGS__) |
| #define | vssseg7e32_v_i32mf2_m(...) __riscv_vssseg7e32_v_i32mf2_m(__VA_ARGS__) |
| #define | vssseg7e32_v_u32m1(...) __riscv_vssseg7e32_v_u32m1(__VA_ARGS__) |
| #define | vssseg7e32_v_u32m1_m(...) __riscv_vssseg7e32_v_u32m1_m(__VA_ARGS__) |
| #define | vssseg7e32_v_u32mf2(...) __riscv_vssseg7e32_v_u32mf2(__VA_ARGS__) |
| #define | vssseg7e32_v_u32mf2_m(...) __riscv_vssseg7e32_v_u32mf2_m(__VA_ARGS__) |
| #define | vssseg7e64_v_f64m1(...) __riscv_vssseg7e64_v_f64m1(__VA_ARGS__) |
| #define | vssseg7e64_v_f64m1_m(...) __riscv_vssseg7e64_v_f64m1_m(__VA_ARGS__) |
| #define | vssseg7e64_v_i64m1(...) __riscv_vssseg7e64_v_i64m1(__VA_ARGS__) |
| #define | vssseg7e64_v_i64m1_m(...) __riscv_vssseg7e64_v_i64m1_m(__VA_ARGS__) |
| #define | vssseg7e64_v_u64m1(...) __riscv_vssseg7e64_v_u64m1(__VA_ARGS__) |
| #define | vssseg7e64_v_u64m1_m(...) __riscv_vssseg7e64_v_u64m1_m(__VA_ARGS__) |
| #define | vssseg7e8_v_i8m1(...) __riscv_vssseg7e8_v_i8m1(__VA_ARGS__) |
| #define | vssseg7e8_v_i8m1_m(...) __riscv_vssseg7e8_v_i8m1_m(__VA_ARGS__) |
| #define | vssseg7e8_v_i8mf2(...) __riscv_vssseg7e8_v_i8mf2(__VA_ARGS__) |
| #define | vssseg7e8_v_i8mf2_m(...) __riscv_vssseg7e8_v_i8mf2_m(__VA_ARGS__) |
| #define | vssseg7e8_v_i8mf4(...) __riscv_vssseg7e8_v_i8mf4(__VA_ARGS__) |
| #define | vssseg7e8_v_i8mf4_m(...) __riscv_vssseg7e8_v_i8mf4_m(__VA_ARGS__) |
| #define | vssseg7e8_v_i8mf8(...) __riscv_vssseg7e8_v_i8mf8(__VA_ARGS__) |
| #define | vssseg7e8_v_i8mf8_m(...) __riscv_vssseg7e8_v_i8mf8_m(__VA_ARGS__) |
| #define | vssseg7e8_v_u8m1(...) __riscv_vssseg7e8_v_u8m1(__VA_ARGS__) |
| #define | vssseg7e8_v_u8m1_m(...) __riscv_vssseg7e8_v_u8m1_m(__VA_ARGS__) |
| #define | vssseg7e8_v_u8mf2(...) __riscv_vssseg7e8_v_u8mf2(__VA_ARGS__) |
| #define | vssseg7e8_v_u8mf2_m(...) __riscv_vssseg7e8_v_u8mf2_m(__VA_ARGS__) |
| #define | vssseg7e8_v_u8mf4(...) __riscv_vssseg7e8_v_u8mf4(__VA_ARGS__) |
| #define | vssseg7e8_v_u8mf4_m(...) __riscv_vssseg7e8_v_u8mf4_m(__VA_ARGS__) |
| #define | vssseg7e8_v_u8mf8(...) __riscv_vssseg7e8_v_u8mf8(__VA_ARGS__) |
| #define | vssseg7e8_v_u8mf8_m(...) __riscv_vssseg7e8_v_u8mf8_m(__VA_ARGS__) |
| #define | vssseg8e16_v_f16m1(...) __riscv_vssseg8e16_v_f16m1(__VA_ARGS__) |
| #define | vssseg8e16_v_f16m1_m(...) __riscv_vssseg8e16_v_f16m1_m(__VA_ARGS__) |
| #define | vssseg8e16_v_f16mf2(...) __riscv_vssseg8e16_v_f16mf2(__VA_ARGS__) |
| #define | vssseg8e16_v_f16mf2_m(...) __riscv_vssseg8e16_v_f16mf2_m(__VA_ARGS__) |
| #define | vssseg8e16_v_f16mf4(...) __riscv_vssseg8e16_v_f16mf4(__VA_ARGS__) |
| #define | vssseg8e16_v_f16mf4_m(...) __riscv_vssseg8e16_v_f16mf4_m(__VA_ARGS__) |
| #define | vssseg8e16_v_i16m1(...) __riscv_vssseg8e16_v_i16m1(__VA_ARGS__) |
| #define | vssseg8e16_v_i16m1_m(...) __riscv_vssseg8e16_v_i16m1_m(__VA_ARGS__) |
| #define | vssseg8e16_v_i16mf2(...) __riscv_vssseg8e16_v_i16mf2(__VA_ARGS__) |
| #define | vssseg8e16_v_i16mf2_m(...) __riscv_vssseg8e16_v_i16mf2_m(__VA_ARGS__) |
| #define | vssseg8e16_v_i16mf4(...) __riscv_vssseg8e16_v_i16mf4(__VA_ARGS__) |
| #define | vssseg8e16_v_i16mf4_m(...) __riscv_vssseg8e16_v_i16mf4_m(__VA_ARGS__) |
| #define | vssseg8e16_v_u16m1(...) __riscv_vssseg8e16_v_u16m1(__VA_ARGS__) |
| #define | vssseg8e16_v_u16m1_m(...) __riscv_vssseg8e16_v_u16m1_m(__VA_ARGS__) |
| #define | vssseg8e16_v_u16mf2(...) __riscv_vssseg8e16_v_u16mf2(__VA_ARGS__) |
| #define | vssseg8e16_v_u16mf2_m(...) __riscv_vssseg8e16_v_u16mf2_m(__VA_ARGS__) |
| #define | vssseg8e16_v_u16mf4(...) __riscv_vssseg8e16_v_u16mf4(__VA_ARGS__) |
| #define | vssseg8e16_v_u16mf4_m(...) __riscv_vssseg8e16_v_u16mf4_m(__VA_ARGS__) |
| #define | vssseg8e32_v_f32m1(...) __riscv_vssseg8e32_v_f32m1(__VA_ARGS__) |
| #define | vssseg8e32_v_f32m1_m(...) __riscv_vssseg8e32_v_f32m1_m(__VA_ARGS__) |
| #define | vssseg8e32_v_f32mf2(...) __riscv_vssseg8e32_v_f32mf2(__VA_ARGS__) |
| #define | vssseg8e32_v_f32mf2_m(...) __riscv_vssseg8e32_v_f32mf2_m(__VA_ARGS__) |
| #define | vssseg8e32_v_i32m1(...) __riscv_vssseg8e32_v_i32m1(__VA_ARGS__) |
| #define | vssseg8e32_v_i32m1_m(...) __riscv_vssseg8e32_v_i32m1_m(__VA_ARGS__) |
| #define | vssseg8e32_v_i32mf2(...) __riscv_vssseg8e32_v_i32mf2(__VA_ARGS__) |
| #define | vssseg8e32_v_i32mf2_m(...) __riscv_vssseg8e32_v_i32mf2_m(__VA_ARGS__) |
| #define | vssseg8e32_v_u32m1(...) __riscv_vssseg8e32_v_u32m1(__VA_ARGS__) |
| #define | vssseg8e32_v_u32m1_m(...) __riscv_vssseg8e32_v_u32m1_m(__VA_ARGS__) |
| #define | vssseg8e32_v_u32mf2(...) __riscv_vssseg8e32_v_u32mf2(__VA_ARGS__) |
| #define | vssseg8e32_v_u32mf2_m(...) __riscv_vssseg8e32_v_u32mf2_m(__VA_ARGS__) |
| #define | vssseg8e64_v_f64m1(...) __riscv_vssseg8e64_v_f64m1(__VA_ARGS__) |
| #define | vssseg8e64_v_f64m1_m(...) __riscv_vssseg8e64_v_f64m1_m(__VA_ARGS__) |
| #define | vssseg8e64_v_i64m1(...) __riscv_vssseg8e64_v_i64m1(__VA_ARGS__) |
| #define | vssseg8e64_v_i64m1_m(...) __riscv_vssseg8e64_v_i64m1_m(__VA_ARGS__) |
| #define | vssseg8e64_v_u64m1(...) __riscv_vssseg8e64_v_u64m1(__VA_ARGS__) |
| #define | vssseg8e64_v_u64m1_m(...) __riscv_vssseg8e64_v_u64m1_m(__VA_ARGS__) |
| #define | vssseg8e8_v_i8m1(...) __riscv_vssseg8e8_v_i8m1(__VA_ARGS__) |
| #define | vssseg8e8_v_i8m1_m(...) __riscv_vssseg8e8_v_i8m1_m(__VA_ARGS__) |
| #define | vssseg8e8_v_i8mf2(...) __riscv_vssseg8e8_v_i8mf2(__VA_ARGS__) |
| #define | vssseg8e8_v_i8mf2_m(...) __riscv_vssseg8e8_v_i8mf2_m(__VA_ARGS__) |
| #define | vssseg8e8_v_i8mf4(...) __riscv_vssseg8e8_v_i8mf4(__VA_ARGS__) |
| #define | vssseg8e8_v_i8mf4_m(...) __riscv_vssseg8e8_v_i8mf4_m(__VA_ARGS__) |
| #define | vssseg8e8_v_i8mf8(...) __riscv_vssseg8e8_v_i8mf8(__VA_ARGS__) |
| #define | vssseg8e8_v_i8mf8_m(...) __riscv_vssseg8e8_v_i8mf8_m(__VA_ARGS__) |
| #define | vssseg8e8_v_u8m1(...) __riscv_vssseg8e8_v_u8m1(__VA_ARGS__) |
| #define | vssseg8e8_v_u8m1_m(...) __riscv_vssseg8e8_v_u8m1_m(__VA_ARGS__) |
| #define | vssseg8e8_v_u8mf2(...) __riscv_vssseg8e8_v_u8mf2(__VA_ARGS__) |
| #define | vssseg8e8_v_u8mf2_m(...) __riscv_vssseg8e8_v_u8mf2_m(__VA_ARGS__) |
| #define | vssseg8e8_v_u8mf4(...) __riscv_vssseg8e8_v_u8mf4(__VA_ARGS__) |
| #define | vssseg8e8_v_u8mf4_m(...) __riscv_vssseg8e8_v_u8mf4_m(__VA_ARGS__) |
| #define | vssseg8e8_v_u8mf8(...) __riscv_vssseg8e8_v_u8mf8(__VA_ARGS__) |
| #define | vssseg8e8_v_u8mf8_m(...) __riscv_vssseg8e8_v_u8mf8_m(__VA_ARGS__) |
| #define | vssub_vv_i16m1(...) __riscv_vssub_vv_i16m1(__VA_ARGS__) |
| #define | vssub_vv_i16m1_m(...) __riscv_vssub_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vssub_vv_i16m2(...) __riscv_vssub_vv_i16m2(__VA_ARGS__) |
| #define | vssub_vv_i16m2_m(...) __riscv_vssub_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vssub_vv_i16m4(...) __riscv_vssub_vv_i16m4(__VA_ARGS__) |
| #define | vssub_vv_i16m4_m(...) __riscv_vssub_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vssub_vv_i16m8(...) __riscv_vssub_vv_i16m8(__VA_ARGS__) |
| #define | vssub_vv_i16m8_m(...) __riscv_vssub_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vssub_vv_i16mf2(...) __riscv_vssub_vv_i16mf2(__VA_ARGS__) |
| #define | vssub_vv_i16mf2_m(...) __riscv_vssub_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vssub_vv_i16mf4(...) __riscv_vssub_vv_i16mf4(__VA_ARGS__) |
| #define | vssub_vv_i16mf4_m(...) __riscv_vssub_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vssub_vv_i32m1(...) __riscv_vssub_vv_i32m1(__VA_ARGS__) |
| #define | vssub_vv_i32m1_m(...) __riscv_vssub_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vssub_vv_i32m2(...) __riscv_vssub_vv_i32m2(__VA_ARGS__) |
| #define | vssub_vv_i32m2_m(...) __riscv_vssub_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vssub_vv_i32m4(...) __riscv_vssub_vv_i32m4(__VA_ARGS__) |
| #define | vssub_vv_i32m4_m(...) __riscv_vssub_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vssub_vv_i32m8(...) __riscv_vssub_vv_i32m8(__VA_ARGS__) |
| #define | vssub_vv_i32m8_m(...) __riscv_vssub_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vssub_vv_i32mf2(...) __riscv_vssub_vv_i32mf2(__VA_ARGS__) |
| #define | vssub_vv_i32mf2_m(...) __riscv_vssub_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vssub_vv_i64m1(...) __riscv_vssub_vv_i64m1(__VA_ARGS__) |
| #define | vssub_vv_i64m1_m(...) __riscv_vssub_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vssub_vv_i64m2(...) __riscv_vssub_vv_i64m2(__VA_ARGS__) |
| #define | vssub_vv_i64m2_m(...) __riscv_vssub_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vssub_vv_i64m4(...) __riscv_vssub_vv_i64m4(__VA_ARGS__) |
| #define | vssub_vv_i64m4_m(...) __riscv_vssub_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vssub_vv_i64m8(...) __riscv_vssub_vv_i64m8(__VA_ARGS__) |
| #define | vssub_vv_i64m8_m(...) __riscv_vssub_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vssub_vv_i8m1(...) __riscv_vssub_vv_i8m1(__VA_ARGS__) |
| #define | vssub_vv_i8m1_m(...) __riscv_vssub_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vssub_vv_i8m2(...) __riscv_vssub_vv_i8m2(__VA_ARGS__) |
| #define | vssub_vv_i8m2_m(...) __riscv_vssub_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vssub_vv_i8m4(...) __riscv_vssub_vv_i8m4(__VA_ARGS__) |
| #define | vssub_vv_i8m4_m(...) __riscv_vssub_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vssub_vv_i8m8(...) __riscv_vssub_vv_i8m8(__VA_ARGS__) |
| #define | vssub_vv_i8m8_m(...) __riscv_vssub_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vssub_vv_i8mf2(...) __riscv_vssub_vv_i8mf2(__VA_ARGS__) |
| #define | vssub_vv_i8mf2_m(...) __riscv_vssub_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vssub_vv_i8mf4(...) __riscv_vssub_vv_i8mf4(__VA_ARGS__) |
| #define | vssub_vv_i8mf4_m(...) __riscv_vssub_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vssub_vv_i8mf8(...) __riscv_vssub_vv_i8mf8(__VA_ARGS__) |
| #define | vssub_vv_i8mf8_m(...) __riscv_vssub_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vssub_vx_i16m1(...) __riscv_vssub_vx_i16m1(__VA_ARGS__) |
| #define | vssub_vx_i16m1_m(...) __riscv_vssub_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vssub_vx_i16m2(...) __riscv_vssub_vx_i16m2(__VA_ARGS__) |
| #define | vssub_vx_i16m2_m(...) __riscv_vssub_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vssub_vx_i16m4(...) __riscv_vssub_vx_i16m4(__VA_ARGS__) |
| #define | vssub_vx_i16m4_m(...) __riscv_vssub_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vssub_vx_i16m8(...) __riscv_vssub_vx_i16m8(__VA_ARGS__) |
| #define | vssub_vx_i16m8_m(...) __riscv_vssub_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vssub_vx_i16mf2(...) __riscv_vssub_vx_i16mf2(__VA_ARGS__) |
| #define | vssub_vx_i16mf2_m(...) __riscv_vssub_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vssub_vx_i16mf4(...) __riscv_vssub_vx_i16mf4(__VA_ARGS__) |
| #define | vssub_vx_i16mf4_m(...) __riscv_vssub_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vssub_vx_i32m1(...) __riscv_vssub_vx_i32m1(__VA_ARGS__) |
| #define | vssub_vx_i32m1_m(...) __riscv_vssub_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vssub_vx_i32m2(...) __riscv_vssub_vx_i32m2(__VA_ARGS__) |
| #define | vssub_vx_i32m2_m(...) __riscv_vssub_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vssub_vx_i32m4(...) __riscv_vssub_vx_i32m4(__VA_ARGS__) |
| #define | vssub_vx_i32m4_m(...) __riscv_vssub_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vssub_vx_i32m8(...) __riscv_vssub_vx_i32m8(__VA_ARGS__) |
| #define | vssub_vx_i32m8_m(...) __riscv_vssub_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vssub_vx_i32mf2(...) __riscv_vssub_vx_i32mf2(__VA_ARGS__) |
| #define | vssub_vx_i32mf2_m(...) __riscv_vssub_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vssub_vx_i64m1(...) __riscv_vssub_vx_i64m1(__VA_ARGS__) |
| #define | vssub_vx_i64m1_m(...) __riscv_vssub_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vssub_vx_i64m2(...) __riscv_vssub_vx_i64m2(__VA_ARGS__) |
| #define | vssub_vx_i64m2_m(...) __riscv_vssub_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vssub_vx_i64m4(...) __riscv_vssub_vx_i64m4(__VA_ARGS__) |
| #define | vssub_vx_i64m4_m(...) __riscv_vssub_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vssub_vx_i64m8(...) __riscv_vssub_vx_i64m8(__VA_ARGS__) |
| #define | vssub_vx_i64m8_m(...) __riscv_vssub_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vssub_vx_i8m1(...) __riscv_vssub_vx_i8m1(__VA_ARGS__) |
| #define | vssub_vx_i8m1_m(...) __riscv_vssub_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vssub_vx_i8m2(...) __riscv_vssub_vx_i8m2(__VA_ARGS__) |
| #define | vssub_vx_i8m2_m(...) __riscv_vssub_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vssub_vx_i8m4(...) __riscv_vssub_vx_i8m4(__VA_ARGS__) |
| #define | vssub_vx_i8m4_m(...) __riscv_vssub_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vssub_vx_i8m8(...) __riscv_vssub_vx_i8m8(__VA_ARGS__) |
| #define | vssub_vx_i8m8_m(...) __riscv_vssub_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vssub_vx_i8mf2(...) __riscv_vssub_vx_i8mf2(__VA_ARGS__) |
| #define | vssub_vx_i8mf2_m(...) __riscv_vssub_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vssub_vx_i8mf4(...) __riscv_vssub_vx_i8mf4(__VA_ARGS__) |
| #define | vssub_vx_i8mf4_m(...) __riscv_vssub_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vssub_vx_i8mf8(...) __riscv_vssub_vx_i8mf8(__VA_ARGS__) |
| #define | vssub_vx_i8mf8_m(...) __riscv_vssub_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u16m1(...) __riscv_vssubu_vv_u16m1(__VA_ARGS__) |
| #define | vssubu_vv_u16m1_m(...) __riscv_vssubu_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u16m2(...) __riscv_vssubu_vv_u16m2(__VA_ARGS__) |
| #define | vssubu_vv_u16m2_m(...) __riscv_vssubu_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u16m4(...) __riscv_vssubu_vv_u16m4(__VA_ARGS__) |
| #define | vssubu_vv_u16m4_m(...) __riscv_vssubu_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u16m8(...) __riscv_vssubu_vv_u16m8(__VA_ARGS__) |
| #define | vssubu_vv_u16m8_m(...) __riscv_vssubu_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u16mf2(...) __riscv_vssubu_vv_u16mf2(__VA_ARGS__) |
| #define | vssubu_vv_u16mf2_m(...) __riscv_vssubu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u16mf4(...) __riscv_vssubu_vv_u16mf4(__VA_ARGS__) |
| #define | vssubu_vv_u16mf4_m(...) __riscv_vssubu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u32m1(...) __riscv_vssubu_vv_u32m1(__VA_ARGS__) |
| #define | vssubu_vv_u32m1_m(...) __riscv_vssubu_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u32m2(...) __riscv_vssubu_vv_u32m2(__VA_ARGS__) |
| #define | vssubu_vv_u32m2_m(...) __riscv_vssubu_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u32m4(...) __riscv_vssubu_vv_u32m4(__VA_ARGS__) |
| #define | vssubu_vv_u32m4_m(...) __riscv_vssubu_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u32m8(...) __riscv_vssubu_vv_u32m8(__VA_ARGS__) |
| #define | vssubu_vv_u32m8_m(...) __riscv_vssubu_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u32mf2(...) __riscv_vssubu_vv_u32mf2(__VA_ARGS__) |
| #define | vssubu_vv_u32mf2_m(...) __riscv_vssubu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u64m1(...) __riscv_vssubu_vv_u64m1(__VA_ARGS__) |
| #define | vssubu_vv_u64m1_m(...) __riscv_vssubu_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u64m2(...) __riscv_vssubu_vv_u64m2(__VA_ARGS__) |
| #define | vssubu_vv_u64m2_m(...) __riscv_vssubu_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u64m4(...) __riscv_vssubu_vv_u64m4(__VA_ARGS__) |
| #define | vssubu_vv_u64m4_m(...) __riscv_vssubu_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u64m8(...) __riscv_vssubu_vv_u64m8(__VA_ARGS__) |
| #define | vssubu_vv_u64m8_m(...) __riscv_vssubu_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u8m1(...) __riscv_vssubu_vv_u8m1(__VA_ARGS__) |
| #define | vssubu_vv_u8m1_m(...) __riscv_vssubu_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u8m2(...) __riscv_vssubu_vv_u8m2(__VA_ARGS__) |
| #define | vssubu_vv_u8m2_m(...) __riscv_vssubu_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u8m4(...) __riscv_vssubu_vv_u8m4(__VA_ARGS__) |
| #define | vssubu_vv_u8m4_m(...) __riscv_vssubu_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u8m8(...) __riscv_vssubu_vv_u8m8(__VA_ARGS__) |
| #define | vssubu_vv_u8m8_m(...) __riscv_vssubu_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u8mf2(...) __riscv_vssubu_vv_u8mf2(__VA_ARGS__) |
| #define | vssubu_vv_u8mf2_m(...) __riscv_vssubu_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u8mf4(...) __riscv_vssubu_vv_u8mf4(__VA_ARGS__) |
| #define | vssubu_vv_u8mf4_m(...) __riscv_vssubu_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vssubu_vv_u8mf8(...) __riscv_vssubu_vv_u8mf8(__VA_ARGS__) |
| #define | vssubu_vv_u8mf8_m(...) __riscv_vssubu_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u16m1(...) __riscv_vssubu_vx_u16m1(__VA_ARGS__) |
| #define | vssubu_vx_u16m1_m(...) __riscv_vssubu_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u16m2(...) __riscv_vssubu_vx_u16m2(__VA_ARGS__) |
| #define | vssubu_vx_u16m2_m(...) __riscv_vssubu_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u16m4(...) __riscv_vssubu_vx_u16m4(__VA_ARGS__) |
| #define | vssubu_vx_u16m4_m(...) __riscv_vssubu_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u16m8(...) __riscv_vssubu_vx_u16m8(__VA_ARGS__) |
| #define | vssubu_vx_u16m8_m(...) __riscv_vssubu_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u16mf2(...) __riscv_vssubu_vx_u16mf2(__VA_ARGS__) |
| #define | vssubu_vx_u16mf2_m(...) __riscv_vssubu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u16mf4(...) __riscv_vssubu_vx_u16mf4(__VA_ARGS__) |
| #define | vssubu_vx_u16mf4_m(...) __riscv_vssubu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u32m1(...) __riscv_vssubu_vx_u32m1(__VA_ARGS__) |
| #define | vssubu_vx_u32m1_m(...) __riscv_vssubu_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u32m2(...) __riscv_vssubu_vx_u32m2(__VA_ARGS__) |
| #define | vssubu_vx_u32m2_m(...) __riscv_vssubu_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u32m4(...) __riscv_vssubu_vx_u32m4(__VA_ARGS__) |
| #define | vssubu_vx_u32m4_m(...) __riscv_vssubu_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u32m8(...) __riscv_vssubu_vx_u32m8(__VA_ARGS__) |
| #define | vssubu_vx_u32m8_m(...) __riscv_vssubu_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u32mf2(...) __riscv_vssubu_vx_u32mf2(__VA_ARGS__) |
| #define | vssubu_vx_u32mf2_m(...) __riscv_vssubu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u64m1(...) __riscv_vssubu_vx_u64m1(__VA_ARGS__) |
| #define | vssubu_vx_u64m1_m(...) __riscv_vssubu_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u64m2(...) __riscv_vssubu_vx_u64m2(__VA_ARGS__) |
| #define | vssubu_vx_u64m2_m(...) __riscv_vssubu_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u64m4(...) __riscv_vssubu_vx_u64m4(__VA_ARGS__) |
| #define | vssubu_vx_u64m4_m(...) __riscv_vssubu_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u64m8(...) __riscv_vssubu_vx_u64m8(__VA_ARGS__) |
| #define | vssubu_vx_u64m8_m(...) __riscv_vssubu_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u8m1(...) __riscv_vssubu_vx_u8m1(__VA_ARGS__) |
| #define | vssubu_vx_u8m1_m(...) __riscv_vssubu_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u8m2(...) __riscv_vssubu_vx_u8m2(__VA_ARGS__) |
| #define | vssubu_vx_u8m2_m(...) __riscv_vssubu_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u8m4(...) __riscv_vssubu_vx_u8m4(__VA_ARGS__) |
| #define | vssubu_vx_u8m4_m(...) __riscv_vssubu_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u8m8(...) __riscv_vssubu_vx_u8m8(__VA_ARGS__) |
| #define | vssubu_vx_u8m8_m(...) __riscv_vssubu_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u8mf2(...) __riscv_vssubu_vx_u8mf2(__VA_ARGS__) |
| #define | vssubu_vx_u8mf2_m(...) __riscv_vssubu_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u8mf4(...) __riscv_vssubu_vx_u8mf4(__VA_ARGS__) |
| #define | vssubu_vx_u8mf4_m(...) __riscv_vssubu_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vssubu_vx_u8mf8(...) __riscv_vssubu_vx_u8mf8(__VA_ARGS__) |
| #define | vssubu_vx_u8mf8_m(...) __riscv_vssubu_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vsub_vv_i16m1(...) __riscv_vsub_vv_i16m1(__VA_ARGS__) |
| #define | vsub_vv_i16m1_m(...) __riscv_vsub_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vsub_vv_i16m2(...) __riscv_vsub_vv_i16m2(__VA_ARGS__) |
| #define | vsub_vv_i16m2_m(...) __riscv_vsub_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vsub_vv_i16m4(...) __riscv_vsub_vv_i16m4(__VA_ARGS__) |
| #define | vsub_vv_i16m4_m(...) __riscv_vsub_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vsub_vv_i16m8(...) __riscv_vsub_vv_i16m8(__VA_ARGS__) |
| #define | vsub_vv_i16m8_m(...) __riscv_vsub_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vsub_vv_i16mf2(...) __riscv_vsub_vv_i16mf2(__VA_ARGS__) |
| #define | vsub_vv_i16mf2_m(...) __riscv_vsub_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vsub_vv_i16mf4(...) __riscv_vsub_vv_i16mf4(__VA_ARGS__) |
| #define | vsub_vv_i16mf4_m(...) __riscv_vsub_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vsub_vv_i32m1(...) __riscv_vsub_vv_i32m1(__VA_ARGS__) |
| #define | vsub_vv_i32m1_m(...) __riscv_vsub_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vsub_vv_i32m2(...) __riscv_vsub_vv_i32m2(__VA_ARGS__) |
| #define | vsub_vv_i32m2_m(...) __riscv_vsub_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vsub_vv_i32m4(...) __riscv_vsub_vv_i32m4(__VA_ARGS__) |
| #define | vsub_vv_i32m4_m(...) __riscv_vsub_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vsub_vv_i32m8(...) __riscv_vsub_vv_i32m8(__VA_ARGS__) |
| #define | vsub_vv_i32m8_m(...) __riscv_vsub_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vsub_vv_i32mf2(...) __riscv_vsub_vv_i32mf2(__VA_ARGS__) |
| #define | vsub_vv_i32mf2_m(...) __riscv_vsub_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vsub_vv_i64m1(...) __riscv_vsub_vv_i64m1(__VA_ARGS__) |
| #define | vsub_vv_i64m1_m(...) __riscv_vsub_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vsub_vv_i64m2(...) __riscv_vsub_vv_i64m2(__VA_ARGS__) |
| #define | vsub_vv_i64m2_m(...) __riscv_vsub_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vsub_vv_i64m4(...) __riscv_vsub_vv_i64m4(__VA_ARGS__) |
| #define | vsub_vv_i64m4_m(...) __riscv_vsub_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vsub_vv_i64m8(...) __riscv_vsub_vv_i64m8(__VA_ARGS__) |
| #define | vsub_vv_i64m8_m(...) __riscv_vsub_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vsub_vv_i8m1(...) __riscv_vsub_vv_i8m1(__VA_ARGS__) |
| #define | vsub_vv_i8m1_m(...) __riscv_vsub_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vsub_vv_i8m2(...) __riscv_vsub_vv_i8m2(__VA_ARGS__) |
| #define | vsub_vv_i8m2_m(...) __riscv_vsub_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vsub_vv_i8m4(...) __riscv_vsub_vv_i8m4(__VA_ARGS__) |
| #define | vsub_vv_i8m4_m(...) __riscv_vsub_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vsub_vv_i8m8(...) __riscv_vsub_vv_i8m8(__VA_ARGS__) |
| #define | vsub_vv_i8m8_m(...) __riscv_vsub_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vsub_vv_i8mf2(...) __riscv_vsub_vv_i8mf2(__VA_ARGS__) |
| #define | vsub_vv_i8mf2_m(...) __riscv_vsub_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vsub_vv_i8mf4(...) __riscv_vsub_vv_i8mf4(__VA_ARGS__) |
| #define | vsub_vv_i8mf4_m(...) __riscv_vsub_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vsub_vv_i8mf8(...) __riscv_vsub_vv_i8mf8(__VA_ARGS__) |
| #define | vsub_vv_i8mf8_m(...) __riscv_vsub_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vsub_vv_u16m1(...) __riscv_vsub_vv_u16m1(__VA_ARGS__) |
| #define | vsub_vv_u16m1_m(...) __riscv_vsub_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vsub_vv_u16m2(...) __riscv_vsub_vv_u16m2(__VA_ARGS__) |
| #define | vsub_vv_u16m2_m(...) __riscv_vsub_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vsub_vv_u16m4(...) __riscv_vsub_vv_u16m4(__VA_ARGS__) |
| #define | vsub_vv_u16m4_m(...) __riscv_vsub_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vsub_vv_u16m8(...) __riscv_vsub_vv_u16m8(__VA_ARGS__) |
| #define | vsub_vv_u16m8_m(...) __riscv_vsub_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vsub_vv_u16mf2(...) __riscv_vsub_vv_u16mf2(__VA_ARGS__) |
| #define | vsub_vv_u16mf2_m(...) __riscv_vsub_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vsub_vv_u16mf4(...) __riscv_vsub_vv_u16mf4(__VA_ARGS__) |
| #define | vsub_vv_u16mf4_m(...) __riscv_vsub_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vsub_vv_u32m1(...) __riscv_vsub_vv_u32m1(__VA_ARGS__) |
| #define | vsub_vv_u32m1_m(...) __riscv_vsub_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vsub_vv_u32m2(...) __riscv_vsub_vv_u32m2(__VA_ARGS__) |
| #define | vsub_vv_u32m2_m(...) __riscv_vsub_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vsub_vv_u32m4(...) __riscv_vsub_vv_u32m4(__VA_ARGS__) |
| #define | vsub_vv_u32m4_m(...) __riscv_vsub_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vsub_vv_u32m8(...) __riscv_vsub_vv_u32m8(__VA_ARGS__) |
| #define | vsub_vv_u32m8_m(...) __riscv_vsub_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vsub_vv_u32mf2(...) __riscv_vsub_vv_u32mf2(__VA_ARGS__) |
| #define | vsub_vv_u32mf2_m(...) __riscv_vsub_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vsub_vv_u64m1(...) __riscv_vsub_vv_u64m1(__VA_ARGS__) |
| #define | vsub_vv_u64m1_m(...) __riscv_vsub_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vsub_vv_u64m2(...) __riscv_vsub_vv_u64m2(__VA_ARGS__) |
| #define | vsub_vv_u64m2_m(...) __riscv_vsub_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vsub_vv_u64m4(...) __riscv_vsub_vv_u64m4(__VA_ARGS__) |
| #define | vsub_vv_u64m4_m(...) __riscv_vsub_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vsub_vv_u64m8(...) __riscv_vsub_vv_u64m8(__VA_ARGS__) |
| #define | vsub_vv_u64m8_m(...) __riscv_vsub_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vsub_vv_u8m1(...) __riscv_vsub_vv_u8m1(__VA_ARGS__) |
| #define | vsub_vv_u8m1_m(...) __riscv_vsub_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vsub_vv_u8m2(...) __riscv_vsub_vv_u8m2(__VA_ARGS__) |
| #define | vsub_vv_u8m2_m(...) __riscv_vsub_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vsub_vv_u8m4(...) __riscv_vsub_vv_u8m4(__VA_ARGS__) |
| #define | vsub_vv_u8m4_m(...) __riscv_vsub_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vsub_vv_u8m8(...) __riscv_vsub_vv_u8m8(__VA_ARGS__) |
| #define | vsub_vv_u8m8_m(...) __riscv_vsub_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vsub_vv_u8mf2(...) __riscv_vsub_vv_u8mf2(__VA_ARGS__) |
| #define | vsub_vv_u8mf2_m(...) __riscv_vsub_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vsub_vv_u8mf4(...) __riscv_vsub_vv_u8mf4(__VA_ARGS__) |
| #define | vsub_vv_u8mf4_m(...) __riscv_vsub_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vsub_vv_u8mf8(...) __riscv_vsub_vv_u8mf8(__VA_ARGS__) |
| #define | vsub_vv_u8mf8_m(...) __riscv_vsub_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vsub_vx_i16m1(...) __riscv_vsub_vx_i16m1(__VA_ARGS__) |
| #define | vsub_vx_i16m1_m(...) __riscv_vsub_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vsub_vx_i16m2(...) __riscv_vsub_vx_i16m2(__VA_ARGS__) |
| #define | vsub_vx_i16m2_m(...) __riscv_vsub_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vsub_vx_i16m4(...) __riscv_vsub_vx_i16m4(__VA_ARGS__) |
| #define | vsub_vx_i16m4_m(...) __riscv_vsub_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vsub_vx_i16m8(...) __riscv_vsub_vx_i16m8(__VA_ARGS__) |
| #define | vsub_vx_i16m8_m(...) __riscv_vsub_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vsub_vx_i16mf2(...) __riscv_vsub_vx_i16mf2(__VA_ARGS__) |
| #define | vsub_vx_i16mf2_m(...) __riscv_vsub_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vsub_vx_i16mf4(...) __riscv_vsub_vx_i16mf4(__VA_ARGS__) |
| #define | vsub_vx_i16mf4_m(...) __riscv_vsub_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vsub_vx_i32m1(...) __riscv_vsub_vx_i32m1(__VA_ARGS__) |
| #define | vsub_vx_i32m1_m(...) __riscv_vsub_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vsub_vx_i32m2(...) __riscv_vsub_vx_i32m2(__VA_ARGS__) |
| #define | vsub_vx_i32m2_m(...) __riscv_vsub_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vsub_vx_i32m4(...) __riscv_vsub_vx_i32m4(__VA_ARGS__) |
| #define | vsub_vx_i32m4_m(...) __riscv_vsub_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vsub_vx_i32m8(...) __riscv_vsub_vx_i32m8(__VA_ARGS__) |
| #define | vsub_vx_i32m8_m(...) __riscv_vsub_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vsub_vx_i32mf2(...) __riscv_vsub_vx_i32mf2(__VA_ARGS__) |
| #define | vsub_vx_i32mf2_m(...) __riscv_vsub_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vsub_vx_i64m1(...) __riscv_vsub_vx_i64m1(__VA_ARGS__) |
| #define | vsub_vx_i64m1_m(...) __riscv_vsub_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vsub_vx_i64m2(...) __riscv_vsub_vx_i64m2(__VA_ARGS__) |
| #define | vsub_vx_i64m2_m(...) __riscv_vsub_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vsub_vx_i64m4(...) __riscv_vsub_vx_i64m4(__VA_ARGS__) |
| #define | vsub_vx_i64m4_m(...) __riscv_vsub_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vsub_vx_i64m8(...) __riscv_vsub_vx_i64m8(__VA_ARGS__) |
| #define | vsub_vx_i64m8_m(...) __riscv_vsub_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vsub_vx_i8m1(...) __riscv_vsub_vx_i8m1(__VA_ARGS__) |
| #define | vsub_vx_i8m1_m(...) __riscv_vsub_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vsub_vx_i8m2(...) __riscv_vsub_vx_i8m2(__VA_ARGS__) |
| #define | vsub_vx_i8m2_m(...) __riscv_vsub_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vsub_vx_i8m4(...) __riscv_vsub_vx_i8m4(__VA_ARGS__) |
| #define | vsub_vx_i8m4_m(...) __riscv_vsub_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vsub_vx_i8m8(...) __riscv_vsub_vx_i8m8(__VA_ARGS__) |
| #define | vsub_vx_i8m8_m(...) __riscv_vsub_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vsub_vx_i8mf2(...) __riscv_vsub_vx_i8mf2(__VA_ARGS__) |
| #define | vsub_vx_i8mf2_m(...) __riscv_vsub_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vsub_vx_i8mf4(...) __riscv_vsub_vx_i8mf4(__VA_ARGS__) |
| #define | vsub_vx_i8mf4_m(...) __riscv_vsub_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vsub_vx_i8mf8(...) __riscv_vsub_vx_i8mf8(__VA_ARGS__) |
| #define | vsub_vx_i8mf8_m(...) __riscv_vsub_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vsub_vx_u16m1(...) __riscv_vsub_vx_u16m1(__VA_ARGS__) |
| #define | vsub_vx_u16m1_m(...) __riscv_vsub_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vsub_vx_u16m2(...) __riscv_vsub_vx_u16m2(__VA_ARGS__) |
| #define | vsub_vx_u16m2_m(...) __riscv_vsub_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vsub_vx_u16m4(...) __riscv_vsub_vx_u16m4(__VA_ARGS__) |
| #define | vsub_vx_u16m4_m(...) __riscv_vsub_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vsub_vx_u16m8(...) __riscv_vsub_vx_u16m8(__VA_ARGS__) |
| #define | vsub_vx_u16m8_m(...) __riscv_vsub_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vsub_vx_u16mf2(...) __riscv_vsub_vx_u16mf2(__VA_ARGS__) |
| #define | vsub_vx_u16mf2_m(...) __riscv_vsub_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vsub_vx_u16mf4(...) __riscv_vsub_vx_u16mf4(__VA_ARGS__) |
| #define | vsub_vx_u16mf4_m(...) __riscv_vsub_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vsub_vx_u32m1(...) __riscv_vsub_vx_u32m1(__VA_ARGS__) |
| #define | vsub_vx_u32m1_m(...) __riscv_vsub_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vsub_vx_u32m2(...) __riscv_vsub_vx_u32m2(__VA_ARGS__) |
| #define | vsub_vx_u32m2_m(...) __riscv_vsub_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vsub_vx_u32m4(...) __riscv_vsub_vx_u32m4(__VA_ARGS__) |
| #define | vsub_vx_u32m4_m(...) __riscv_vsub_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vsub_vx_u32m8(...) __riscv_vsub_vx_u32m8(__VA_ARGS__) |
| #define | vsub_vx_u32m8_m(...) __riscv_vsub_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vsub_vx_u32mf2(...) __riscv_vsub_vx_u32mf2(__VA_ARGS__) |
| #define | vsub_vx_u32mf2_m(...) __riscv_vsub_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vsub_vx_u64m1(...) __riscv_vsub_vx_u64m1(__VA_ARGS__) |
| #define | vsub_vx_u64m1_m(...) __riscv_vsub_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vsub_vx_u64m2(...) __riscv_vsub_vx_u64m2(__VA_ARGS__) |
| #define | vsub_vx_u64m2_m(...) __riscv_vsub_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vsub_vx_u64m4(...) __riscv_vsub_vx_u64m4(__VA_ARGS__) |
| #define | vsub_vx_u64m4_m(...) __riscv_vsub_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vsub_vx_u64m8(...) __riscv_vsub_vx_u64m8(__VA_ARGS__) |
| #define | vsub_vx_u64m8_m(...) __riscv_vsub_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vsub_vx_u8m1(...) __riscv_vsub_vx_u8m1(__VA_ARGS__) |
| #define | vsub_vx_u8m1_m(...) __riscv_vsub_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vsub_vx_u8m2(...) __riscv_vsub_vx_u8m2(__VA_ARGS__) |
| #define | vsub_vx_u8m2_m(...) __riscv_vsub_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vsub_vx_u8m4(...) __riscv_vsub_vx_u8m4(__VA_ARGS__) |
| #define | vsub_vx_u8m4_m(...) __riscv_vsub_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vsub_vx_u8m8(...) __riscv_vsub_vx_u8m8(__VA_ARGS__) |
| #define | vsub_vx_u8m8_m(...) __riscv_vsub_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vsub_vx_u8mf2(...) __riscv_vsub_vx_u8mf2(__VA_ARGS__) |
| #define | vsub_vx_u8mf2_m(...) __riscv_vsub_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vsub_vx_u8mf4(...) __riscv_vsub_vx_u8mf4(__VA_ARGS__) |
| #define | vsub_vx_u8mf4_m(...) __riscv_vsub_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vsub_vx_u8mf8(...) __riscv_vsub_vx_u8mf8(__VA_ARGS__) |
| #define | vsub_vx_u8mf8_m(...) __riscv_vsub_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vsuxei16_v_f16m1(...) __riscv_vsuxei16_v_f16m1(__VA_ARGS__) |
| #define | vsuxei16_v_f16m1_m(...) __riscv_vsuxei16_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxei16_v_f16m2(...) __riscv_vsuxei16_v_f16m2(__VA_ARGS__) |
| #define | vsuxei16_v_f16m2_m(...) __riscv_vsuxei16_v_f16m2_m(__VA_ARGS__) |
| #define | vsuxei16_v_f16m4(...) __riscv_vsuxei16_v_f16m4(__VA_ARGS__) |
| #define | vsuxei16_v_f16m4_m(...) __riscv_vsuxei16_v_f16m4_m(__VA_ARGS__) |
| #define | vsuxei16_v_f16m8(...) __riscv_vsuxei16_v_f16m8(__VA_ARGS__) |
| #define | vsuxei16_v_f16m8_m(...) __riscv_vsuxei16_v_f16m8_m(__VA_ARGS__) |
| #define | vsuxei16_v_f16mf2(...) __riscv_vsuxei16_v_f16mf2(__VA_ARGS__) |
| #define | vsuxei16_v_f16mf2_m(...) __riscv_vsuxei16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxei16_v_f16mf4(...) __riscv_vsuxei16_v_f16mf4(__VA_ARGS__) |
| #define | vsuxei16_v_f16mf4_m(...) __riscv_vsuxei16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxei16_v_f32m1(...) __riscv_vsuxei16_v_f32m1(__VA_ARGS__) |
| #define | vsuxei16_v_f32m1_m(...) __riscv_vsuxei16_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxei16_v_f32m2(...) __riscv_vsuxei16_v_f32m2(__VA_ARGS__) |
| #define | vsuxei16_v_f32m2_m(...) __riscv_vsuxei16_v_f32m2_m(__VA_ARGS__) |
| #define | vsuxei16_v_f32m4(...) __riscv_vsuxei16_v_f32m4(__VA_ARGS__) |
| #define | vsuxei16_v_f32m4_m(...) __riscv_vsuxei16_v_f32m4_m(__VA_ARGS__) |
| #define | vsuxei16_v_f32m8(...) __riscv_vsuxei16_v_f32m8(__VA_ARGS__) |
| #define | vsuxei16_v_f32m8_m(...) __riscv_vsuxei16_v_f32m8_m(__VA_ARGS__) |
| #define | vsuxei16_v_f32mf2(...) __riscv_vsuxei16_v_f32mf2(__VA_ARGS__) |
| #define | vsuxei16_v_f32mf2_m(...) __riscv_vsuxei16_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxei16_v_f64m1(...) __riscv_vsuxei16_v_f64m1(__VA_ARGS__) |
| #define | vsuxei16_v_f64m1_m(...) __riscv_vsuxei16_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxei16_v_f64m2(...) __riscv_vsuxei16_v_f64m2(__VA_ARGS__) |
| #define | vsuxei16_v_f64m2_m(...) __riscv_vsuxei16_v_f64m2_m(__VA_ARGS__) |
| #define | vsuxei16_v_f64m4(...) __riscv_vsuxei16_v_f64m4(__VA_ARGS__) |
| #define | vsuxei16_v_f64m4_m(...) __riscv_vsuxei16_v_f64m4_m(__VA_ARGS__) |
| #define | vsuxei16_v_f64m8(...) __riscv_vsuxei16_v_f64m8(__VA_ARGS__) |
| #define | vsuxei16_v_f64m8_m(...) __riscv_vsuxei16_v_f64m8_m(__VA_ARGS__) |
| #define | vsuxei16_v_i16m1(...) __riscv_vsuxei16_v_i16m1(__VA_ARGS__) |
| #define | vsuxei16_v_i16m1_m(...) __riscv_vsuxei16_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxei16_v_i16m2(...) __riscv_vsuxei16_v_i16m2(__VA_ARGS__) |
| #define | vsuxei16_v_i16m2_m(...) __riscv_vsuxei16_v_i16m2_m(__VA_ARGS__) |
| #define | vsuxei16_v_i16m4(...) __riscv_vsuxei16_v_i16m4(__VA_ARGS__) |
| #define | vsuxei16_v_i16m4_m(...) __riscv_vsuxei16_v_i16m4_m(__VA_ARGS__) |
| #define | vsuxei16_v_i16m8(...) __riscv_vsuxei16_v_i16m8(__VA_ARGS__) |
| #define | vsuxei16_v_i16m8_m(...) __riscv_vsuxei16_v_i16m8_m(__VA_ARGS__) |
| #define | vsuxei16_v_i16mf2(...) __riscv_vsuxei16_v_i16mf2(__VA_ARGS__) |
| #define | vsuxei16_v_i16mf2_m(...) __riscv_vsuxei16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxei16_v_i16mf4(...) __riscv_vsuxei16_v_i16mf4(__VA_ARGS__) |
| #define | vsuxei16_v_i16mf4_m(...) __riscv_vsuxei16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxei16_v_i32m1(...) __riscv_vsuxei16_v_i32m1(__VA_ARGS__) |
| #define | vsuxei16_v_i32m1_m(...) __riscv_vsuxei16_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxei16_v_i32m2(...) __riscv_vsuxei16_v_i32m2(__VA_ARGS__) |
| #define | vsuxei16_v_i32m2_m(...) __riscv_vsuxei16_v_i32m2_m(__VA_ARGS__) |
| #define | vsuxei16_v_i32m4(...) __riscv_vsuxei16_v_i32m4(__VA_ARGS__) |
| #define | vsuxei16_v_i32m4_m(...) __riscv_vsuxei16_v_i32m4_m(__VA_ARGS__) |
| #define | vsuxei16_v_i32m8(...) __riscv_vsuxei16_v_i32m8(__VA_ARGS__) |
| #define | vsuxei16_v_i32m8_m(...) __riscv_vsuxei16_v_i32m8_m(__VA_ARGS__) |
| #define | vsuxei16_v_i32mf2(...) __riscv_vsuxei16_v_i32mf2(__VA_ARGS__) |
| #define | vsuxei16_v_i32mf2_m(...) __riscv_vsuxei16_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxei16_v_i64m1(...) __riscv_vsuxei16_v_i64m1(__VA_ARGS__) |
| #define | vsuxei16_v_i64m1_m(...) __riscv_vsuxei16_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxei16_v_i64m2(...) __riscv_vsuxei16_v_i64m2(__VA_ARGS__) |
| #define | vsuxei16_v_i64m2_m(...) __riscv_vsuxei16_v_i64m2_m(__VA_ARGS__) |
| #define | vsuxei16_v_i64m4(...) __riscv_vsuxei16_v_i64m4(__VA_ARGS__) |
| #define | vsuxei16_v_i64m4_m(...) __riscv_vsuxei16_v_i64m4_m(__VA_ARGS__) |
| #define | vsuxei16_v_i64m8(...) __riscv_vsuxei16_v_i64m8(__VA_ARGS__) |
| #define | vsuxei16_v_i64m8_m(...) __riscv_vsuxei16_v_i64m8_m(__VA_ARGS__) |
| #define | vsuxei16_v_i8m1(...) __riscv_vsuxei16_v_i8m1(__VA_ARGS__) |
| #define | vsuxei16_v_i8m1_m(...) __riscv_vsuxei16_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxei16_v_i8m2(...) __riscv_vsuxei16_v_i8m2(__VA_ARGS__) |
| #define | vsuxei16_v_i8m2_m(...) __riscv_vsuxei16_v_i8m2_m(__VA_ARGS__) |
| #define | vsuxei16_v_i8m4(...) __riscv_vsuxei16_v_i8m4(__VA_ARGS__) |
| #define | vsuxei16_v_i8m4_m(...) __riscv_vsuxei16_v_i8m4_m(__VA_ARGS__) |
| #define | vsuxei16_v_i8mf2(...) __riscv_vsuxei16_v_i8mf2(__VA_ARGS__) |
| #define | vsuxei16_v_i8mf2_m(...) __riscv_vsuxei16_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxei16_v_i8mf4(...) __riscv_vsuxei16_v_i8mf4(__VA_ARGS__) |
| #define | vsuxei16_v_i8mf4_m(...) __riscv_vsuxei16_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxei16_v_i8mf8(...) __riscv_vsuxei16_v_i8mf8(__VA_ARGS__) |
| #define | vsuxei16_v_i8mf8_m(...) __riscv_vsuxei16_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxei16_v_u16m1(...) __riscv_vsuxei16_v_u16m1(__VA_ARGS__) |
| #define | vsuxei16_v_u16m1_m(...) __riscv_vsuxei16_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxei16_v_u16m2(...) __riscv_vsuxei16_v_u16m2(__VA_ARGS__) |
| #define | vsuxei16_v_u16m2_m(...) __riscv_vsuxei16_v_u16m2_m(__VA_ARGS__) |
| #define | vsuxei16_v_u16m4(...) __riscv_vsuxei16_v_u16m4(__VA_ARGS__) |
| #define | vsuxei16_v_u16m4_m(...) __riscv_vsuxei16_v_u16m4_m(__VA_ARGS__) |
| #define | vsuxei16_v_u16m8(...) __riscv_vsuxei16_v_u16m8(__VA_ARGS__) |
| #define | vsuxei16_v_u16m8_m(...) __riscv_vsuxei16_v_u16m8_m(__VA_ARGS__) |
| #define | vsuxei16_v_u16mf2(...) __riscv_vsuxei16_v_u16mf2(__VA_ARGS__) |
| #define | vsuxei16_v_u16mf2_m(...) __riscv_vsuxei16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxei16_v_u16mf4(...) __riscv_vsuxei16_v_u16mf4(__VA_ARGS__) |
| #define | vsuxei16_v_u16mf4_m(...) __riscv_vsuxei16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxei16_v_u32m1(...) __riscv_vsuxei16_v_u32m1(__VA_ARGS__) |
| #define | vsuxei16_v_u32m1_m(...) __riscv_vsuxei16_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxei16_v_u32m2(...) __riscv_vsuxei16_v_u32m2(__VA_ARGS__) |
| #define | vsuxei16_v_u32m2_m(...) __riscv_vsuxei16_v_u32m2_m(__VA_ARGS__) |
| #define | vsuxei16_v_u32m4(...) __riscv_vsuxei16_v_u32m4(__VA_ARGS__) |
| #define | vsuxei16_v_u32m4_m(...) __riscv_vsuxei16_v_u32m4_m(__VA_ARGS__) |
| #define | vsuxei16_v_u32m8(...) __riscv_vsuxei16_v_u32m8(__VA_ARGS__) |
| #define | vsuxei16_v_u32m8_m(...) __riscv_vsuxei16_v_u32m8_m(__VA_ARGS__) |
| #define | vsuxei16_v_u32mf2(...) __riscv_vsuxei16_v_u32mf2(__VA_ARGS__) |
| #define | vsuxei16_v_u32mf2_m(...) __riscv_vsuxei16_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxei16_v_u64m1(...) __riscv_vsuxei16_v_u64m1(__VA_ARGS__) |
| #define | vsuxei16_v_u64m1_m(...) __riscv_vsuxei16_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxei16_v_u64m2(...) __riscv_vsuxei16_v_u64m2(__VA_ARGS__) |
| #define | vsuxei16_v_u64m2_m(...) __riscv_vsuxei16_v_u64m2_m(__VA_ARGS__) |
| #define | vsuxei16_v_u64m4(...) __riscv_vsuxei16_v_u64m4(__VA_ARGS__) |
| #define | vsuxei16_v_u64m4_m(...) __riscv_vsuxei16_v_u64m4_m(__VA_ARGS__) |
| #define | vsuxei16_v_u64m8(...) __riscv_vsuxei16_v_u64m8(__VA_ARGS__) |
| #define | vsuxei16_v_u64m8_m(...) __riscv_vsuxei16_v_u64m8_m(__VA_ARGS__) |
| #define | vsuxei16_v_u8m1(...) __riscv_vsuxei16_v_u8m1(__VA_ARGS__) |
| #define | vsuxei16_v_u8m1_m(...) __riscv_vsuxei16_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxei16_v_u8m2(...) __riscv_vsuxei16_v_u8m2(__VA_ARGS__) |
| #define | vsuxei16_v_u8m2_m(...) __riscv_vsuxei16_v_u8m2_m(__VA_ARGS__) |
| #define | vsuxei16_v_u8m4(...) __riscv_vsuxei16_v_u8m4(__VA_ARGS__) |
| #define | vsuxei16_v_u8m4_m(...) __riscv_vsuxei16_v_u8m4_m(__VA_ARGS__) |
| #define | vsuxei16_v_u8mf2(...) __riscv_vsuxei16_v_u8mf2(__VA_ARGS__) |
| #define | vsuxei16_v_u8mf2_m(...) __riscv_vsuxei16_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxei16_v_u8mf4(...) __riscv_vsuxei16_v_u8mf4(__VA_ARGS__) |
| #define | vsuxei16_v_u8mf4_m(...) __riscv_vsuxei16_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxei16_v_u8mf8(...) __riscv_vsuxei16_v_u8mf8(__VA_ARGS__) |
| #define | vsuxei16_v_u8mf8_m(...) __riscv_vsuxei16_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxei32_v_f16m1(...) __riscv_vsuxei32_v_f16m1(__VA_ARGS__) |
| #define | vsuxei32_v_f16m1_m(...) __riscv_vsuxei32_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxei32_v_f16m2(...) __riscv_vsuxei32_v_f16m2(__VA_ARGS__) |
| #define | vsuxei32_v_f16m2_m(...) __riscv_vsuxei32_v_f16m2_m(__VA_ARGS__) |
| #define | vsuxei32_v_f16m4(...) __riscv_vsuxei32_v_f16m4(__VA_ARGS__) |
| #define | vsuxei32_v_f16m4_m(...) __riscv_vsuxei32_v_f16m4_m(__VA_ARGS__) |
| #define | vsuxei32_v_f16mf2(...) __riscv_vsuxei32_v_f16mf2(__VA_ARGS__) |
| #define | vsuxei32_v_f16mf2_m(...) __riscv_vsuxei32_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxei32_v_f16mf4(...) __riscv_vsuxei32_v_f16mf4(__VA_ARGS__) |
| #define | vsuxei32_v_f16mf4_m(...) __riscv_vsuxei32_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxei32_v_f32m1(...) __riscv_vsuxei32_v_f32m1(__VA_ARGS__) |
| #define | vsuxei32_v_f32m1_m(...) __riscv_vsuxei32_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxei32_v_f32m2(...) __riscv_vsuxei32_v_f32m2(__VA_ARGS__) |
| #define | vsuxei32_v_f32m2_m(...) __riscv_vsuxei32_v_f32m2_m(__VA_ARGS__) |
| #define | vsuxei32_v_f32m4(...) __riscv_vsuxei32_v_f32m4(__VA_ARGS__) |
| #define | vsuxei32_v_f32m4_m(...) __riscv_vsuxei32_v_f32m4_m(__VA_ARGS__) |
| #define | vsuxei32_v_f32m8(...) __riscv_vsuxei32_v_f32m8(__VA_ARGS__) |
| #define | vsuxei32_v_f32m8_m(...) __riscv_vsuxei32_v_f32m8_m(__VA_ARGS__) |
| #define | vsuxei32_v_f32mf2(...) __riscv_vsuxei32_v_f32mf2(__VA_ARGS__) |
| #define | vsuxei32_v_f32mf2_m(...) __riscv_vsuxei32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxei32_v_f64m1(...) __riscv_vsuxei32_v_f64m1(__VA_ARGS__) |
| #define | vsuxei32_v_f64m1_m(...) __riscv_vsuxei32_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxei32_v_f64m2(...) __riscv_vsuxei32_v_f64m2(__VA_ARGS__) |
| #define | vsuxei32_v_f64m2_m(...) __riscv_vsuxei32_v_f64m2_m(__VA_ARGS__) |
| #define | vsuxei32_v_f64m4(...) __riscv_vsuxei32_v_f64m4(__VA_ARGS__) |
| #define | vsuxei32_v_f64m4_m(...) __riscv_vsuxei32_v_f64m4_m(__VA_ARGS__) |
| #define | vsuxei32_v_f64m8(...) __riscv_vsuxei32_v_f64m8(__VA_ARGS__) |
| #define | vsuxei32_v_f64m8_m(...) __riscv_vsuxei32_v_f64m8_m(__VA_ARGS__) |
| #define | vsuxei32_v_i16m1(...) __riscv_vsuxei32_v_i16m1(__VA_ARGS__) |
| #define | vsuxei32_v_i16m1_m(...) __riscv_vsuxei32_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxei32_v_i16m2(...) __riscv_vsuxei32_v_i16m2(__VA_ARGS__) |
| #define | vsuxei32_v_i16m2_m(...) __riscv_vsuxei32_v_i16m2_m(__VA_ARGS__) |
| #define | vsuxei32_v_i16m4(...) __riscv_vsuxei32_v_i16m4(__VA_ARGS__) |
| #define | vsuxei32_v_i16m4_m(...) __riscv_vsuxei32_v_i16m4_m(__VA_ARGS__) |
| #define | vsuxei32_v_i16mf2(...) __riscv_vsuxei32_v_i16mf2(__VA_ARGS__) |
| #define | vsuxei32_v_i16mf2_m(...) __riscv_vsuxei32_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxei32_v_i16mf4(...) __riscv_vsuxei32_v_i16mf4(__VA_ARGS__) |
| #define | vsuxei32_v_i16mf4_m(...) __riscv_vsuxei32_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxei32_v_i32m1(...) __riscv_vsuxei32_v_i32m1(__VA_ARGS__) |
| #define | vsuxei32_v_i32m1_m(...) __riscv_vsuxei32_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxei32_v_i32m2(...) __riscv_vsuxei32_v_i32m2(__VA_ARGS__) |
| #define | vsuxei32_v_i32m2_m(...) __riscv_vsuxei32_v_i32m2_m(__VA_ARGS__) |
| #define | vsuxei32_v_i32m4(...) __riscv_vsuxei32_v_i32m4(__VA_ARGS__) |
| #define | vsuxei32_v_i32m4_m(...) __riscv_vsuxei32_v_i32m4_m(__VA_ARGS__) |
| #define | vsuxei32_v_i32m8(...) __riscv_vsuxei32_v_i32m8(__VA_ARGS__) |
| #define | vsuxei32_v_i32m8_m(...) __riscv_vsuxei32_v_i32m8_m(__VA_ARGS__) |
| #define | vsuxei32_v_i32mf2(...) __riscv_vsuxei32_v_i32mf2(__VA_ARGS__) |
| #define | vsuxei32_v_i32mf2_m(...) __riscv_vsuxei32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxei32_v_i64m1(...) __riscv_vsuxei32_v_i64m1(__VA_ARGS__) |
| #define | vsuxei32_v_i64m1_m(...) __riscv_vsuxei32_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxei32_v_i64m2(...) __riscv_vsuxei32_v_i64m2(__VA_ARGS__) |
| #define | vsuxei32_v_i64m2_m(...) __riscv_vsuxei32_v_i64m2_m(__VA_ARGS__) |
| #define | vsuxei32_v_i64m4(...) __riscv_vsuxei32_v_i64m4(__VA_ARGS__) |
| #define | vsuxei32_v_i64m4_m(...) __riscv_vsuxei32_v_i64m4_m(__VA_ARGS__) |
| #define | vsuxei32_v_i64m8(...) __riscv_vsuxei32_v_i64m8(__VA_ARGS__) |
| #define | vsuxei32_v_i64m8_m(...) __riscv_vsuxei32_v_i64m8_m(__VA_ARGS__) |
| #define | vsuxei32_v_i8m1(...) __riscv_vsuxei32_v_i8m1(__VA_ARGS__) |
| #define | vsuxei32_v_i8m1_m(...) __riscv_vsuxei32_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxei32_v_i8m2(...) __riscv_vsuxei32_v_i8m2(__VA_ARGS__) |
| #define | vsuxei32_v_i8m2_m(...) __riscv_vsuxei32_v_i8m2_m(__VA_ARGS__) |
| #define | vsuxei32_v_i8mf2(...) __riscv_vsuxei32_v_i8mf2(__VA_ARGS__) |
| #define | vsuxei32_v_i8mf2_m(...) __riscv_vsuxei32_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxei32_v_i8mf4(...) __riscv_vsuxei32_v_i8mf4(__VA_ARGS__) |
| #define | vsuxei32_v_i8mf4_m(...) __riscv_vsuxei32_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxei32_v_i8mf8(...) __riscv_vsuxei32_v_i8mf8(__VA_ARGS__) |
| #define | vsuxei32_v_i8mf8_m(...) __riscv_vsuxei32_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxei32_v_u16m1(...) __riscv_vsuxei32_v_u16m1(__VA_ARGS__) |
| #define | vsuxei32_v_u16m1_m(...) __riscv_vsuxei32_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxei32_v_u16m2(...) __riscv_vsuxei32_v_u16m2(__VA_ARGS__) |
| #define | vsuxei32_v_u16m2_m(...) __riscv_vsuxei32_v_u16m2_m(__VA_ARGS__) |
| #define | vsuxei32_v_u16m4(...) __riscv_vsuxei32_v_u16m4(__VA_ARGS__) |
| #define | vsuxei32_v_u16m4_m(...) __riscv_vsuxei32_v_u16m4_m(__VA_ARGS__) |
| #define | vsuxei32_v_u16mf2(...) __riscv_vsuxei32_v_u16mf2(__VA_ARGS__) |
| #define | vsuxei32_v_u16mf2_m(...) __riscv_vsuxei32_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxei32_v_u16mf4(...) __riscv_vsuxei32_v_u16mf4(__VA_ARGS__) |
| #define | vsuxei32_v_u16mf4_m(...) __riscv_vsuxei32_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxei32_v_u32m1(...) __riscv_vsuxei32_v_u32m1(__VA_ARGS__) |
| #define | vsuxei32_v_u32m1_m(...) __riscv_vsuxei32_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxei32_v_u32m2(...) __riscv_vsuxei32_v_u32m2(__VA_ARGS__) |
| #define | vsuxei32_v_u32m2_m(...) __riscv_vsuxei32_v_u32m2_m(__VA_ARGS__) |
| #define | vsuxei32_v_u32m4(...) __riscv_vsuxei32_v_u32m4(__VA_ARGS__) |
| #define | vsuxei32_v_u32m4_m(...) __riscv_vsuxei32_v_u32m4_m(__VA_ARGS__) |
| #define | vsuxei32_v_u32m8(...) __riscv_vsuxei32_v_u32m8(__VA_ARGS__) |
| #define | vsuxei32_v_u32m8_m(...) __riscv_vsuxei32_v_u32m8_m(__VA_ARGS__) |
| #define | vsuxei32_v_u32mf2(...) __riscv_vsuxei32_v_u32mf2(__VA_ARGS__) |
| #define | vsuxei32_v_u32mf2_m(...) __riscv_vsuxei32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxei32_v_u64m1(...) __riscv_vsuxei32_v_u64m1(__VA_ARGS__) |
| #define | vsuxei32_v_u64m1_m(...) __riscv_vsuxei32_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxei32_v_u64m2(...) __riscv_vsuxei32_v_u64m2(__VA_ARGS__) |
| #define | vsuxei32_v_u64m2_m(...) __riscv_vsuxei32_v_u64m2_m(__VA_ARGS__) |
| #define | vsuxei32_v_u64m4(...) __riscv_vsuxei32_v_u64m4(__VA_ARGS__) |
| #define | vsuxei32_v_u64m4_m(...) __riscv_vsuxei32_v_u64m4_m(__VA_ARGS__) |
| #define | vsuxei32_v_u64m8(...) __riscv_vsuxei32_v_u64m8(__VA_ARGS__) |
| #define | vsuxei32_v_u64m8_m(...) __riscv_vsuxei32_v_u64m8_m(__VA_ARGS__) |
| #define | vsuxei32_v_u8m1(...) __riscv_vsuxei32_v_u8m1(__VA_ARGS__) |
| #define | vsuxei32_v_u8m1_m(...) __riscv_vsuxei32_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxei32_v_u8m2(...) __riscv_vsuxei32_v_u8m2(__VA_ARGS__) |
| #define | vsuxei32_v_u8m2_m(...) __riscv_vsuxei32_v_u8m2_m(__VA_ARGS__) |
| #define | vsuxei32_v_u8mf2(...) __riscv_vsuxei32_v_u8mf2(__VA_ARGS__) |
| #define | vsuxei32_v_u8mf2_m(...) __riscv_vsuxei32_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxei32_v_u8mf4(...) __riscv_vsuxei32_v_u8mf4(__VA_ARGS__) |
| #define | vsuxei32_v_u8mf4_m(...) __riscv_vsuxei32_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxei32_v_u8mf8(...) __riscv_vsuxei32_v_u8mf8(__VA_ARGS__) |
| #define | vsuxei32_v_u8mf8_m(...) __riscv_vsuxei32_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxei64_v_f16m1(...) __riscv_vsuxei64_v_f16m1(__VA_ARGS__) |
| #define | vsuxei64_v_f16m1_m(...) __riscv_vsuxei64_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxei64_v_f16m2(...) __riscv_vsuxei64_v_f16m2(__VA_ARGS__) |
| #define | vsuxei64_v_f16m2_m(...) __riscv_vsuxei64_v_f16m2_m(__VA_ARGS__) |
| #define | vsuxei64_v_f16mf2(...) __riscv_vsuxei64_v_f16mf2(__VA_ARGS__) |
| #define | vsuxei64_v_f16mf2_m(...) __riscv_vsuxei64_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxei64_v_f16mf4(...) __riscv_vsuxei64_v_f16mf4(__VA_ARGS__) |
| #define | vsuxei64_v_f16mf4_m(...) __riscv_vsuxei64_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxei64_v_f32m1(...) __riscv_vsuxei64_v_f32m1(__VA_ARGS__) |
| #define | vsuxei64_v_f32m1_m(...) __riscv_vsuxei64_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxei64_v_f32m2(...) __riscv_vsuxei64_v_f32m2(__VA_ARGS__) |
| #define | vsuxei64_v_f32m2_m(...) __riscv_vsuxei64_v_f32m2_m(__VA_ARGS__) |
| #define | vsuxei64_v_f32m4(...) __riscv_vsuxei64_v_f32m4(__VA_ARGS__) |
| #define | vsuxei64_v_f32m4_m(...) __riscv_vsuxei64_v_f32m4_m(__VA_ARGS__) |
| #define | vsuxei64_v_f32mf2(...) __riscv_vsuxei64_v_f32mf2(__VA_ARGS__) |
| #define | vsuxei64_v_f32mf2_m(...) __riscv_vsuxei64_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxei64_v_f64m1(...) __riscv_vsuxei64_v_f64m1(__VA_ARGS__) |
| #define | vsuxei64_v_f64m1_m(...) __riscv_vsuxei64_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxei64_v_f64m2(...) __riscv_vsuxei64_v_f64m2(__VA_ARGS__) |
| #define | vsuxei64_v_f64m2_m(...) __riscv_vsuxei64_v_f64m2_m(__VA_ARGS__) |
| #define | vsuxei64_v_f64m4(...) __riscv_vsuxei64_v_f64m4(__VA_ARGS__) |
| #define | vsuxei64_v_f64m4_m(...) __riscv_vsuxei64_v_f64m4_m(__VA_ARGS__) |
| #define | vsuxei64_v_f64m8(...) __riscv_vsuxei64_v_f64m8(__VA_ARGS__) |
| #define | vsuxei64_v_f64m8_m(...) __riscv_vsuxei64_v_f64m8_m(__VA_ARGS__) |
| #define | vsuxei64_v_i16m1(...) __riscv_vsuxei64_v_i16m1(__VA_ARGS__) |
| #define | vsuxei64_v_i16m1_m(...) __riscv_vsuxei64_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxei64_v_i16m2(...) __riscv_vsuxei64_v_i16m2(__VA_ARGS__) |
| #define | vsuxei64_v_i16m2_m(...) __riscv_vsuxei64_v_i16m2_m(__VA_ARGS__) |
| #define | vsuxei64_v_i16mf2(...) __riscv_vsuxei64_v_i16mf2(__VA_ARGS__) |
| #define | vsuxei64_v_i16mf2_m(...) __riscv_vsuxei64_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxei64_v_i16mf4(...) __riscv_vsuxei64_v_i16mf4(__VA_ARGS__) |
| #define | vsuxei64_v_i16mf4_m(...) __riscv_vsuxei64_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxei64_v_i32m1(...) __riscv_vsuxei64_v_i32m1(__VA_ARGS__) |
| #define | vsuxei64_v_i32m1_m(...) __riscv_vsuxei64_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxei64_v_i32m2(...) __riscv_vsuxei64_v_i32m2(__VA_ARGS__) |
| #define | vsuxei64_v_i32m2_m(...) __riscv_vsuxei64_v_i32m2_m(__VA_ARGS__) |
| #define | vsuxei64_v_i32m4(...) __riscv_vsuxei64_v_i32m4(__VA_ARGS__) |
| #define | vsuxei64_v_i32m4_m(...) __riscv_vsuxei64_v_i32m4_m(__VA_ARGS__) |
| #define | vsuxei64_v_i32mf2(...) __riscv_vsuxei64_v_i32mf2(__VA_ARGS__) |
| #define | vsuxei64_v_i32mf2_m(...) __riscv_vsuxei64_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxei64_v_i64m1(...) __riscv_vsuxei64_v_i64m1(__VA_ARGS__) |
| #define | vsuxei64_v_i64m1_m(...) __riscv_vsuxei64_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxei64_v_i64m2(...) __riscv_vsuxei64_v_i64m2(__VA_ARGS__) |
| #define | vsuxei64_v_i64m2_m(...) __riscv_vsuxei64_v_i64m2_m(__VA_ARGS__) |
| #define | vsuxei64_v_i64m4(...) __riscv_vsuxei64_v_i64m4(__VA_ARGS__) |
| #define | vsuxei64_v_i64m4_m(...) __riscv_vsuxei64_v_i64m4_m(__VA_ARGS__) |
| #define | vsuxei64_v_i64m8(...) __riscv_vsuxei64_v_i64m8(__VA_ARGS__) |
| #define | vsuxei64_v_i64m8_m(...) __riscv_vsuxei64_v_i64m8_m(__VA_ARGS__) |
| #define | vsuxei64_v_i8m1(...) __riscv_vsuxei64_v_i8m1(__VA_ARGS__) |
| #define | vsuxei64_v_i8m1_m(...) __riscv_vsuxei64_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxei64_v_i8mf2(...) __riscv_vsuxei64_v_i8mf2(__VA_ARGS__) |
| #define | vsuxei64_v_i8mf2_m(...) __riscv_vsuxei64_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxei64_v_i8mf4(...) __riscv_vsuxei64_v_i8mf4(__VA_ARGS__) |
| #define | vsuxei64_v_i8mf4_m(...) __riscv_vsuxei64_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxei64_v_i8mf8(...) __riscv_vsuxei64_v_i8mf8(__VA_ARGS__) |
| #define | vsuxei64_v_i8mf8_m(...) __riscv_vsuxei64_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxei64_v_u16m1(...) __riscv_vsuxei64_v_u16m1(__VA_ARGS__) |
| #define | vsuxei64_v_u16m1_m(...) __riscv_vsuxei64_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxei64_v_u16m2(...) __riscv_vsuxei64_v_u16m2(__VA_ARGS__) |
| #define | vsuxei64_v_u16m2_m(...) __riscv_vsuxei64_v_u16m2_m(__VA_ARGS__) |
| #define | vsuxei64_v_u16mf2(...) __riscv_vsuxei64_v_u16mf2(__VA_ARGS__) |
| #define | vsuxei64_v_u16mf2_m(...) __riscv_vsuxei64_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxei64_v_u16mf4(...) __riscv_vsuxei64_v_u16mf4(__VA_ARGS__) |
| #define | vsuxei64_v_u16mf4_m(...) __riscv_vsuxei64_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxei64_v_u32m1(...) __riscv_vsuxei64_v_u32m1(__VA_ARGS__) |
| #define | vsuxei64_v_u32m1_m(...) __riscv_vsuxei64_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxei64_v_u32m2(...) __riscv_vsuxei64_v_u32m2(__VA_ARGS__) |
| #define | vsuxei64_v_u32m2_m(...) __riscv_vsuxei64_v_u32m2_m(__VA_ARGS__) |
| #define | vsuxei64_v_u32m4(...) __riscv_vsuxei64_v_u32m4(__VA_ARGS__) |
| #define | vsuxei64_v_u32m4_m(...) __riscv_vsuxei64_v_u32m4_m(__VA_ARGS__) |
| #define | vsuxei64_v_u32mf2(...) __riscv_vsuxei64_v_u32mf2(__VA_ARGS__) |
| #define | vsuxei64_v_u32mf2_m(...) __riscv_vsuxei64_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxei64_v_u64m1(...) __riscv_vsuxei64_v_u64m1(__VA_ARGS__) |
| #define | vsuxei64_v_u64m1_m(...) __riscv_vsuxei64_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxei64_v_u64m2(...) __riscv_vsuxei64_v_u64m2(__VA_ARGS__) |
| #define | vsuxei64_v_u64m2_m(...) __riscv_vsuxei64_v_u64m2_m(__VA_ARGS__) |
| #define | vsuxei64_v_u64m4(...) __riscv_vsuxei64_v_u64m4(__VA_ARGS__) |
| #define | vsuxei64_v_u64m4_m(...) __riscv_vsuxei64_v_u64m4_m(__VA_ARGS__) |
| #define | vsuxei64_v_u64m8(...) __riscv_vsuxei64_v_u64m8(__VA_ARGS__) |
| #define | vsuxei64_v_u64m8_m(...) __riscv_vsuxei64_v_u64m8_m(__VA_ARGS__) |
| #define | vsuxei64_v_u8m1(...) __riscv_vsuxei64_v_u8m1(__VA_ARGS__) |
| #define | vsuxei64_v_u8m1_m(...) __riscv_vsuxei64_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxei64_v_u8mf2(...) __riscv_vsuxei64_v_u8mf2(__VA_ARGS__) |
| #define | vsuxei64_v_u8mf2_m(...) __riscv_vsuxei64_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxei64_v_u8mf4(...) __riscv_vsuxei64_v_u8mf4(__VA_ARGS__) |
| #define | vsuxei64_v_u8mf4_m(...) __riscv_vsuxei64_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxei64_v_u8mf8(...) __riscv_vsuxei64_v_u8mf8(__VA_ARGS__) |
| #define | vsuxei64_v_u8mf8_m(...) __riscv_vsuxei64_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxei8_v_f16m1(...) __riscv_vsuxei8_v_f16m1(__VA_ARGS__) |
| #define | vsuxei8_v_f16m1_m(...) __riscv_vsuxei8_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxei8_v_f16m2(...) __riscv_vsuxei8_v_f16m2(__VA_ARGS__) |
| #define | vsuxei8_v_f16m2_m(...) __riscv_vsuxei8_v_f16m2_m(__VA_ARGS__) |
| #define | vsuxei8_v_f16m4(...) __riscv_vsuxei8_v_f16m4(__VA_ARGS__) |
| #define | vsuxei8_v_f16m4_m(...) __riscv_vsuxei8_v_f16m4_m(__VA_ARGS__) |
| #define | vsuxei8_v_f16m8(...) __riscv_vsuxei8_v_f16m8(__VA_ARGS__) |
| #define | vsuxei8_v_f16m8_m(...) __riscv_vsuxei8_v_f16m8_m(__VA_ARGS__) |
| #define | vsuxei8_v_f16mf2(...) __riscv_vsuxei8_v_f16mf2(__VA_ARGS__) |
| #define | vsuxei8_v_f16mf2_m(...) __riscv_vsuxei8_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxei8_v_f16mf4(...) __riscv_vsuxei8_v_f16mf4(__VA_ARGS__) |
| #define | vsuxei8_v_f16mf4_m(...) __riscv_vsuxei8_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxei8_v_f32m1(...) __riscv_vsuxei8_v_f32m1(__VA_ARGS__) |
| #define | vsuxei8_v_f32m1_m(...) __riscv_vsuxei8_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxei8_v_f32m2(...) __riscv_vsuxei8_v_f32m2(__VA_ARGS__) |
| #define | vsuxei8_v_f32m2_m(...) __riscv_vsuxei8_v_f32m2_m(__VA_ARGS__) |
| #define | vsuxei8_v_f32m4(...) __riscv_vsuxei8_v_f32m4(__VA_ARGS__) |
| #define | vsuxei8_v_f32m4_m(...) __riscv_vsuxei8_v_f32m4_m(__VA_ARGS__) |
| #define | vsuxei8_v_f32m8(...) __riscv_vsuxei8_v_f32m8(__VA_ARGS__) |
| #define | vsuxei8_v_f32m8_m(...) __riscv_vsuxei8_v_f32m8_m(__VA_ARGS__) |
| #define | vsuxei8_v_f32mf2(...) __riscv_vsuxei8_v_f32mf2(__VA_ARGS__) |
| #define | vsuxei8_v_f32mf2_m(...) __riscv_vsuxei8_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxei8_v_f64m1(...) __riscv_vsuxei8_v_f64m1(__VA_ARGS__) |
| #define | vsuxei8_v_f64m1_m(...) __riscv_vsuxei8_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxei8_v_f64m2(...) __riscv_vsuxei8_v_f64m2(__VA_ARGS__) |
| #define | vsuxei8_v_f64m2_m(...) __riscv_vsuxei8_v_f64m2_m(__VA_ARGS__) |
| #define | vsuxei8_v_f64m4(...) __riscv_vsuxei8_v_f64m4(__VA_ARGS__) |
| #define | vsuxei8_v_f64m4_m(...) __riscv_vsuxei8_v_f64m4_m(__VA_ARGS__) |
| #define | vsuxei8_v_f64m8(...) __riscv_vsuxei8_v_f64m8(__VA_ARGS__) |
| #define | vsuxei8_v_f64m8_m(...) __riscv_vsuxei8_v_f64m8_m(__VA_ARGS__) |
| #define | vsuxei8_v_i16m1(...) __riscv_vsuxei8_v_i16m1(__VA_ARGS__) |
| #define | vsuxei8_v_i16m1_m(...) __riscv_vsuxei8_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxei8_v_i16m2(...) __riscv_vsuxei8_v_i16m2(__VA_ARGS__) |
| #define | vsuxei8_v_i16m2_m(...) __riscv_vsuxei8_v_i16m2_m(__VA_ARGS__) |
| #define | vsuxei8_v_i16m4(...) __riscv_vsuxei8_v_i16m4(__VA_ARGS__) |
| #define | vsuxei8_v_i16m4_m(...) __riscv_vsuxei8_v_i16m4_m(__VA_ARGS__) |
| #define | vsuxei8_v_i16m8(...) __riscv_vsuxei8_v_i16m8(__VA_ARGS__) |
| #define | vsuxei8_v_i16m8_m(...) __riscv_vsuxei8_v_i16m8_m(__VA_ARGS__) |
| #define | vsuxei8_v_i16mf2(...) __riscv_vsuxei8_v_i16mf2(__VA_ARGS__) |
| #define | vsuxei8_v_i16mf2_m(...) __riscv_vsuxei8_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxei8_v_i16mf4(...) __riscv_vsuxei8_v_i16mf4(__VA_ARGS__) |
| #define | vsuxei8_v_i16mf4_m(...) __riscv_vsuxei8_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxei8_v_i32m1(...) __riscv_vsuxei8_v_i32m1(__VA_ARGS__) |
| #define | vsuxei8_v_i32m1_m(...) __riscv_vsuxei8_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxei8_v_i32m2(...) __riscv_vsuxei8_v_i32m2(__VA_ARGS__) |
| #define | vsuxei8_v_i32m2_m(...) __riscv_vsuxei8_v_i32m2_m(__VA_ARGS__) |
| #define | vsuxei8_v_i32m4(...) __riscv_vsuxei8_v_i32m4(__VA_ARGS__) |
| #define | vsuxei8_v_i32m4_m(...) __riscv_vsuxei8_v_i32m4_m(__VA_ARGS__) |
| #define | vsuxei8_v_i32m8(...) __riscv_vsuxei8_v_i32m8(__VA_ARGS__) |
| #define | vsuxei8_v_i32m8_m(...) __riscv_vsuxei8_v_i32m8_m(__VA_ARGS__) |
| #define | vsuxei8_v_i32mf2(...) __riscv_vsuxei8_v_i32mf2(__VA_ARGS__) |
| #define | vsuxei8_v_i32mf2_m(...) __riscv_vsuxei8_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxei8_v_i64m1(...) __riscv_vsuxei8_v_i64m1(__VA_ARGS__) |
| #define | vsuxei8_v_i64m1_m(...) __riscv_vsuxei8_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxei8_v_i64m2(...) __riscv_vsuxei8_v_i64m2(__VA_ARGS__) |
| #define | vsuxei8_v_i64m2_m(...) __riscv_vsuxei8_v_i64m2_m(__VA_ARGS__) |
| #define | vsuxei8_v_i64m4(...) __riscv_vsuxei8_v_i64m4(__VA_ARGS__) |
| #define | vsuxei8_v_i64m4_m(...) __riscv_vsuxei8_v_i64m4_m(__VA_ARGS__) |
| #define | vsuxei8_v_i64m8(...) __riscv_vsuxei8_v_i64m8(__VA_ARGS__) |
| #define | vsuxei8_v_i64m8_m(...) __riscv_vsuxei8_v_i64m8_m(__VA_ARGS__) |
| #define | vsuxei8_v_i8m1(...) __riscv_vsuxei8_v_i8m1(__VA_ARGS__) |
| #define | vsuxei8_v_i8m1_m(...) __riscv_vsuxei8_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxei8_v_i8m2(...) __riscv_vsuxei8_v_i8m2(__VA_ARGS__) |
| #define | vsuxei8_v_i8m2_m(...) __riscv_vsuxei8_v_i8m2_m(__VA_ARGS__) |
| #define | vsuxei8_v_i8m4(...) __riscv_vsuxei8_v_i8m4(__VA_ARGS__) |
| #define | vsuxei8_v_i8m4_m(...) __riscv_vsuxei8_v_i8m4_m(__VA_ARGS__) |
| #define | vsuxei8_v_i8m8(...) __riscv_vsuxei8_v_i8m8(__VA_ARGS__) |
| #define | vsuxei8_v_i8m8_m(...) __riscv_vsuxei8_v_i8m8_m(__VA_ARGS__) |
| #define | vsuxei8_v_i8mf2(...) __riscv_vsuxei8_v_i8mf2(__VA_ARGS__) |
| #define | vsuxei8_v_i8mf2_m(...) __riscv_vsuxei8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxei8_v_i8mf4(...) __riscv_vsuxei8_v_i8mf4(__VA_ARGS__) |
| #define | vsuxei8_v_i8mf4_m(...) __riscv_vsuxei8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxei8_v_i8mf8(...) __riscv_vsuxei8_v_i8mf8(__VA_ARGS__) |
| #define | vsuxei8_v_i8mf8_m(...) __riscv_vsuxei8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxei8_v_u16m1(...) __riscv_vsuxei8_v_u16m1(__VA_ARGS__) |
| #define | vsuxei8_v_u16m1_m(...) __riscv_vsuxei8_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxei8_v_u16m2(...) __riscv_vsuxei8_v_u16m2(__VA_ARGS__) |
| #define | vsuxei8_v_u16m2_m(...) __riscv_vsuxei8_v_u16m2_m(__VA_ARGS__) |
| #define | vsuxei8_v_u16m4(...) __riscv_vsuxei8_v_u16m4(__VA_ARGS__) |
| #define | vsuxei8_v_u16m4_m(...) __riscv_vsuxei8_v_u16m4_m(__VA_ARGS__) |
| #define | vsuxei8_v_u16m8(...) __riscv_vsuxei8_v_u16m8(__VA_ARGS__) |
| #define | vsuxei8_v_u16m8_m(...) __riscv_vsuxei8_v_u16m8_m(__VA_ARGS__) |
| #define | vsuxei8_v_u16mf2(...) __riscv_vsuxei8_v_u16mf2(__VA_ARGS__) |
| #define | vsuxei8_v_u16mf2_m(...) __riscv_vsuxei8_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxei8_v_u16mf4(...) __riscv_vsuxei8_v_u16mf4(__VA_ARGS__) |
| #define | vsuxei8_v_u16mf4_m(...) __riscv_vsuxei8_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxei8_v_u32m1(...) __riscv_vsuxei8_v_u32m1(__VA_ARGS__) |
| #define | vsuxei8_v_u32m1_m(...) __riscv_vsuxei8_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxei8_v_u32m2(...) __riscv_vsuxei8_v_u32m2(__VA_ARGS__) |
| #define | vsuxei8_v_u32m2_m(...) __riscv_vsuxei8_v_u32m2_m(__VA_ARGS__) |
| #define | vsuxei8_v_u32m4(...) __riscv_vsuxei8_v_u32m4(__VA_ARGS__) |
| #define | vsuxei8_v_u32m4_m(...) __riscv_vsuxei8_v_u32m4_m(__VA_ARGS__) |
| #define | vsuxei8_v_u32m8(...) __riscv_vsuxei8_v_u32m8(__VA_ARGS__) |
| #define | vsuxei8_v_u32m8_m(...) __riscv_vsuxei8_v_u32m8_m(__VA_ARGS__) |
| #define | vsuxei8_v_u32mf2(...) __riscv_vsuxei8_v_u32mf2(__VA_ARGS__) |
| #define | vsuxei8_v_u32mf2_m(...) __riscv_vsuxei8_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxei8_v_u64m1(...) __riscv_vsuxei8_v_u64m1(__VA_ARGS__) |
| #define | vsuxei8_v_u64m1_m(...) __riscv_vsuxei8_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxei8_v_u64m2(...) __riscv_vsuxei8_v_u64m2(__VA_ARGS__) |
| #define | vsuxei8_v_u64m2_m(...) __riscv_vsuxei8_v_u64m2_m(__VA_ARGS__) |
| #define | vsuxei8_v_u64m4(...) __riscv_vsuxei8_v_u64m4(__VA_ARGS__) |
| #define | vsuxei8_v_u64m4_m(...) __riscv_vsuxei8_v_u64m4_m(__VA_ARGS__) |
| #define | vsuxei8_v_u64m8(...) __riscv_vsuxei8_v_u64m8(__VA_ARGS__) |
| #define | vsuxei8_v_u64m8_m(...) __riscv_vsuxei8_v_u64m8_m(__VA_ARGS__) |
| #define | vsuxei8_v_u8m1(...) __riscv_vsuxei8_v_u8m1(__VA_ARGS__) |
| #define | vsuxei8_v_u8m1_m(...) __riscv_vsuxei8_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxei8_v_u8m2(...) __riscv_vsuxei8_v_u8m2(__VA_ARGS__) |
| #define | vsuxei8_v_u8m2_m(...) __riscv_vsuxei8_v_u8m2_m(__VA_ARGS__) |
| #define | vsuxei8_v_u8m4(...) __riscv_vsuxei8_v_u8m4(__VA_ARGS__) |
| #define | vsuxei8_v_u8m4_m(...) __riscv_vsuxei8_v_u8m4_m(__VA_ARGS__) |
| #define | vsuxei8_v_u8m8(...) __riscv_vsuxei8_v_u8m8(__VA_ARGS__) |
| #define | vsuxei8_v_u8m8_m(...) __riscv_vsuxei8_v_u8m8_m(__VA_ARGS__) |
| #define | vsuxei8_v_u8mf2(...) __riscv_vsuxei8_v_u8mf2(__VA_ARGS__) |
| #define | vsuxei8_v_u8mf2_m(...) __riscv_vsuxei8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxei8_v_u8mf4(...) __riscv_vsuxei8_v_u8mf4(__VA_ARGS__) |
| #define | vsuxei8_v_u8mf4_m(...) __riscv_vsuxei8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxei8_v_u8mf8(...) __riscv_vsuxei8_v_u8mf8(__VA_ARGS__) |
| #define | vsuxei8_v_u8mf8_m(...) __riscv_vsuxei8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f16m1(...) __riscv_vsuxseg2ei16_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f16m1_m(...) __riscv_vsuxseg2ei16_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f16m2(...) __riscv_vsuxseg2ei16_v_f16m2(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f16m2_m(...) __riscv_vsuxseg2ei16_v_f16m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f16m4(...) __riscv_vsuxseg2ei16_v_f16m4(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f16m4_m(...) __riscv_vsuxseg2ei16_v_f16m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f16mf2(...) __riscv_vsuxseg2ei16_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f16mf2_m(...) __riscv_vsuxseg2ei16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f16mf4(...) __riscv_vsuxseg2ei16_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f16mf4_m(...) __riscv_vsuxseg2ei16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f32m1(...) __riscv_vsuxseg2ei16_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f32m1_m(...) __riscv_vsuxseg2ei16_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f32m2(...) __riscv_vsuxseg2ei16_v_f32m2(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f32m2_m(...) __riscv_vsuxseg2ei16_v_f32m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f32m4(...) __riscv_vsuxseg2ei16_v_f32m4(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f32m4_m(...) __riscv_vsuxseg2ei16_v_f32m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f32mf2(...) __riscv_vsuxseg2ei16_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f32mf2_m(...) __riscv_vsuxseg2ei16_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f64m1(...) __riscv_vsuxseg2ei16_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f64m1_m(...) __riscv_vsuxseg2ei16_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f64m2(...) __riscv_vsuxseg2ei16_v_f64m2(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f64m2_m(...) __riscv_vsuxseg2ei16_v_f64m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f64m4(...) __riscv_vsuxseg2ei16_v_f64m4(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_f64m4_m(...) __riscv_vsuxseg2ei16_v_f64m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i16m1(...) __riscv_vsuxseg2ei16_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i16m1_m(...) __riscv_vsuxseg2ei16_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i16m2(...) __riscv_vsuxseg2ei16_v_i16m2(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i16m2_m(...) __riscv_vsuxseg2ei16_v_i16m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i16m4(...) __riscv_vsuxseg2ei16_v_i16m4(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i16m4_m(...) __riscv_vsuxseg2ei16_v_i16m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i16mf2(...) __riscv_vsuxseg2ei16_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i16mf2_m(...) __riscv_vsuxseg2ei16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i16mf4(...) __riscv_vsuxseg2ei16_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i16mf4_m(...) __riscv_vsuxseg2ei16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i32m1(...) __riscv_vsuxseg2ei16_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i32m1_m(...) __riscv_vsuxseg2ei16_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i32m2(...) __riscv_vsuxseg2ei16_v_i32m2(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i32m2_m(...) __riscv_vsuxseg2ei16_v_i32m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i32m4(...) __riscv_vsuxseg2ei16_v_i32m4(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i32m4_m(...) __riscv_vsuxseg2ei16_v_i32m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i32mf2(...) __riscv_vsuxseg2ei16_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i32mf2_m(...) __riscv_vsuxseg2ei16_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i64m1(...) __riscv_vsuxseg2ei16_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i64m1_m(...) __riscv_vsuxseg2ei16_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i64m2(...) __riscv_vsuxseg2ei16_v_i64m2(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i64m2_m(...) __riscv_vsuxseg2ei16_v_i64m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i64m4(...) __riscv_vsuxseg2ei16_v_i64m4(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i64m4_m(...) __riscv_vsuxseg2ei16_v_i64m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i8m1(...) __riscv_vsuxseg2ei16_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i8m1_m(...) __riscv_vsuxseg2ei16_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i8m2(...) __riscv_vsuxseg2ei16_v_i8m2(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i8m2_m(...) __riscv_vsuxseg2ei16_v_i8m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i8m4(...) __riscv_vsuxseg2ei16_v_i8m4(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i8m4_m(...) __riscv_vsuxseg2ei16_v_i8m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i8mf2(...) __riscv_vsuxseg2ei16_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i8mf2_m(...) __riscv_vsuxseg2ei16_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i8mf4(...) __riscv_vsuxseg2ei16_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i8mf4_m(...) __riscv_vsuxseg2ei16_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i8mf8(...) __riscv_vsuxseg2ei16_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_i8mf8_m(...) __riscv_vsuxseg2ei16_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u16m1(...) __riscv_vsuxseg2ei16_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u16m1_m(...) __riscv_vsuxseg2ei16_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u16m2(...) __riscv_vsuxseg2ei16_v_u16m2(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u16m2_m(...) __riscv_vsuxseg2ei16_v_u16m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u16m4(...) __riscv_vsuxseg2ei16_v_u16m4(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u16m4_m(...) __riscv_vsuxseg2ei16_v_u16m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u16mf2(...) __riscv_vsuxseg2ei16_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u16mf2_m(...) __riscv_vsuxseg2ei16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u16mf4(...) __riscv_vsuxseg2ei16_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u16mf4_m(...) __riscv_vsuxseg2ei16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u32m1(...) __riscv_vsuxseg2ei16_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u32m1_m(...) __riscv_vsuxseg2ei16_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u32m2(...) __riscv_vsuxseg2ei16_v_u32m2(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u32m2_m(...) __riscv_vsuxseg2ei16_v_u32m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u32m4(...) __riscv_vsuxseg2ei16_v_u32m4(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u32m4_m(...) __riscv_vsuxseg2ei16_v_u32m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u32mf2(...) __riscv_vsuxseg2ei16_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u32mf2_m(...) __riscv_vsuxseg2ei16_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u64m1(...) __riscv_vsuxseg2ei16_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u64m1_m(...) __riscv_vsuxseg2ei16_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u64m2(...) __riscv_vsuxseg2ei16_v_u64m2(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u64m2_m(...) __riscv_vsuxseg2ei16_v_u64m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u64m4(...) __riscv_vsuxseg2ei16_v_u64m4(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u64m4_m(...) __riscv_vsuxseg2ei16_v_u64m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u8m1(...) __riscv_vsuxseg2ei16_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u8m1_m(...) __riscv_vsuxseg2ei16_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u8m2(...) __riscv_vsuxseg2ei16_v_u8m2(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u8m2_m(...) __riscv_vsuxseg2ei16_v_u8m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u8m4(...) __riscv_vsuxseg2ei16_v_u8m4(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u8m4_m(...) __riscv_vsuxseg2ei16_v_u8m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u8mf2(...) __riscv_vsuxseg2ei16_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u8mf2_m(...) __riscv_vsuxseg2ei16_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u8mf4(...) __riscv_vsuxseg2ei16_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u8mf4_m(...) __riscv_vsuxseg2ei16_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u8mf8(...) __riscv_vsuxseg2ei16_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg2ei16_v_u8mf8_m(...) __riscv_vsuxseg2ei16_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f16m1(...) __riscv_vsuxseg2ei32_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f16m1_m(...) __riscv_vsuxseg2ei32_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f16m2(...) __riscv_vsuxseg2ei32_v_f16m2(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f16m2_m(...) __riscv_vsuxseg2ei32_v_f16m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f16m4(...) __riscv_vsuxseg2ei32_v_f16m4(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f16m4_m(...) __riscv_vsuxseg2ei32_v_f16m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f16mf2(...) __riscv_vsuxseg2ei32_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f16mf2_m(...) __riscv_vsuxseg2ei32_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f16mf4(...) __riscv_vsuxseg2ei32_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f16mf4_m(...) __riscv_vsuxseg2ei32_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f32m1(...) __riscv_vsuxseg2ei32_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f32m1_m(...) __riscv_vsuxseg2ei32_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f32m2(...) __riscv_vsuxseg2ei32_v_f32m2(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f32m2_m(...) __riscv_vsuxseg2ei32_v_f32m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f32m4(...) __riscv_vsuxseg2ei32_v_f32m4(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f32m4_m(...) __riscv_vsuxseg2ei32_v_f32m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f32mf2(...) __riscv_vsuxseg2ei32_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f32mf2_m(...) __riscv_vsuxseg2ei32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f64m1(...) __riscv_vsuxseg2ei32_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f64m1_m(...) __riscv_vsuxseg2ei32_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f64m2(...) __riscv_vsuxseg2ei32_v_f64m2(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f64m2_m(...) __riscv_vsuxseg2ei32_v_f64m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f64m4(...) __riscv_vsuxseg2ei32_v_f64m4(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_f64m4_m(...) __riscv_vsuxseg2ei32_v_f64m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i16m1(...) __riscv_vsuxseg2ei32_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i16m1_m(...) __riscv_vsuxseg2ei32_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i16m2(...) __riscv_vsuxseg2ei32_v_i16m2(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i16m2_m(...) __riscv_vsuxseg2ei32_v_i16m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i16m4(...) __riscv_vsuxseg2ei32_v_i16m4(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i16m4_m(...) __riscv_vsuxseg2ei32_v_i16m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i16mf2(...) __riscv_vsuxseg2ei32_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i16mf2_m(...) __riscv_vsuxseg2ei32_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i16mf4(...) __riscv_vsuxseg2ei32_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i16mf4_m(...) __riscv_vsuxseg2ei32_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i32m1(...) __riscv_vsuxseg2ei32_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i32m1_m(...) __riscv_vsuxseg2ei32_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i32m2(...) __riscv_vsuxseg2ei32_v_i32m2(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i32m2_m(...) __riscv_vsuxseg2ei32_v_i32m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i32m4(...) __riscv_vsuxseg2ei32_v_i32m4(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i32m4_m(...) __riscv_vsuxseg2ei32_v_i32m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i32mf2(...) __riscv_vsuxseg2ei32_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i32mf2_m(...) __riscv_vsuxseg2ei32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i64m1(...) __riscv_vsuxseg2ei32_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i64m1_m(...) __riscv_vsuxseg2ei32_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i64m2(...) __riscv_vsuxseg2ei32_v_i64m2(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i64m2_m(...) __riscv_vsuxseg2ei32_v_i64m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i64m4(...) __riscv_vsuxseg2ei32_v_i64m4(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i64m4_m(...) __riscv_vsuxseg2ei32_v_i64m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i8m1(...) __riscv_vsuxseg2ei32_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i8m1_m(...) __riscv_vsuxseg2ei32_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i8m2(...) __riscv_vsuxseg2ei32_v_i8m2(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i8m2_m(...) __riscv_vsuxseg2ei32_v_i8m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i8mf2(...) __riscv_vsuxseg2ei32_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i8mf2_m(...) __riscv_vsuxseg2ei32_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i8mf4(...) __riscv_vsuxseg2ei32_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i8mf4_m(...) __riscv_vsuxseg2ei32_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i8mf8(...) __riscv_vsuxseg2ei32_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_i8mf8_m(...) __riscv_vsuxseg2ei32_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u16m1(...) __riscv_vsuxseg2ei32_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u16m1_m(...) __riscv_vsuxseg2ei32_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u16m2(...) __riscv_vsuxseg2ei32_v_u16m2(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u16m2_m(...) __riscv_vsuxseg2ei32_v_u16m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u16m4(...) __riscv_vsuxseg2ei32_v_u16m4(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u16m4_m(...) __riscv_vsuxseg2ei32_v_u16m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u16mf2(...) __riscv_vsuxseg2ei32_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u16mf2_m(...) __riscv_vsuxseg2ei32_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u16mf4(...) __riscv_vsuxseg2ei32_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u16mf4_m(...) __riscv_vsuxseg2ei32_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u32m1(...) __riscv_vsuxseg2ei32_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u32m1_m(...) __riscv_vsuxseg2ei32_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u32m2(...) __riscv_vsuxseg2ei32_v_u32m2(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u32m2_m(...) __riscv_vsuxseg2ei32_v_u32m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u32m4(...) __riscv_vsuxseg2ei32_v_u32m4(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u32m4_m(...) __riscv_vsuxseg2ei32_v_u32m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u32mf2(...) __riscv_vsuxseg2ei32_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u32mf2_m(...) __riscv_vsuxseg2ei32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u64m1(...) __riscv_vsuxseg2ei32_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u64m1_m(...) __riscv_vsuxseg2ei32_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u64m2(...) __riscv_vsuxseg2ei32_v_u64m2(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u64m2_m(...) __riscv_vsuxseg2ei32_v_u64m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u64m4(...) __riscv_vsuxseg2ei32_v_u64m4(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u64m4_m(...) __riscv_vsuxseg2ei32_v_u64m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u8m1(...) __riscv_vsuxseg2ei32_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u8m1_m(...) __riscv_vsuxseg2ei32_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u8m2(...) __riscv_vsuxseg2ei32_v_u8m2(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u8m2_m(...) __riscv_vsuxseg2ei32_v_u8m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u8mf2(...) __riscv_vsuxseg2ei32_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u8mf2_m(...) __riscv_vsuxseg2ei32_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u8mf4(...) __riscv_vsuxseg2ei32_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u8mf4_m(...) __riscv_vsuxseg2ei32_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u8mf8(...) __riscv_vsuxseg2ei32_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg2ei32_v_u8mf8_m(...) __riscv_vsuxseg2ei32_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f16m1(...) __riscv_vsuxseg2ei64_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f16m1_m(...) __riscv_vsuxseg2ei64_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f16m2(...) __riscv_vsuxseg2ei64_v_f16m2(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f16m2_m(...) __riscv_vsuxseg2ei64_v_f16m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f16mf2(...) __riscv_vsuxseg2ei64_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f16mf2_m(...) __riscv_vsuxseg2ei64_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f16mf4(...) __riscv_vsuxseg2ei64_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f16mf4_m(...) __riscv_vsuxseg2ei64_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f32m1(...) __riscv_vsuxseg2ei64_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f32m1_m(...) __riscv_vsuxseg2ei64_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f32m2(...) __riscv_vsuxseg2ei64_v_f32m2(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f32m2_m(...) __riscv_vsuxseg2ei64_v_f32m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f32m4(...) __riscv_vsuxseg2ei64_v_f32m4(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f32m4_m(...) __riscv_vsuxseg2ei64_v_f32m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f32mf2(...) __riscv_vsuxseg2ei64_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f32mf2_m(...) __riscv_vsuxseg2ei64_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f64m1(...) __riscv_vsuxseg2ei64_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f64m1_m(...) __riscv_vsuxseg2ei64_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f64m2(...) __riscv_vsuxseg2ei64_v_f64m2(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f64m2_m(...) __riscv_vsuxseg2ei64_v_f64m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f64m4(...) __riscv_vsuxseg2ei64_v_f64m4(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_f64m4_m(...) __riscv_vsuxseg2ei64_v_f64m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i16m1(...) __riscv_vsuxseg2ei64_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i16m1_m(...) __riscv_vsuxseg2ei64_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i16m2(...) __riscv_vsuxseg2ei64_v_i16m2(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i16m2_m(...) __riscv_vsuxseg2ei64_v_i16m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i16mf2(...) __riscv_vsuxseg2ei64_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i16mf2_m(...) __riscv_vsuxseg2ei64_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i16mf4(...) __riscv_vsuxseg2ei64_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i16mf4_m(...) __riscv_vsuxseg2ei64_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i32m1(...) __riscv_vsuxseg2ei64_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i32m1_m(...) __riscv_vsuxseg2ei64_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i32m2(...) __riscv_vsuxseg2ei64_v_i32m2(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i32m2_m(...) __riscv_vsuxseg2ei64_v_i32m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i32m4(...) __riscv_vsuxseg2ei64_v_i32m4(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i32m4_m(...) __riscv_vsuxseg2ei64_v_i32m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i32mf2(...) __riscv_vsuxseg2ei64_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i32mf2_m(...) __riscv_vsuxseg2ei64_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i64m1(...) __riscv_vsuxseg2ei64_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i64m1_m(...) __riscv_vsuxseg2ei64_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i64m2(...) __riscv_vsuxseg2ei64_v_i64m2(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i64m2_m(...) __riscv_vsuxseg2ei64_v_i64m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i64m4(...) __riscv_vsuxseg2ei64_v_i64m4(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i64m4_m(...) __riscv_vsuxseg2ei64_v_i64m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i8m1(...) __riscv_vsuxseg2ei64_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i8m1_m(...) __riscv_vsuxseg2ei64_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i8mf2(...) __riscv_vsuxseg2ei64_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i8mf2_m(...) __riscv_vsuxseg2ei64_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i8mf4(...) __riscv_vsuxseg2ei64_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i8mf4_m(...) __riscv_vsuxseg2ei64_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i8mf8(...) __riscv_vsuxseg2ei64_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_i8mf8_m(...) __riscv_vsuxseg2ei64_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u16m1(...) __riscv_vsuxseg2ei64_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u16m1_m(...) __riscv_vsuxseg2ei64_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u16m2(...) __riscv_vsuxseg2ei64_v_u16m2(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u16m2_m(...) __riscv_vsuxseg2ei64_v_u16m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u16mf2(...) __riscv_vsuxseg2ei64_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u16mf2_m(...) __riscv_vsuxseg2ei64_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u16mf4(...) __riscv_vsuxseg2ei64_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u16mf4_m(...) __riscv_vsuxseg2ei64_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u32m1(...) __riscv_vsuxseg2ei64_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u32m1_m(...) __riscv_vsuxseg2ei64_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u32m2(...) __riscv_vsuxseg2ei64_v_u32m2(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u32m2_m(...) __riscv_vsuxseg2ei64_v_u32m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u32m4(...) __riscv_vsuxseg2ei64_v_u32m4(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u32m4_m(...) __riscv_vsuxseg2ei64_v_u32m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u32mf2(...) __riscv_vsuxseg2ei64_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u32mf2_m(...) __riscv_vsuxseg2ei64_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u64m1(...) __riscv_vsuxseg2ei64_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u64m1_m(...) __riscv_vsuxseg2ei64_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u64m2(...) __riscv_vsuxseg2ei64_v_u64m2(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u64m2_m(...) __riscv_vsuxseg2ei64_v_u64m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u64m4(...) __riscv_vsuxseg2ei64_v_u64m4(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u64m4_m(...) __riscv_vsuxseg2ei64_v_u64m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u8m1(...) __riscv_vsuxseg2ei64_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u8m1_m(...) __riscv_vsuxseg2ei64_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u8mf2(...) __riscv_vsuxseg2ei64_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u8mf2_m(...) __riscv_vsuxseg2ei64_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u8mf4(...) __riscv_vsuxseg2ei64_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u8mf4_m(...) __riscv_vsuxseg2ei64_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u8mf8(...) __riscv_vsuxseg2ei64_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg2ei64_v_u8mf8_m(...) __riscv_vsuxseg2ei64_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f16m1(...) __riscv_vsuxseg2ei8_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f16m1_m(...) __riscv_vsuxseg2ei8_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f16m2(...) __riscv_vsuxseg2ei8_v_f16m2(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f16m2_m(...) __riscv_vsuxseg2ei8_v_f16m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f16m4(...) __riscv_vsuxseg2ei8_v_f16m4(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f16m4_m(...) __riscv_vsuxseg2ei8_v_f16m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f16mf2(...) __riscv_vsuxseg2ei8_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f16mf2_m(...) __riscv_vsuxseg2ei8_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f16mf4(...) __riscv_vsuxseg2ei8_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f16mf4_m(...) __riscv_vsuxseg2ei8_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f32m1(...) __riscv_vsuxseg2ei8_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f32m1_m(...) __riscv_vsuxseg2ei8_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f32m2(...) __riscv_vsuxseg2ei8_v_f32m2(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f32m2_m(...) __riscv_vsuxseg2ei8_v_f32m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f32m4(...) __riscv_vsuxseg2ei8_v_f32m4(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f32m4_m(...) __riscv_vsuxseg2ei8_v_f32m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f32mf2(...) __riscv_vsuxseg2ei8_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f32mf2_m(...) __riscv_vsuxseg2ei8_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f64m1(...) __riscv_vsuxseg2ei8_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f64m1_m(...) __riscv_vsuxseg2ei8_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f64m2(...) __riscv_vsuxseg2ei8_v_f64m2(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f64m2_m(...) __riscv_vsuxseg2ei8_v_f64m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f64m4(...) __riscv_vsuxseg2ei8_v_f64m4(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_f64m4_m(...) __riscv_vsuxseg2ei8_v_f64m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i16m1(...) __riscv_vsuxseg2ei8_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i16m1_m(...) __riscv_vsuxseg2ei8_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i16m2(...) __riscv_vsuxseg2ei8_v_i16m2(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i16m2_m(...) __riscv_vsuxseg2ei8_v_i16m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i16m4(...) __riscv_vsuxseg2ei8_v_i16m4(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i16m4_m(...) __riscv_vsuxseg2ei8_v_i16m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i16mf2(...) __riscv_vsuxseg2ei8_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i16mf2_m(...) __riscv_vsuxseg2ei8_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i16mf4(...) __riscv_vsuxseg2ei8_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i16mf4_m(...) __riscv_vsuxseg2ei8_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i32m1(...) __riscv_vsuxseg2ei8_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i32m1_m(...) __riscv_vsuxseg2ei8_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i32m2(...) __riscv_vsuxseg2ei8_v_i32m2(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i32m2_m(...) __riscv_vsuxseg2ei8_v_i32m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i32m4(...) __riscv_vsuxseg2ei8_v_i32m4(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i32m4_m(...) __riscv_vsuxseg2ei8_v_i32m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i32mf2(...) __riscv_vsuxseg2ei8_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i32mf2_m(...) __riscv_vsuxseg2ei8_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i64m1(...) __riscv_vsuxseg2ei8_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i64m1_m(...) __riscv_vsuxseg2ei8_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i64m2(...) __riscv_vsuxseg2ei8_v_i64m2(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i64m2_m(...) __riscv_vsuxseg2ei8_v_i64m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i64m4(...) __riscv_vsuxseg2ei8_v_i64m4(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i64m4_m(...) __riscv_vsuxseg2ei8_v_i64m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i8m1(...) __riscv_vsuxseg2ei8_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i8m1_m(...) __riscv_vsuxseg2ei8_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i8m2(...) __riscv_vsuxseg2ei8_v_i8m2(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i8m2_m(...) __riscv_vsuxseg2ei8_v_i8m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i8m4(...) __riscv_vsuxseg2ei8_v_i8m4(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i8m4_m(...) __riscv_vsuxseg2ei8_v_i8m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i8mf2(...) __riscv_vsuxseg2ei8_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i8mf2_m(...) __riscv_vsuxseg2ei8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i8mf4(...) __riscv_vsuxseg2ei8_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i8mf4_m(...) __riscv_vsuxseg2ei8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i8mf8(...) __riscv_vsuxseg2ei8_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_i8mf8_m(...) __riscv_vsuxseg2ei8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u16m1(...) __riscv_vsuxseg2ei8_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u16m1_m(...) __riscv_vsuxseg2ei8_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u16m2(...) __riscv_vsuxseg2ei8_v_u16m2(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u16m2_m(...) __riscv_vsuxseg2ei8_v_u16m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u16m4(...) __riscv_vsuxseg2ei8_v_u16m4(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u16m4_m(...) __riscv_vsuxseg2ei8_v_u16m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u16mf2(...) __riscv_vsuxseg2ei8_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u16mf2_m(...) __riscv_vsuxseg2ei8_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u16mf4(...) __riscv_vsuxseg2ei8_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u16mf4_m(...) __riscv_vsuxseg2ei8_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u32m1(...) __riscv_vsuxseg2ei8_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u32m1_m(...) __riscv_vsuxseg2ei8_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u32m2(...) __riscv_vsuxseg2ei8_v_u32m2(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u32m2_m(...) __riscv_vsuxseg2ei8_v_u32m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u32m4(...) __riscv_vsuxseg2ei8_v_u32m4(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u32m4_m(...) __riscv_vsuxseg2ei8_v_u32m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u32mf2(...) __riscv_vsuxseg2ei8_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u32mf2_m(...) __riscv_vsuxseg2ei8_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u64m1(...) __riscv_vsuxseg2ei8_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u64m1_m(...) __riscv_vsuxseg2ei8_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u64m2(...) __riscv_vsuxseg2ei8_v_u64m2(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u64m2_m(...) __riscv_vsuxseg2ei8_v_u64m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u64m4(...) __riscv_vsuxseg2ei8_v_u64m4(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u64m4_m(...) __riscv_vsuxseg2ei8_v_u64m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u8m1(...) __riscv_vsuxseg2ei8_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u8m1_m(...) __riscv_vsuxseg2ei8_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u8m2(...) __riscv_vsuxseg2ei8_v_u8m2(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u8m2_m(...) __riscv_vsuxseg2ei8_v_u8m2_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u8m4(...) __riscv_vsuxseg2ei8_v_u8m4(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u8m4_m(...) __riscv_vsuxseg2ei8_v_u8m4_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u8mf2(...) __riscv_vsuxseg2ei8_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u8mf2_m(...) __riscv_vsuxseg2ei8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u8mf4(...) __riscv_vsuxseg2ei8_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u8mf4_m(...) __riscv_vsuxseg2ei8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u8mf8(...) __riscv_vsuxseg2ei8_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg2ei8_v_u8mf8_m(...) __riscv_vsuxseg2ei8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_f16m1(...) __riscv_vsuxseg3ei16_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_f16m1_m(...) __riscv_vsuxseg3ei16_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_f16m2(...) __riscv_vsuxseg3ei16_v_f16m2(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_f16m2_m(...) __riscv_vsuxseg3ei16_v_f16m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_f16mf2(...) __riscv_vsuxseg3ei16_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_f16mf2_m(...) __riscv_vsuxseg3ei16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_f16mf4(...) __riscv_vsuxseg3ei16_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_f16mf4_m(...) __riscv_vsuxseg3ei16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_f32m1(...) __riscv_vsuxseg3ei16_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_f32m1_m(...) __riscv_vsuxseg3ei16_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_f32m2(...) __riscv_vsuxseg3ei16_v_f32m2(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_f32m2_m(...) __riscv_vsuxseg3ei16_v_f32m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_f32mf2(...) __riscv_vsuxseg3ei16_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_f32mf2_m(...) __riscv_vsuxseg3ei16_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_f64m1(...) __riscv_vsuxseg3ei16_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_f64m1_m(...) __riscv_vsuxseg3ei16_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_f64m2(...) __riscv_vsuxseg3ei16_v_f64m2(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_f64m2_m(...) __riscv_vsuxseg3ei16_v_f64m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i16m1(...) __riscv_vsuxseg3ei16_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i16m1_m(...) __riscv_vsuxseg3ei16_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i16m2(...) __riscv_vsuxseg3ei16_v_i16m2(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i16m2_m(...) __riscv_vsuxseg3ei16_v_i16m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i16mf2(...) __riscv_vsuxseg3ei16_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i16mf2_m(...) __riscv_vsuxseg3ei16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i16mf4(...) __riscv_vsuxseg3ei16_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i16mf4_m(...) __riscv_vsuxseg3ei16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i32m1(...) __riscv_vsuxseg3ei16_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i32m1_m(...) __riscv_vsuxseg3ei16_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i32m2(...) __riscv_vsuxseg3ei16_v_i32m2(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i32m2_m(...) __riscv_vsuxseg3ei16_v_i32m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i32mf2(...) __riscv_vsuxseg3ei16_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i32mf2_m(...) __riscv_vsuxseg3ei16_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i64m1(...) __riscv_vsuxseg3ei16_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i64m1_m(...) __riscv_vsuxseg3ei16_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i64m2(...) __riscv_vsuxseg3ei16_v_i64m2(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i64m2_m(...) __riscv_vsuxseg3ei16_v_i64m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i8m1(...) __riscv_vsuxseg3ei16_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i8m1_m(...) __riscv_vsuxseg3ei16_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i8m2(...) __riscv_vsuxseg3ei16_v_i8m2(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i8m2_m(...) __riscv_vsuxseg3ei16_v_i8m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i8mf2(...) __riscv_vsuxseg3ei16_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i8mf2_m(...) __riscv_vsuxseg3ei16_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i8mf4(...) __riscv_vsuxseg3ei16_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i8mf4_m(...) __riscv_vsuxseg3ei16_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i8mf8(...) __riscv_vsuxseg3ei16_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_i8mf8_m(...) __riscv_vsuxseg3ei16_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u16m1(...) __riscv_vsuxseg3ei16_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u16m1_m(...) __riscv_vsuxseg3ei16_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u16m2(...) __riscv_vsuxseg3ei16_v_u16m2(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u16m2_m(...) __riscv_vsuxseg3ei16_v_u16m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u16mf2(...) __riscv_vsuxseg3ei16_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u16mf2_m(...) __riscv_vsuxseg3ei16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u16mf4(...) __riscv_vsuxseg3ei16_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u16mf4_m(...) __riscv_vsuxseg3ei16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u32m1(...) __riscv_vsuxseg3ei16_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u32m1_m(...) __riscv_vsuxseg3ei16_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u32m2(...) __riscv_vsuxseg3ei16_v_u32m2(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u32m2_m(...) __riscv_vsuxseg3ei16_v_u32m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u32mf2(...) __riscv_vsuxseg3ei16_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u32mf2_m(...) __riscv_vsuxseg3ei16_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u64m1(...) __riscv_vsuxseg3ei16_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u64m1_m(...) __riscv_vsuxseg3ei16_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u64m2(...) __riscv_vsuxseg3ei16_v_u64m2(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u64m2_m(...) __riscv_vsuxseg3ei16_v_u64m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u8m1(...) __riscv_vsuxseg3ei16_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u8m1_m(...) __riscv_vsuxseg3ei16_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u8m2(...) __riscv_vsuxseg3ei16_v_u8m2(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u8m2_m(...) __riscv_vsuxseg3ei16_v_u8m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u8mf2(...) __riscv_vsuxseg3ei16_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u8mf2_m(...) __riscv_vsuxseg3ei16_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u8mf4(...) __riscv_vsuxseg3ei16_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u8mf4_m(...) __riscv_vsuxseg3ei16_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u8mf8(...) __riscv_vsuxseg3ei16_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg3ei16_v_u8mf8_m(...) __riscv_vsuxseg3ei16_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_f16m1(...) __riscv_vsuxseg3ei32_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_f16m1_m(...) __riscv_vsuxseg3ei32_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_f16m2(...) __riscv_vsuxseg3ei32_v_f16m2(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_f16m2_m(...) __riscv_vsuxseg3ei32_v_f16m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_f16mf2(...) __riscv_vsuxseg3ei32_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_f16mf2_m(...) __riscv_vsuxseg3ei32_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_f16mf4(...) __riscv_vsuxseg3ei32_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_f16mf4_m(...) __riscv_vsuxseg3ei32_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_f32m1(...) __riscv_vsuxseg3ei32_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_f32m1_m(...) __riscv_vsuxseg3ei32_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_f32m2(...) __riscv_vsuxseg3ei32_v_f32m2(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_f32m2_m(...) __riscv_vsuxseg3ei32_v_f32m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_f32mf2(...) __riscv_vsuxseg3ei32_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_f32mf2_m(...) __riscv_vsuxseg3ei32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_f64m1(...) __riscv_vsuxseg3ei32_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_f64m1_m(...) __riscv_vsuxseg3ei32_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_f64m2(...) __riscv_vsuxseg3ei32_v_f64m2(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_f64m2_m(...) __riscv_vsuxseg3ei32_v_f64m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i16m1(...) __riscv_vsuxseg3ei32_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i16m1_m(...) __riscv_vsuxseg3ei32_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i16m2(...) __riscv_vsuxseg3ei32_v_i16m2(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i16m2_m(...) __riscv_vsuxseg3ei32_v_i16m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i16mf2(...) __riscv_vsuxseg3ei32_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i16mf2_m(...) __riscv_vsuxseg3ei32_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i16mf4(...) __riscv_vsuxseg3ei32_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i16mf4_m(...) __riscv_vsuxseg3ei32_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i32m1(...) __riscv_vsuxseg3ei32_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i32m1_m(...) __riscv_vsuxseg3ei32_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i32m2(...) __riscv_vsuxseg3ei32_v_i32m2(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i32m2_m(...) __riscv_vsuxseg3ei32_v_i32m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i32mf2(...) __riscv_vsuxseg3ei32_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i32mf2_m(...) __riscv_vsuxseg3ei32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i64m1(...) __riscv_vsuxseg3ei32_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i64m1_m(...) __riscv_vsuxseg3ei32_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i64m2(...) __riscv_vsuxseg3ei32_v_i64m2(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i64m2_m(...) __riscv_vsuxseg3ei32_v_i64m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i8m1(...) __riscv_vsuxseg3ei32_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i8m1_m(...) __riscv_vsuxseg3ei32_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i8m2(...) __riscv_vsuxseg3ei32_v_i8m2(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i8m2_m(...) __riscv_vsuxseg3ei32_v_i8m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i8mf2(...) __riscv_vsuxseg3ei32_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i8mf2_m(...) __riscv_vsuxseg3ei32_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i8mf4(...) __riscv_vsuxseg3ei32_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i8mf4_m(...) __riscv_vsuxseg3ei32_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i8mf8(...) __riscv_vsuxseg3ei32_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_i8mf8_m(...) __riscv_vsuxseg3ei32_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u16m1(...) __riscv_vsuxseg3ei32_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u16m1_m(...) __riscv_vsuxseg3ei32_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u16m2(...) __riscv_vsuxseg3ei32_v_u16m2(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u16m2_m(...) __riscv_vsuxseg3ei32_v_u16m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u16mf2(...) __riscv_vsuxseg3ei32_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u16mf2_m(...) __riscv_vsuxseg3ei32_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u16mf4(...) __riscv_vsuxseg3ei32_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u16mf4_m(...) __riscv_vsuxseg3ei32_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u32m1(...) __riscv_vsuxseg3ei32_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u32m1_m(...) __riscv_vsuxseg3ei32_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u32m2(...) __riscv_vsuxseg3ei32_v_u32m2(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u32m2_m(...) __riscv_vsuxseg3ei32_v_u32m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u32mf2(...) __riscv_vsuxseg3ei32_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u32mf2_m(...) __riscv_vsuxseg3ei32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u64m1(...) __riscv_vsuxseg3ei32_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u64m1_m(...) __riscv_vsuxseg3ei32_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u64m2(...) __riscv_vsuxseg3ei32_v_u64m2(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u64m2_m(...) __riscv_vsuxseg3ei32_v_u64m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u8m1(...) __riscv_vsuxseg3ei32_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u8m1_m(...) __riscv_vsuxseg3ei32_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u8m2(...) __riscv_vsuxseg3ei32_v_u8m2(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u8m2_m(...) __riscv_vsuxseg3ei32_v_u8m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u8mf2(...) __riscv_vsuxseg3ei32_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u8mf2_m(...) __riscv_vsuxseg3ei32_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u8mf4(...) __riscv_vsuxseg3ei32_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u8mf4_m(...) __riscv_vsuxseg3ei32_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u8mf8(...) __riscv_vsuxseg3ei32_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg3ei32_v_u8mf8_m(...) __riscv_vsuxseg3ei32_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_f16m1(...) __riscv_vsuxseg3ei64_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_f16m1_m(...) __riscv_vsuxseg3ei64_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_f16m2(...) __riscv_vsuxseg3ei64_v_f16m2(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_f16m2_m(...) __riscv_vsuxseg3ei64_v_f16m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_f16mf2(...) __riscv_vsuxseg3ei64_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_f16mf2_m(...) __riscv_vsuxseg3ei64_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_f16mf4(...) __riscv_vsuxseg3ei64_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_f16mf4_m(...) __riscv_vsuxseg3ei64_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_f32m1(...) __riscv_vsuxseg3ei64_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_f32m1_m(...) __riscv_vsuxseg3ei64_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_f32m2(...) __riscv_vsuxseg3ei64_v_f32m2(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_f32m2_m(...) __riscv_vsuxseg3ei64_v_f32m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_f32mf2(...) __riscv_vsuxseg3ei64_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_f32mf2_m(...) __riscv_vsuxseg3ei64_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_f64m1(...) __riscv_vsuxseg3ei64_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_f64m1_m(...) __riscv_vsuxseg3ei64_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_f64m2(...) __riscv_vsuxseg3ei64_v_f64m2(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_f64m2_m(...) __riscv_vsuxseg3ei64_v_f64m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i16m1(...) __riscv_vsuxseg3ei64_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i16m1_m(...) __riscv_vsuxseg3ei64_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i16m2(...) __riscv_vsuxseg3ei64_v_i16m2(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i16m2_m(...) __riscv_vsuxseg3ei64_v_i16m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i16mf2(...) __riscv_vsuxseg3ei64_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i16mf2_m(...) __riscv_vsuxseg3ei64_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i16mf4(...) __riscv_vsuxseg3ei64_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i16mf4_m(...) __riscv_vsuxseg3ei64_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i32m1(...) __riscv_vsuxseg3ei64_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i32m1_m(...) __riscv_vsuxseg3ei64_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i32m2(...) __riscv_vsuxseg3ei64_v_i32m2(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i32m2_m(...) __riscv_vsuxseg3ei64_v_i32m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i32mf2(...) __riscv_vsuxseg3ei64_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i32mf2_m(...) __riscv_vsuxseg3ei64_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i64m1(...) __riscv_vsuxseg3ei64_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i64m1_m(...) __riscv_vsuxseg3ei64_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i64m2(...) __riscv_vsuxseg3ei64_v_i64m2(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i64m2_m(...) __riscv_vsuxseg3ei64_v_i64m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i8m1(...) __riscv_vsuxseg3ei64_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i8m1_m(...) __riscv_vsuxseg3ei64_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i8mf2(...) __riscv_vsuxseg3ei64_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i8mf2_m(...) __riscv_vsuxseg3ei64_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i8mf4(...) __riscv_vsuxseg3ei64_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i8mf4_m(...) __riscv_vsuxseg3ei64_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i8mf8(...) __riscv_vsuxseg3ei64_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_i8mf8_m(...) __riscv_vsuxseg3ei64_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u16m1(...) __riscv_vsuxseg3ei64_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u16m1_m(...) __riscv_vsuxseg3ei64_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u16m2(...) __riscv_vsuxseg3ei64_v_u16m2(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u16m2_m(...) __riscv_vsuxseg3ei64_v_u16m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u16mf2(...) __riscv_vsuxseg3ei64_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u16mf2_m(...) __riscv_vsuxseg3ei64_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u16mf4(...) __riscv_vsuxseg3ei64_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u16mf4_m(...) __riscv_vsuxseg3ei64_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u32m1(...) __riscv_vsuxseg3ei64_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u32m1_m(...) __riscv_vsuxseg3ei64_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u32m2(...) __riscv_vsuxseg3ei64_v_u32m2(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u32m2_m(...) __riscv_vsuxseg3ei64_v_u32m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u32mf2(...) __riscv_vsuxseg3ei64_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u32mf2_m(...) __riscv_vsuxseg3ei64_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u64m1(...) __riscv_vsuxseg3ei64_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u64m1_m(...) __riscv_vsuxseg3ei64_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u64m2(...) __riscv_vsuxseg3ei64_v_u64m2(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u64m2_m(...) __riscv_vsuxseg3ei64_v_u64m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u8m1(...) __riscv_vsuxseg3ei64_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u8m1_m(...) __riscv_vsuxseg3ei64_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u8mf2(...) __riscv_vsuxseg3ei64_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u8mf2_m(...) __riscv_vsuxseg3ei64_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u8mf4(...) __riscv_vsuxseg3ei64_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u8mf4_m(...) __riscv_vsuxseg3ei64_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u8mf8(...) __riscv_vsuxseg3ei64_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg3ei64_v_u8mf8_m(...) __riscv_vsuxseg3ei64_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_f16m1(...) __riscv_vsuxseg3ei8_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_f16m1_m(...) __riscv_vsuxseg3ei8_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_f16m2(...) __riscv_vsuxseg3ei8_v_f16m2(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_f16m2_m(...) __riscv_vsuxseg3ei8_v_f16m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_f16mf2(...) __riscv_vsuxseg3ei8_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_f16mf2_m(...) __riscv_vsuxseg3ei8_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_f16mf4(...) __riscv_vsuxseg3ei8_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_f16mf4_m(...) __riscv_vsuxseg3ei8_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_f32m1(...) __riscv_vsuxseg3ei8_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_f32m1_m(...) __riscv_vsuxseg3ei8_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_f32m2(...) __riscv_vsuxseg3ei8_v_f32m2(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_f32m2_m(...) __riscv_vsuxseg3ei8_v_f32m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_f32mf2(...) __riscv_vsuxseg3ei8_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_f32mf2_m(...) __riscv_vsuxseg3ei8_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_f64m1(...) __riscv_vsuxseg3ei8_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_f64m1_m(...) __riscv_vsuxseg3ei8_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_f64m2(...) __riscv_vsuxseg3ei8_v_f64m2(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_f64m2_m(...) __riscv_vsuxseg3ei8_v_f64m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i16m1(...) __riscv_vsuxseg3ei8_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i16m1_m(...) __riscv_vsuxseg3ei8_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i16m2(...) __riscv_vsuxseg3ei8_v_i16m2(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i16m2_m(...) __riscv_vsuxseg3ei8_v_i16m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i16mf2(...) __riscv_vsuxseg3ei8_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i16mf2_m(...) __riscv_vsuxseg3ei8_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i16mf4(...) __riscv_vsuxseg3ei8_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i16mf4_m(...) __riscv_vsuxseg3ei8_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i32m1(...) __riscv_vsuxseg3ei8_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i32m1_m(...) __riscv_vsuxseg3ei8_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i32m2(...) __riscv_vsuxseg3ei8_v_i32m2(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i32m2_m(...) __riscv_vsuxseg3ei8_v_i32m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i32mf2(...) __riscv_vsuxseg3ei8_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i32mf2_m(...) __riscv_vsuxseg3ei8_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i64m1(...) __riscv_vsuxseg3ei8_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i64m1_m(...) __riscv_vsuxseg3ei8_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i64m2(...) __riscv_vsuxseg3ei8_v_i64m2(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i64m2_m(...) __riscv_vsuxseg3ei8_v_i64m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i8m1(...) __riscv_vsuxseg3ei8_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i8m1_m(...) __riscv_vsuxseg3ei8_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i8m2(...) __riscv_vsuxseg3ei8_v_i8m2(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i8m2_m(...) __riscv_vsuxseg3ei8_v_i8m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i8mf2(...) __riscv_vsuxseg3ei8_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i8mf2_m(...) __riscv_vsuxseg3ei8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i8mf4(...) __riscv_vsuxseg3ei8_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i8mf4_m(...) __riscv_vsuxseg3ei8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i8mf8(...) __riscv_vsuxseg3ei8_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_i8mf8_m(...) __riscv_vsuxseg3ei8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u16m1(...) __riscv_vsuxseg3ei8_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u16m1_m(...) __riscv_vsuxseg3ei8_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u16m2(...) __riscv_vsuxseg3ei8_v_u16m2(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u16m2_m(...) __riscv_vsuxseg3ei8_v_u16m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u16mf2(...) __riscv_vsuxseg3ei8_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u16mf2_m(...) __riscv_vsuxseg3ei8_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u16mf4(...) __riscv_vsuxseg3ei8_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u16mf4_m(...) __riscv_vsuxseg3ei8_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u32m1(...) __riscv_vsuxseg3ei8_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u32m1_m(...) __riscv_vsuxseg3ei8_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u32m2(...) __riscv_vsuxseg3ei8_v_u32m2(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u32m2_m(...) __riscv_vsuxseg3ei8_v_u32m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u32mf2(...) __riscv_vsuxseg3ei8_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u32mf2_m(...) __riscv_vsuxseg3ei8_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u64m1(...) __riscv_vsuxseg3ei8_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u64m1_m(...) __riscv_vsuxseg3ei8_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u64m2(...) __riscv_vsuxseg3ei8_v_u64m2(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u64m2_m(...) __riscv_vsuxseg3ei8_v_u64m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u8m1(...) __riscv_vsuxseg3ei8_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u8m1_m(...) __riscv_vsuxseg3ei8_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u8m2(...) __riscv_vsuxseg3ei8_v_u8m2(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u8m2_m(...) __riscv_vsuxseg3ei8_v_u8m2_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u8mf2(...) __riscv_vsuxseg3ei8_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u8mf2_m(...) __riscv_vsuxseg3ei8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u8mf4(...) __riscv_vsuxseg3ei8_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u8mf4_m(...) __riscv_vsuxseg3ei8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u8mf8(...) __riscv_vsuxseg3ei8_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg3ei8_v_u8mf8_m(...) __riscv_vsuxseg3ei8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_f16m1(...) __riscv_vsuxseg4ei16_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_f16m1_m(...) __riscv_vsuxseg4ei16_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_f16m2(...) __riscv_vsuxseg4ei16_v_f16m2(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_f16m2_m(...) __riscv_vsuxseg4ei16_v_f16m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_f16mf2(...) __riscv_vsuxseg4ei16_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_f16mf2_m(...) __riscv_vsuxseg4ei16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_f16mf4(...) __riscv_vsuxseg4ei16_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_f16mf4_m(...) __riscv_vsuxseg4ei16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_f32m1(...) __riscv_vsuxseg4ei16_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_f32m1_m(...) __riscv_vsuxseg4ei16_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_f32m2(...) __riscv_vsuxseg4ei16_v_f32m2(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_f32m2_m(...) __riscv_vsuxseg4ei16_v_f32m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_f32mf2(...) __riscv_vsuxseg4ei16_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_f32mf2_m(...) __riscv_vsuxseg4ei16_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_f64m1(...) __riscv_vsuxseg4ei16_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_f64m1_m(...) __riscv_vsuxseg4ei16_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_f64m2(...) __riscv_vsuxseg4ei16_v_f64m2(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_f64m2_m(...) __riscv_vsuxseg4ei16_v_f64m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i16m1(...) __riscv_vsuxseg4ei16_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i16m1_m(...) __riscv_vsuxseg4ei16_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i16m2(...) __riscv_vsuxseg4ei16_v_i16m2(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i16m2_m(...) __riscv_vsuxseg4ei16_v_i16m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i16mf2(...) __riscv_vsuxseg4ei16_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i16mf2_m(...) __riscv_vsuxseg4ei16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i16mf4(...) __riscv_vsuxseg4ei16_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i16mf4_m(...) __riscv_vsuxseg4ei16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i32m1(...) __riscv_vsuxseg4ei16_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i32m1_m(...) __riscv_vsuxseg4ei16_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i32m2(...) __riscv_vsuxseg4ei16_v_i32m2(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i32m2_m(...) __riscv_vsuxseg4ei16_v_i32m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i32mf2(...) __riscv_vsuxseg4ei16_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i32mf2_m(...) __riscv_vsuxseg4ei16_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i64m1(...) __riscv_vsuxseg4ei16_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i64m1_m(...) __riscv_vsuxseg4ei16_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i64m2(...) __riscv_vsuxseg4ei16_v_i64m2(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i64m2_m(...) __riscv_vsuxseg4ei16_v_i64m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i8m1(...) __riscv_vsuxseg4ei16_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i8m1_m(...) __riscv_vsuxseg4ei16_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i8m2(...) __riscv_vsuxseg4ei16_v_i8m2(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i8m2_m(...) __riscv_vsuxseg4ei16_v_i8m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i8mf2(...) __riscv_vsuxseg4ei16_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i8mf2_m(...) __riscv_vsuxseg4ei16_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i8mf4(...) __riscv_vsuxseg4ei16_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i8mf4_m(...) __riscv_vsuxseg4ei16_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i8mf8(...) __riscv_vsuxseg4ei16_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_i8mf8_m(...) __riscv_vsuxseg4ei16_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u16m1(...) __riscv_vsuxseg4ei16_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u16m1_m(...) __riscv_vsuxseg4ei16_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u16m2(...) __riscv_vsuxseg4ei16_v_u16m2(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u16m2_m(...) __riscv_vsuxseg4ei16_v_u16m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u16mf2(...) __riscv_vsuxseg4ei16_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u16mf2_m(...) __riscv_vsuxseg4ei16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u16mf4(...) __riscv_vsuxseg4ei16_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u16mf4_m(...) __riscv_vsuxseg4ei16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u32m1(...) __riscv_vsuxseg4ei16_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u32m1_m(...) __riscv_vsuxseg4ei16_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u32m2(...) __riscv_vsuxseg4ei16_v_u32m2(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u32m2_m(...) __riscv_vsuxseg4ei16_v_u32m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u32mf2(...) __riscv_vsuxseg4ei16_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u32mf2_m(...) __riscv_vsuxseg4ei16_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u64m1(...) __riscv_vsuxseg4ei16_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u64m1_m(...) __riscv_vsuxseg4ei16_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u64m2(...) __riscv_vsuxseg4ei16_v_u64m2(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u64m2_m(...) __riscv_vsuxseg4ei16_v_u64m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u8m1(...) __riscv_vsuxseg4ei16_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u8m1_m(...) __riscv_vsuxseg4ei16_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u8m2(...) __riscv_vsuxseg4ei16_v_u8m2(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u8m2_m(...) __riscv_vsuxseg4ei16_v_u8m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u8mf2(...) __riscv_vsuxseg4ei16_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u8mf2_m(...) __riscv_vsuxseg4ei16_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u8mf4(...) __riscv_vsuxseg4ei16_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u8mf4_m(...) __riscv_vsuxseg4ei16_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u8mf8(...) __riscv_vsuxseg4ei16_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg4ei16_v_u8mf8_m(...) __riscv_vsuxseg4ei16_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_f16m1(...) __riscv_vsuxseg4ei32_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_f16m1_m(...) __riscv_vsuxseg4ei32_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_f16m2(...) __riscv_vsuxseg4ei32_v_f16m2(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_f16m2_m(...) __riscv_vsuxseg4ei32_v_f16m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_f16mf2(...) __riscv_vsuxseg4ei32_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_f16mf2_m(...) __riscv_vsuxseg4ei32_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_f16mf4(...) __riscv_vsuxseg4ei32_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_f16mf4_m(...) __riscv_vsuxseg4ei32_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_f32m1(...) __riscv_vsuxseg4ei32_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_f32m1_m(...) __riscv_vsuxseg4ei32_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_f32m2(...) __riscv_vsuxseg4ei32_v_f32m2(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_f32m2_m(...) __riscv_vsuxseg4ei32_v_f32m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_f32mf2(...) __riscv_vsuxseg4ei32_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_f32mf2_m(...) __riscv_vsuxseg4ei32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_f64m1(...) __riscv_vsuxseg4ei32_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_f64m1_m(...) __riscv_vsuxseg4ei32_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_f64m2(...) __riscv_vsuxseg4ei32_v_f64m2(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_f64m2_m(...) __riscv_vsuxseg4ei32_v_f64m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i16m1(...) __riscv_vsuxseg4ei32_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i16m1_m(...) __riscv_vsuxseg4ei32_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i16m2(...) __riscv_vsuxseg4ei32_v_i16m2(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i16m2_m(...) __riscv_vsuxseg4ei32_v_i16m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i16mf2(...) __riscv_vsuxseg4ei32_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i16mf2_m(...) __riscv_vsuxseg4ei32_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i16mf4(...) __riscv_vsuxseg4ei32_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i16mf4_m(...) __riscv_vsuxseg4ei32_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i32m1(...) __riscv_vsuxseg4ei32_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i32m1_m(...) __riscv_vsuxseg4ei32_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i32m2(...) __riscv_vsuxseg4ei32_v_i32m2(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i32m2_m(...) __riscv_vsuxseg4ei32_v_i32m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i32mf2(...) __riscv_vsuxseg4ei32_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i32mf2_m(...) __riscv_vsuxseg4ei32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i64m1(...) __riscv_vsuxseg4ei32_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i64m1_m(...) __riscv_vsuxseg4ei32_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i64m2(...) __riscv_vsuxseg4ei32_v_i64m2(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i64m2_m(...) __riscv_vsuxseg4ei32_v_i64m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i8m1(...) __riscv_vsuxseg4ei32_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i8m1_m(...) __riscv_vsuxseg4ei32_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i8m2(...) __riscv_vsuxseg4ei32_v_i8m2(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i8m2_m(...) __riscv_vsuxseg4ei32_v_i8m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i8mf2(...) __riscv_vsuxseg4ei32_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i8mf2_m(...) __riscv_vsuxseg4ei32_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i8mf4(...) __riscv_vsuxseg4ei32_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i8mf4_m(...) __riscv_vsuxseg4ei32_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i8mf8(...) __riscv_vsuxseg4ei32_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_i8mf8_m(...) __riscv_vsuxseg4ei32_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u16m1(...) __riscv_vsuxseg4ei32_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u16m1_m(...) __riscv_vsuxseg4ei32_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u16m2(...) __riscv_vsuxseg4ei32_v_u16m2(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u16m2_m(...) __riscv_vsuxseg4ei32_v_u16m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u16mf2(...) __riscv_vsuxseg4ei32_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u16mf2_m(...) __riscv_vsuxseg4ei32_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u16mf4(...) __riscv_vsuxseg4ei32_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u16mf4_m(...) __riscv_vsuxseg4ei32_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u32m1(...) __riscv_vsuxseg4ei32_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u32m1_m(...) __riscv_vsuxseg4ei32_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u32m2(...) __riscv_vsuxseg4ei32_v_u32m2(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u32m2_m(...) __riscv_vsuxseg4ei32_v_u32m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u32mf2(...) __riscv_vsuxseg4ei32_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u32mf2_m(...) __riscv_vsuxseg4ei32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u64m1(...) __riscv_vsuxseg4ei32_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u64m1_m(...) __riscv_vsuxseg4ei32_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u64m2(...) __riscv_vsuxseg4ei32_v_u64m2(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u64m2_m(...) __riscv_vsuxseg4ei32_v_u64m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u8m1(...) __riscv_vsuxseg4ei32_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u8m1_m(...) __riscv_vsuxseg4ei32_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u8m2(...) __riscv_vsuxseg4ei32_v_u8m2(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u8m2_m(...) __riscv_vsuxseg4ei32_v_u8m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u8mf2(...) __riscv_vsuxseg4ei32_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u8mf2_m(...) __riscv_vsuxseg4ei32_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u8mf4(...) __riscv_vsuxseg4ei32_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u8mf4_m(...) __riscv_vsuxseg4ei32_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u8mf8(...) __riscv_vsuxseg4ei32_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg4ei32_v_u8mf8_m(...) __riscv_vsuxseg4ei32_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_f16m1(...) __riscv_vsuxseg4ei64_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_f16m1_m(...) __riscv_vsuxseg4ei64_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_f16m2(...) __riscv_vsuxseg4ei64_v_f16m2(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_f16m2_m(...) __riscv_vsuxseg4ei64_v_f16m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_f16mf2(...) __riscv_vsuxseg4ei64_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_f16mf2_m(...) __riscv_vsuxseg4ei64_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_f16mf4(...) __riscv_vsuxseg4ei64_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_f16mf4_m(...) __riscv_vsuxseg4ei64_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_f32m1(...) __riscv_vsuxseg4ei64_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_f32m1_m(...) __riscv_vsuxseg4ei64_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_f32m2(...) __riscv_vsuxseg4ei64_v_f32m2(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_f32m2_m(...) __riscv_vsuxseg4ei64_v_f32m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_f32mf2(...) __riscv_vsuxseg4ei64_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_f32mf2_m(...) __riscv_vsuxseg4ei64_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_f64m1(...) __riscv_vsuxseg4ei64_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_f64m1_m(...) __riscv_vsuxseg4ei64_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_f64m2(...) __riscv_vsuxseg4ei64_v_f64m2(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_f64m2_m(...) __riscv_vsuxseg4ei64_v_f64m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i16m1(...) __riscv_vsuxseg4ei64_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i16m1_m(...) __riscv_vsuxseg4ei64_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i16m2(...) __riscv_vsuxseg4ei64_v_i16m2(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i16m2_m(...) __riscv_vsuxseg4ei64_v_i16m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i16mf2(...) __riscv_vsuxseg4ei64_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i16mf2_m(...) __riscv_vsuxseg4ei64_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i16mf4(...) __riscv_vsuxseg4ei64_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i16mf4_m(...) __riscv_vsuxseg4ei64_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i32m1(...) __riscv_vsuxseg4ei64_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i32m1_m(...) __riscv_vsuxseg4ei64_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i32m2(...) __riscv_vsuxseg4ei64_v_i32m2(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i32m2_m(...) __riscv_vsuxseg4ei64_v_i32m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i32mf2(...) __riscv_vsuxseg4ei64_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i32mf2_m(...) __riscv_vsuxseg4ei64_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i64m1(...) __riscv_vsuxseg4ei64_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i64m1_m(...) __riscv_vsuxseg4ei64_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i64m2(...) __riscv_vsuxseg4ei64_v_i64m2(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i64m2_m(...) __riscv_vsuxseg4ei64_v_i64m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i8m1(...) __riscv_vsuxseg4ei64_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i8m1_m(...) __riscv_vsuxseg4ei64_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i8mf2(...) __riscv_vsuxseg4ei64_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i8mf2_m(...) __riscv_vsuxseg4ei64_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i8mf4(...) __riscv_vsuxseg4ei64_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i8mf4_m(...) __riscv_vsuxseg4ei64_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i8mf8(...) __riscv_vsuxseg4ei64_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_i8mf8_m(...) __riscv_vsuxseg4ei64_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u16m1(...) __riscv_vsuxseg4ei64_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u16m1_m(...) __riscv_vsuxseg4ei64_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u16m2(...) __riscv_vsuxseg4ei64_v_u16m2(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u16m2_m(...) __riscv_vsuxseg4ei64_v_u16m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u16mf2(...) __riscv_vsuxseg4ei64_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u16mf2_m(...) __riscv_vsuxseg4ei64_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u16mf4(...) __riscv_vsuxseg4ei64_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u16mf4_m(...) __riscv_vsuxseg4ei64_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u32m1(...) __riscv_vsuxseg4ei64_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u32m1_m(...) __riscv_vsuxseg4ei64_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u32m2(...) __riscv_vsuxseg4ei64_v_u32m2(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u32m2_m(...) __riscv_vsuxseg4ei64_v_u32m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u32mf2(...) __riscv_vsuxseg4ei64_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u32mf2_m(...) __riscv_vsuxseg4ei64_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u64m1(...) __riscv_vsuxseg4ei64_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u64m1_m(...) __riscv_vsuxseg4ei64_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u64m2(...) __riscv_vsuxseg4ei64_v_u64m2(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u64m2_m(...) __riscv_vsuxseg4ei64_v_u64m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u8m1(...) __riscv_vsuxseg4ei64_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u8m1_m(...) __riscv_vsuxseg4ei64_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u8mf2(...) __riscv_vsuxseg4ei64_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u8mf2_m(...) __riscv_vsuxseg4ei64_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u8mf4(...) __riscv_vsuxseg4ei64_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u8mf4_m(...) __riscv_vsuxseg4ei64_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u8mf8(...) __riscv_vsuxseg4ei64_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg4ei64_v_u8mf8_m(...) __riscv_vsuxseg4ei64_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_f16m1(...) __riscv_vsuxseg4ei8_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_f16m1_m(...) __riscv_vsuxseg4ei8_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_f16m2(...) __riscv_vsuxseg4ei8_v_f16m2(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_f16m2_m(...) __riscv_vsuxseg4ei8_v_f16m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_f16mf2(...) __riscv_vsuxseg4ei8_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_f16mf2_m(...) __riscv_vsuxseg4ei8_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_f16mf4(...) __riscv_vsuxseg4ei8_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_f16mf4_m(...) __riscv_vsuxseg4ei8_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_f32m1(...) __riscv_vsuxseg4ei8_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_f32m1_m(...) __riscv_vsuxseg4ei8_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_f32m2(...) __riscv_vsuxseg4ei8_v_f32m2(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_f32m2_m(...) __riscv_vsuxseg4ei8_v_f32m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_f32mf2(...) __riscv_vsuxseg4ei8_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_f32mf2_m(...) __riscv_vsuxseg4ei8_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_f64m1(...) __riscv_vsuxseg4ei8_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_f64m1_m(...) __riscv_vsuxseg4ei8_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_f64m2(...) __riscv_vsuxseg4ei8_v_f64m2(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_f64m2_m(...) __riscv_vsuxseg4ei8_v_f64m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i16m1(...) __riscv_vsuxseg4ei8_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i16m1_m(...) __riscv_vsuxseg4ei8_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i16m2(...) __riscv_vsuxseg4ei8_v_i16m2(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i16m2_m(...) __riscv_vsuxseg4ei8_v_i16m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i16mf2(...) __riscv_vsuxseg4ei8_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i16mf2_m(...) __riscv_vsuxseg4ei8_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i16mf4(...) __riscv_vsuxseg4ei8_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i16mf4_m(...) __riscv_vsuxseg4ei8_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i32m1(...) __riscv_vsuxseg4ei8_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i32m1_m(...) __riscv_vsuxseg4ei8_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i32m2(...) __riscv_vsuxseg4ei8_v_i32m2(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i32m2_m(...) __riscv_vsuxseg4ei8_v_i32m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i32mf2(...) __riscv_vsuxseg4ei8_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i32mf2_m(...) __riscv_vsuxseg4ei8_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i64m1(...) __riscv_vsuxseg4ei8_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i64m1_m(...) __riscv_vsuxseg4ei8_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i64m2(...) __riscv_vsuxseg4ei8_v_i64m2(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i64m2_m(...) __riscv_vsuxseg4ei8_v_i64m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i8m1(...) __riscv_vsuxseg4ei8_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i8m1_m(...) __riscv_vsuxseg4ei8_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i8m2(...) __riscv_vsuxseg4ei8_v_i8m2(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i8m2_m(...) __riscv_vsuxseg4ei8_v_i8m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i8mf2(...) __riscv_vsuxseg4ei8_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i8mf2_m(...) __riscv_vsuxseg4ei8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i8mf4(...) __riscv_vsuxseg4ei8_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i8mf4_m(...) __riscv_vsuxseg4ei8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i8mf8(...) __riscv_vsuxseg4ei8_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_i8mf8_m(...) __riscv_vsuxseg4ei8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u16m1(...) __riscv_vsuxseg4ei8_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u16m1_m(...) __riscv_vsuxseg4ei8_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u16m2(...) __riscv_vsuxseg4ei8_v_u16m2(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u16m2_m(...) __riscv_vsuxseg4ei8_v_u16m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u16mf2(...) __riscv_vsuxseg4ei8_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u16mf2_m(...) __riscv_vsuxseg4ei8_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u16mf4(...) __riscv_vsuxseg4ei8_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u16mf4_m(...) __riscv_vsuxseg4ei8_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u32m1(...) __riscv_vsuxseg4ei8_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u32m1_m(...) __riscv_vsuxseg4ei8_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u32m2(...) __riscv_vsuxseg4ei8_v_u32m2(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u32m2_m(...) __riscv_vsuxseg4ei8_v_u32m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u32mf2(...) __riscv_vsuxseg4ei8_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u32mf2_m(...) __riscv_vsuxseg4ei8_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u64m1(...) __riscv_vsuxseg4ei8_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u64m1_m(...) __riscv_vsuxseg4ei8_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u64m2(...) __riscv_vsuxseg4ei8_v_u64m2(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u64m2_m(...) __riscv_vsuxseg4ei8_v_u64m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u8m1(...) __riscv_vsuxseg4ei8_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u8m1_m(...) __riscv_vsuxseg4ei8_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u8m2(...) __riscv_vsuxseg4ei8_v_u8m2(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u8m2_m(...) __riscv_vsuxseg4ei8_v_u8m2_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u8mf2(...) __riscv_vsuxseg4ei8_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u8mf2_m(...) __riscv_vsuxseg4ei8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u8mf4(...) __riscv_vsuxseg4ei8_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u8mf4_m(...) __riscv_vsuxseg4ei8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u8mf8(...) __riscv_vsuxseg4ei8_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg4ei8_v_u8mf8_m(...) __riscv_vsuxseg4ei8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_f16m1(...) __riscv_vsuxseg5ei16_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_f16m1_m(...) __riscv_vsuxseg5ei16_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_f16mf2(...) __riscv_vsuxseg5ei16_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_f16mf2_m(...) __riscv_vsuxseg5ei16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_f16mf4(...) __riscv_vsuxseg5ei16_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_f16mf4_m(...) __riscv_vsuxseg5ei16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_f32m1(...) __riscv_vsuxseg5ei16_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_f32m1_m(...) __riscv_vsuxseg5ei16_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_f32mf2(...) __riscv_vsuxseg5ei16_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_f32mf2_m(...) __riscv_vsuxseg5ei16_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_f64m1(...) __riscv_vsuxseg5ei16_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_f64m1_m(...) __riscv_vsuxseg5ei16_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_i16m1(...) __riscv_vsuxseg5ei16_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_i16m1_m(...) __riscv_vsuxseg5ei16_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_i16mf2(...) __riscv_vsuxseg5ei16_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_i16mf2_m(...) __riscv_vsuxseg5ei16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_i16mf4(...) __riscv_vsuxseg5ei16_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_i16mf4_m(...) __riscv_vsuxseg5ei16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_i32m1(...) __riscv_vsuxseg5ei16_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_i32m1_m(...) __riscv_vsuxseg5ei16_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_i32mf2(...) __riscv_vsuxseg5ei16_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_i32mf2_m(...) __riscv_vsuxseg5ei16_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_i64m1(...) __riscv_vsuxseg5ei16_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_i64m1_m(...) __riscv_vsuxseg5ei16_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_i8m1(...) __riscv_vsuxseg5ei16_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_i8m1_m(...) __riscv_vsuxseg5ei16_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_i8mf2(...) __riscv_vsuxseg5ei16_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_i8mf2_m(...) __riscv_vsuxseg5ei16_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_i8mf4(...) __riscv_vsuxseg5ei16_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_i8mf4_m(...) __riscv_vsuxseg5ei16_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_i8mf8(...) __riscv_vsuxseg5ei16_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_i8mf8_m(...) __riscv_vsuxseg5ei16_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_u16m1(...) __riscv_vsuxseg5ei16_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_u16m1_m(...) __riscv_vsuxseg5ei16_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_u16mf2(...) __riscv_vsuxseg5ei16_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_u16mf2_m(...) __riscv_vsuxseg5ei16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_u16mf4(...) __riscv_vsuxseg5ei16_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_u16mf4_m(...) __riscv_vsuxseg5ei16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_u32m1(...) __riscv_vsuxseg5ei16_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_u32m1_m(...) __riscv_vsuxseg5ei16_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_u32mf2(...) __riscv_vsuxseg5ei16_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_u32mf2_m(...) __riscv_vsuxseg5ei16_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_u64m1(...) __riscv_vsuxseg5ei16_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_u64m1_m(...) __riscv_vsuxseg5ei16_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_u8m1(...) __riscv_vsuxseg5ei16_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_u8m1_m(...) __riscv_vsuxseg5ei16_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_u8mf2(...) __riscv_vsuxseg5ei16_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_u8mf2_m(...) __riscv_vsuxseg5ei16_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_u8mf4(...) __riscv_vsuxseg5ei16_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_u8mf4_m(...) __riscv_vsuxseg5ei16_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_u8mf8(...) __riscv_vsuxseg5ei16_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg5ei16_v_u8mf8_m(...) __riscv_vsuxseg5ei16_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_f16m1(...) __riscv_vsuxseg5ei32_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_f16m1_m(...) __riscv_vsuxseg5ei32_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_f16mf2(...) __riscv_vsuxseg5ei32_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_f16mf2_m(...) __riscv_vsuxseg5ei32_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_f16mf4(...) __riscv_vsuxseg5ei32_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_f16mf4_m(...) __riscv_vsuxseg5ei32_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_f32m1(...) __riscv_vsuxseg5ei32_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_f32m1_m(...) __riscv_vsuxseg5ei32_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_f32mf2(...) __riscv_vsuxseg5ei32_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_f32mf2_m(...) __riscv_vsuxseg5ei32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_f64m1(...) __riscv_vsuxseg5ei32_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_f64m1_m(...) __riscv_vsuxseg5ei32_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_i16m1(...) __riscv_vsuxseg5ei32_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_i16m1_m(...) __riscv_vsuxseg5ei32_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_i16mf2(...) __riscv_vsuxseg5ei32_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_i16mf2_m(...) __riscv_vsuxseg5ei32_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_i16mf4(...) __riscv_vsuxseg5ei32_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_i16mf4_m(...) __riscv_vsuxseg5ei32_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_i32m1(...) __riscv_vsuxseg5ei32_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_i32m1_m(...) __riscv_vsuxseg5ei32_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_i32mf2(...) __riscv_vsuxseg5ei32_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_i32mf2_m(...) __riscv_vsuxseg5ei32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_i64m1(...) __riscv_vsuxseg5ei32_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_i64m1_m(...) __riscv_vsuxseg5ei32_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_i8m1(...) __riscv_vsuxseg5ei32_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_i8m1_m(...) __riscv_vsuxseg5ei32_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_i8mf2(...) __riscv_vsuxseg5ei32_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_i8mf2_m(...) __riscv_vsuxseg5ei32_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_i8mf4(...) __riscv_vsuxseg5ei32_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_i8mf4_m(...) __riscv_vsuxseg5ei32_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_i8mf8(...) __riscv_vsuxseg5ei32_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_i8mf8_m(...) __riscv_vsuxseg5ei32_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_u16m1(...) __riscv_vsuxseg5ei32_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_u16m1_m(...) __riscv_vsuxseg5ei32_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_u16mf2(...) __riscv_vsuxseg5ei32_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_u16mf2_m(...) __riscv_vsuxseg5ei32_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_u16mf4(...) __riscv_vsuxseg5ei32_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_u16mf4_m(...) __riscv_vsuxseg5ei32_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_u32m1(...) __riscv_vsuxseg5ei32_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_u32m1_m(...) __riscv_vsuxseg5ei32_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_u32mf2(...) __riscv_vsuxseg5ei32_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_u32mf2_m(...) __riscv_vsuxseg5ei32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_u64m1(...) __riscv_vsuxseg5ei32_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_u64m1_m(...) __riscv_vsuxseg5ei32_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_u8m1(...) __riscv_vsuxseg5ei32_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_u8m1_m(...) __riscv_vsuxseg5ei32_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_u8mf2(...) __riscv_vsuxseg5ei32_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_u8mf2_m(...) __riscv_vsuxseg5ei32_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_u8mf4(...) __riscv_vsuxseg5ei32_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_u8mf4_m(...) __riscv_vsuxseg5ei32_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_u8mf8(...) __riscv_vsuxseg5ei32_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg5ei32_v_u8mf8_m(...) __riscv_vsuxseg5ei32_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_f16m1(...) __riscv_vsuxseg5ei64_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_f16m1_m(...) __riscv_vsuxseg5ei64_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_f16mf2(...) __riscv_vsuxseg5ei64_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_f16mf2_m(...) __riscv_vsuxseg5ei64_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_f16mf4(...) __riscv_vsuxseg5ei64_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_f16mf4_m(...) __riscv_vsuxseg5ei64_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_f32m1(...) __riscv_vsuxseg5ei64_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_f32m1_m(...) __riscv_vsuxseg5ei64_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_f32mf2(...) __riscv_vsuxseg5ei64_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_f32mf2_m(...) __riscv_vsuxseg5ei64_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_f64m1(...) __riscv_vsuxseg5ei64_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_f64m1_m(...) __riscv_vsuxseg5ei64_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_i16m1(...) __riscv_vsuxseg5ei64_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_i16m1_m(...) __riscv_vsuxseg5ei64_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_i16mf2(...) __riscv_vsuxseg5ei64_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_i16mf2_m(...) __riscv_vsuxseg5ei64_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_i16mf4(...) __riscv_vsuxseg5ei64_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_i16mf4_m(...) __riscv_vsuxseg5ei64_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_i32m1(...) __riscv_vsuxseg5ei64_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_i32m1_m(...) __riscv_vsuxseg5ei64_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_i32mf2(...) __riscv_vsuxseg5ei64_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_i32mf2_m(...) __riscv_vsuxseg5ei64_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_i64m1(...) __riscv_vsuxseg5ei64_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_i64m1_m(...) __riscv_vsuxseg5ei64_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_i8m1(...) __riscv_vsuxseg5ei64_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_i8m1_m(...) __riscv_vsuxseg5ei64_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_i8mf2(...) __riscv_vsuxseg5ei64_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_i8mf2_m(...) __riscv_vsuxseg5ei64_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_i8mf4(...) __riscv_vsuxseg5ei64_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_i8mf4_m(...) __riscv_vsuxseg5ei64_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_i8mf8(...) __riscv_vsuxseg5ei64_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_i8mf8_m(...) __riscv_vsuxseg5ei64_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_u16m1(...) __riscv_vsuxseg5ei64_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_u16m1_m(...) __riscv_vsuxseg5ei64_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_u16mf2(...) __riscv_vsuxseg5ei64_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_u16mf2_m(...) __riscv_vsuxseg5ei64_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_u16mf4(...) __riscv_vsuxseg5ei64_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_u16mf4_m(...) __riscv_vsuxseg5ei64_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_u32m1(...) __riscv_vsuxseg5ei64_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_u32m1_m(...) __riscv_vsuxseg5ei64_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_u32mf2(...) __riscv_vsuxseg5ei64_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_u32mf2_m(...) __riscv_vsuxseg5ei64_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_u64m1(...) __riscv_vsuxseg5ei64_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_u64m1_m(...) __riscv_vsuxseg5ei64_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_u8m1(...) __riscv_vsuxseg5ei64_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_u8m1_m(...) __riscv_vsuxseg5ei64_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_u8mf2(...) __riscv_vsuxseg5ei64_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_u8mf2_m(...) __riscv_vsuxseg5ei64_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_u8mf4(...) __riscv_vsuxseg5ei64_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_u8mf4_m(...) __riscv_vsuxseg5ei64_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_u8mf8(...) __riscv_vsuxseg5ei64_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg5ei64_v_u8mf8_m(...) __riscv_vsuxseg5ei64_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_f16m1(...) __riscv_vsuxseg5ei8_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_f16m1_m(...) __riscv_vsuxseg5ei8_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_f16mf2(...) __riscv_vsuxseg5ei8_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_f16mf2_m(...) __riscv_vsuxseg5ei8_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_f16mf4(...) __riscv_vsuxseg5ei8_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_f16mf4_m(...) __riscv_vsuxseg5ei8_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_f32m1(...) __riscv_vsuxseg5ei8_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_f32m1_m(...) __riscv_vsuxseg5ei8_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_f32mf2(...) __riscv_vsuxseg5ei8_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_f32mf2_m(...) __riscv_vsuxseg5ei8_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_f64m1(...) __riscv_vsuxseg5ei8_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_f64m1_m(...) __riscv_vsuxseg5ei8_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_i16m1(...) __riscv_vsuxseg5ei8_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_i16m1_m(...) __riscv_vsuxseg5ei8_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_i16mf2(...) __riscv_vsuxseg5ei8_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_i16mf2_m(...) __riscv_vsuxseg5ei8_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_i16mf4(...) __riscv_vsuxseg5ei8_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_i16mf4_m(...) __riscv_vsuxseg5ei8_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_i32m1(...) __riscv_vsuxseg5ei8_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_i32m1_m(...) __riscv_vsuxseg5ei8_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_i32mf2(...) __riscv_vsuxseg5ei8_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_i32mf2_m(...) __riscv_vsuxseg5ei8_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_i64m1(...) __riscv_vsuxseg5ei8_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_i64m1_m(...) __riscv_vsuxseg5ei8_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_i8m1(...) __riscv_vsuxseg5ei8_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_i8m1_m(...) __riscv_vsuxseg5ei8_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_i8mf2(...) __riscv_vsuxseg5ei8_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_i8mf2_m(...) __riscv_vsuxseg5ei8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_i8mf4(...) __riscv_vsuxseg5ei8_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_i8mf4_m(...) __riscv_vsuxseg5ei8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_i8mf8(...) __riscv_vsuxseg5ei8_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_i8mf8_m(...) __riscv_vsuxseg5ei8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_u16m1(...) __riscv_vsuxseg5ei8_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_u16m1_m(...) __riscv_vsuxseg5ei8_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_u16mf2(...) __riscv_vsuxseg5ei8_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_u16mf2_m(...) __riscv_vsuxseg5ei8_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_u16mf4(...) __riscv_vsuxseg5ei8_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_u16mf4_m(...) __riscv_vsuxseg5ei8_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_u32m1(...) __riscv_vsuxseg5ei8_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_u32m1_m(...) __riscv_vsuxseg5ei8_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_u32mf2(...) __riscv_vsuxseg5ei8_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_u32mf2_m(...) __riscv_vsuxseg5ei8_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_u64m1(...) __riscv_vsuxseg5ei8_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_u64m1_m(...) __riscv_vsuxseg5ei8_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_u8m1(...) __riscv_vsuxseg5ei8_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_u8m1_m(...) __riscv_vsuxseg5ei8_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_u8mf2(...) __riscv_vsuxseg5ei8_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_u8mf2_m(...) __riscv_vsuxseg5ei8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_u8mf4(...) __riscv_vsuxseg5ei8_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_u8mf4_m(...) __riscv_vsuxseg5ei8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_u8mf8(...) __riscv_vsuxseg5ei8_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg5ei8_v_u8mf8_m(...) __riscv_vsuxseg5ei8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_f16m1(...) __riscv_vsuxseg6ei16_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_f16m1_m(...) __riscv_vsuxseg6ei16_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_f16mf2(...) __riscv_vsuxseg6ei16_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_f16mf2_m(...) __riscv_vsuxseg6ei16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_f16mf4(...) __riscv_vsuxseg6ei16_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_f16mf4_m(...) __riscv_vsuxseg6ei16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_f32m1(...) __riscv_vsuxseg6ei16_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_f32m1_m(...) __riscv_vsuxseg6ei16_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_f32mf2(...) __riscv_vsuxseg6ei16_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_f32mf2_m(...) __riscv_vsuxseg6ei16_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_f64m1(...) __riscv_vsuxseg6ei16_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_f64m1_m(...) __riscv_vsuxseg6ei16_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_i16m1(...) __riscv_vsuxseg6ei16_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_i16m1_m(...) __riscv_vsuxseg6ei16_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_i16mf2(...) __riscv_vsuxseg6ei16_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_i16mf2_m(...) __riscv_vsuxseg6ei16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_i16mf4(...) __riscv_vsuxseg6ei16_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_i16mf4_m(...) __riscv_vsuxseg6ei16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_i32m1(...) __riscv_vsuxseg6ei16_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_i32m1_m(...) __riscv_vsuxseg6ei16_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_i32mf2(...) __riscv_vsuxseg6ei16_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_i32mf2_m(...) __riscv_vsuxseg6ei16_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_i64m1(...) __riscv_vsuxseg6ei16_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_i64m1_m(...) __riscv_vsuxseg6ei16_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_i8m1(...) __riscv_vsuxseg6ei16_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_i8m1_m(...) __riscv_vsuxseg6ei16_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_i8mf2(...) __riscv_vsuxseg6ei16_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_i8mf2_m(...) __riscv_vsuxseg6ei16_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_i8mf4(...) __riscv_vsuxseg6ei16_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_i8mf4_m(...) __riscv_vsuxseg6ei16_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_i8mf8(...) __riscv_vsuxseg6ei16_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_i8mf8_m(...) __riscv_vsuxseg6ei16_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_u16m1(...) __riscv_vsuxseg6ei16_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_u16m1_m(...) __riscv_vsuxseg6ei16_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_u16mf2(...) __riscv_vsuxseg6ei16_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_u16mf2_m(...) __riscv_vsuxseg6ei16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_u16mf4(...) __riscv_vsuxseg6ei16_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_u16mf4_m(...) __riscv_vsuxseg6ei16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_u32m1(...) __riscv_vsuxseg6ei16_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_u32m1_m(...) __riscv_vsuxseg6ei16_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_u32mf2(...) __riscv_vsuxseg6ei16_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_u32mf2_m(...) __riscv_vsuxseg6ei16_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_u64m1(...) __riscv_vsuxseg6ei16_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_u64m1_m(...) __riscv_vsuxseg6ei16_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_u8m1(...) __riscv_vsuxseg6ei16_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_u8m1_m(...) __riscv_vsuxseg6ei16_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_u8mf2(...) __riscv_vsuxseg6ei16_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_u8mf2_m(...) __riscv_vsuxseg6ei16_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_u8mf4(...) __riscv_vsuxseg6ei16_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_u8mf4_m(...) __riscv_vsuxseg6ei16_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_u8mf8(...) __riscv_vsuxseg6ei16_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg6ei16_v_u8mf8_m(...) __riscv_vsuxseg6ei16_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_f16m1(...) __riscv_vsuxseg6ei32_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_f16m1_m(...) __riscv_vsuxseg6ei32_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_f16mf2(...) __riscv_vsuxseg6ei32_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_f16mf2_m(...) __riscv_vsuxseg6ei32_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_f16mf4(...) __riscv_vsuxseg6ei32_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_f16mf4_m(...) __riscv_vsuxseg6ei32_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_f32m1(...) __riscv_vsuxseg6ei32_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_f32m1_m(...) __riscv_vsuxseg6ei32_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_f32mf2(...) __riscv_vsuxseg6ei32_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_f32mf2_m(...) __riscv_vsuxseg6ei32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_f64m1(...) __riscv_vsuxseg6ei32_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_f64m1_m(...) __riscv_vsuxseg6ei32_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_i16m1(...) __riscv_vsuxseg6ei32_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_i16m1_m(...) __riscv_vsuxseg6ei32_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_i16mf2(...) __riscv_vsuxseg6ei32_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_i16mf2_m(...) __riscv_vsuxseg6ei32_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_i16mf4(...) __riscv_vsuxseg6ei32_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_i16mf4_m(...) __riscv_vsuxseg6ei32_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_i32m1(...) __riscv_vsuxseg6ei32_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_i32m1_m(...) __riscv_vsuxseg6ei32_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_i32mf2(...) __riscv_vsuxseg6ei32_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_i32mf2_m(...) __riscv_vsuxseg6ei32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_i64m1(...) __riscv_vsuxseg6ei32_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_i64m1_m(...) __riscv_vsuxseg6ei32_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_i8m1(...) __riscv_vsuxseg6ei32_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_i8m1_m(...) __riscv_vsuxseg6ei32_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_i8mf2(...) __riscv_vsuxseg6ei32_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_i8mf2_m(...) __riscv_vsuxseg6ei32_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_i8mf4(...) __riscv_vsuxseg6ei32_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_i8mf4_m(...) __riscv_vsuxseg6ei32_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_i8mf8(...) __riscv_vsuxseg6ei32_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_i8mf8_m(...) __riscv_vsuxseg6ei32_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_u16m1(...) __riscv_vsuxseg6ei32_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_u16m1_m(...) __riscv_vsuxseg6ei32_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_u16mf2(...) __riscv_vsuxseg6ei32_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_u16mf2_m(...) __riscv_vsuxseg6ei32_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_u16mf4(...) __riscv_vsuxseg6ei32_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_u16mf4_m(...) __riscv_vsuxseg6ei32_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_u32m1(...) __riscv_vsuxseg6ei32_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_u32m1_m(...) __riscv_vsuxseg6ei32_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_u32mf2(...) __riscv_vsuxseg6ei32_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_u32mf2_m(...) __riscv_vsuxseg6ei32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_u64m1(...) __riscv_vsuxseg6ei32_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_u64m1_m(...) __riscv_vsuxseg6ei32_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_u8m1(...) __riscv_vsuxseg6ei32_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_u8m1_m(...) __riscv_vsuxseg6ei32_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_u8mf2(...) __riscv_vsuxseg6ei32_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_u8mf2_m(...) __riscv_vsuxseg6ei32_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_u8mf4(...) __riscv_vsuxseg6ei32_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_u8mf4_m(...) __riscv_vsuxseg6ei32_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_u8mf8(...) __riscv_vsuxseg6ei32_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg6ei32_v_u8mf8_m(...) __riscv_vsuxseg6ei32_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_f16m1(...) __riscv_vsuxseg6ei64_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_f16m1_m(...) __riscv_vsuxseg6ei64_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_f16mf2(...) __riscv_vsuxseg6ei64_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_f16mf2_m(...) __riscv_vsuxseg6ei64_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_f16mf4(...) __riscv_vsuxseg6ei64_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_f16mf4_m(...) __riscv_vsuxseg6ei64_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_f32m1(...) __riscv_vsuxseg6ei64_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_f32m1_m(...) __riscv_vsuxseg6ei64_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_f32mf2(...) __riscv_vsuxseg6ei64_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_f32mf2_m(...) __riscv_vsuxseg6ei64_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_f64m1(...) __riscv_vsuxseg6ei64_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_f64m1_m(...) __riscv_vsuxseg6ei64_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_i16m1(...) __riscv_vsuxseg6ei64_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_i16m1_m(...) __riscv_vsuxseg6ei64_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_i16mf2(...) __riscv_vsuxseg6ei64_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_i16mf2_m(...) __riscv_vsuxseg6ei64_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_i16mf4(...) __riscv_vsuxseg6ei64_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_i16mf4_m(...) __riscv_vsuxseg6ei64_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_i32m1(...) __riscv_vsuxseg6ei64_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_i32m1_m(...) __riscv_vsuxseg6ei64_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_i32mf2(...) __riscv_vsuxseg6ei64_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_i32mf2_m(...) __riscv_vsuxseg6ei64_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_i64m1(...) __riscv_vsuxseg6ei64_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_i64m1_m(...) __riscv_vsuxseg6ei64_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_i8m1(...) __riscv_vsuxseg6ei64_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_i8m1_m(...) __riscv_vsuxseg6ei64_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_i8mf2(...) __riscv_vsuxseg6ei64_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_i8mf2_m(...) __riscv_vsuxseg6ei64_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_i8mf4(...) __riscv_vsuxseg6ei64_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_i8mf4_m(...) __riscv_vsuxseg6ei64_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_i8mf8(...) __riscv_vsuxseg6ei64_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_i8mf8_m(...) __riscv_vsuxseg6ei64_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_u16m1(...) __riscv_vsuxseg6ei64_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_u16m1_m(...) __riscv_vsuxseg6ei64_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_u16mf2(...) __riscv_vsuxseg6ei64_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_u16mf2_m(...) __riscv_vsuxseg6ei64_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_u16mf4(...) __riscv_vsuxseg6ei64_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_u16mf4_m(...) __riscv_vsuxseg6ei64_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_u32m1(...) __riscv_vsuxseg6ei64_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_u32m1_m(...) __riscv_vsuxseg6ei64_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_u32mf2(...) __riscv_vsuxseg6ei64_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_u32mf2_m(...) __riscv_vsuxseg6ei64_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_u64m1(...) __riscv_vsuxseg6ei64_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_u64m1_m(...) __riscv_vsuxseg6ei64_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_u8m1(...) __riscv_vsuxseg6ei64_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_u8m1_m(...) __riscv_vsuxseg6ei64_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_u8mf2(...) __riscv_vsuxseg6ei64_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_u8mf2_m(...) __riscv_vsuxseg6ei64_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_u8mf4(...) __riscv_vsuxseg6ei64_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_u8mf4_m(...) __riscv_vsuxseg6ei64_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_u8mf8(...) __riscv_vsuxseg6ei64_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg6ei64_v_u8mf8_m(...) __riscv_vsuxseg6ei64_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_f16m1(...) __riscv_vsuxseg6ei8_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_f16m1_m(...) __riscv_vsuxseg6ei8_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_f16mf2(...) __riscv_vsuxseg6ei8_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_f16mf2_m(...) __riscv_vsuxseg6ei8_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_f16mf4(...) __riscv_vsuxseg6ei8_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_f16mf4_m(...) __riscv_vsuxseg6ei8_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_f32m1(...) __riscv_vsuxseg6ei8_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_f32m1_m(...) __riscv_vsuxseg6ei8_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_f32mf2(...) __riscv_vsuxseg6ei8_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_f32mf2_m(...) __riscv_vsuxseg6ei8_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_f64m1(...) __riscv_vsuxseg6ei8_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_f64m1_m(...) __riscv_vsuxseg6ei8_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_i16m1(...) __riscv_vsuxseg6ei8_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_i16m1_m(...) __riscv_vsuxseg6ei8_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_i16mf2(...) __riscv_vsuxseg6ei8_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_i16mf2_m(...) __riscv_vsuxseg6ei8_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_i16mf4(...) __riscv_vsuxseg6ei8_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_i16mf4_m(...) __riscv_vsuxseg6ei8_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_i32m1(...) __riscv_vsuxseg6ei8_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_i32m1_m(...) __riscv_vsuxseg6ei8_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_i32mf2(...) __riscv_vsuxseg6ei8_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_i32mf2_m(...) __riscv_vsuxseg6ei8_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_i64m1(...) __riscv_vsuxseg6ei8_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_i64m1_m(...) __riscv_vsuxseg6ei8_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_i8m1(...) __riscv_vsuxseg6ei8_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_i8m1_m(...) __riscv_vsuxseg6ei8_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_i8mf2(...) __riscv_vsuxseg6ei8_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_i8mf2_m(...) __riscv_vsuxseg6ei8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_i8mf4(...) __riscv_vsuxseg6ei8_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_i8mf4_m(...) __riscv_vsuxseg6ei8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_i8mf8(...) __riscv_vsuxseg6ei8_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_i8mf8_m(...) __riscv_vsuxseg6ei8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_u16m1(...) __riscv_vsuxseg6ei8_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_u16m1_m(...) __riscv_vsuxseg6ei8_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_u16mf2(...) __riscv_vsuxseg6ei8_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_u16mf2_m(...) __riscv_vsuxseg6ei8_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_u16mf4(...) __riscv_vsuxseg6ei8_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_u16mf4_m(...) __riscv_vsuxseg6ei8_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_u32m1(...) __riscv_vsuxseg6ei8_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_u32m1_m(...) __riscv_vsuxseg6ei8_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_u32mf2(...) __riscv_vsuxseg6ei8_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_u32mf2_m(...) __riscv_vsuxseg6ei8_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_u64m1(...) __riscv_vsuxseg6ei8_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_u64m1_m(...) __riscv_vsuxseg6ei8_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_u8m1(...) __riscv_vsuxseg6ei8_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_u8m1_m(...) __riscv_vsuxseg6ei8_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_u8mf2(...) __riscv_vsuxseg6ei8_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_u8mf2_m(...) __riscv_vsuxseg6ei8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_u8mf4(...) __riscv_vsuxseg6ei8_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_u8mf4_m(...) __riscv_vsuxseg6ei8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_u8mf8(...) __riscv_vsuxseg6ei8_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg6ei8_v_u8mf8_m(...) __riscv_vsuxseg6ei8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_f16m1(...) __riscv_vsuxseg7ei16_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_f16m1_m(...) __riscv_vsuxseg7ei16_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_f16mf2(...) __riscv_vsuxseg7ei16_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_f16mf2_m(...) __riscv_vsuxseg7ei16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_f16mf4(...) __riscv_vsuxseg7ei16_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_f16mf4_m(...) __riscv_vsuxseg7ei16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_f32m1(...) __riscv_vsuxseg7ei16_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_f32m1_m(...) __riscv_vsuxseg7ei16_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_f32mf2(...) __riscv_vsuxseg7ei16_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_f32mf2_m(...) __riscv_vsuxseg7ei16_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_f64m1(...) __riscv_vsuxseg7ei16_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_f64m1_m(...) __riscv_vsuxseg7ei16_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_i16m1(...) __riscv_vsuxseg7ei16_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_i16m1_m(...) __riscv_vsuxseg7ei16_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_i16mf2(...) __riscv_vsuxseg7ei16_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_i16mf2_m(...) __riscv_vsuxseg7ei16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_i16mf4(...) __riscv_vsuxseg7ei16_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_i16mf4_m(...) __riscv_vsuxseg7ei16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_i32m1(...) __riscv_vsuxseg7ei16_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_i32m1_m(...) __riscv_vsuxseg7ei16_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_i32mf2(...) __riscv_vsuxseg7ei16_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_i32mf2_m(...) __riscv_vsuxseg7ei16_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_i64m1(...) __riscv_vsuxseg7ei16_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_i64m1_m(...) __riscv_vsuxseg7ei16_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_i8m1(...) __riscv_vsuxseg7ei16_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_i8m1_m(...) __riscv_vsuxseg7ei16_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_i8mf2(...) __riscv_vsuxseg7ei16_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_i8mf2_m(...) __riscv_vsuxseg7ei16_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_i8mf4(...) __riscv_vsuxseg7ei16_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_i8mf4_m(...) __riscv_vsuxseg7ei16_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_i8mf8(...) __riscv_vsuxseg7ei16_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_i8mf8_m(...) __riscv_vsuxseg7ei16_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_u16m1(...) __riscv_vsuxseg7ei16_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_u16m1_m(...) __riscv_vsuxseg7ei16_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_u16mf2(...) __riscv_vsuxseg7ei16_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_u16mf2_m(...) __riscv_vsuxseg7ei16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_u16mf4(...) __riscv_vsuxseg7ei16_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_u16mf4_m(...) __riscv_vsuxseg7ei16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_u32m1(...) __riscv_vsuxseg7ei16_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_u32m1_m(...) __riscv_vsuxseg7ei16_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_u32mf2(...) __riscv_vsuxseg7ei16_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_u32mf2_m(...) __riscv_vsuxseg7ei16_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_u64m1(...) __riscv_vsuxseg7ei16_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_u64m1_m(...) __riscv_vsuxseg7ei16_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_u8m1(...) __riscv_vsuxseg7ei16_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_u8m1_m(...) __riscv_vsuxseg7ei16_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_u8mf2(...) __riscv_vsuxseg7ei16_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_u8mf2_m(...) __riscv_vsuxseg7ei16_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_u8mf4(...) __riscv_vsuxseg7ei16_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_u8mf4_m(...) __riscv_vsuxseg7ei16_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_u8mf8(...) __riscv_vsuxseg7ei16_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg7ei16_v_u8mf8_m(...) __riscv_vsuxseg7ei16_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_f16m1(...) __riscv_vsuxseg7ei32_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_f16m1_m(...) __riscv_vsuxseg7ei32_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_f16mf2(...) __riscv_vsuxseg7ei32_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_f16mf2_m(...) __riscv_vsuxseg7ei32_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_f16mf4(...) __riscv_vsuxseg7ei32_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_f16mf4_m(...) __riscv_vsuxseg7ei32_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_f32m1(...) __riscv_vsuxseg7ei32_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_f32m1_m(...) __riscv_vsuxseg7ei32_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_f32mf2(...) __riscv_vsuxseg7ei32_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_f32mf2_m(...) __riscv_vsuxseg7ei32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_f64m1(...) __riscv_vsuxseg7ei32_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_f64m1_m(...) __riscv_vsuxseg7ei32_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_i16m1(...) __riscv_vsuxseg7ei32_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_i16m1_m(...) __riscv_vsuxseg7ei32_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_i16mf2(...) __riscv_vsuxseg7ei32_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_i16mf2_m(...) __riscv_vsuxseg7ei32_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_i16mf4(...) __riscv_vsuxseg7ei32_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_i16mf4_m(...) __riscv_vsuxseg7ei32_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_i32m1(...) __riscv_vsuxseg7ei32_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_i32m1_m(...) __riscv_vsuxseg7ei32_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_i32mf2(...) __riscv_vsuxseg7ei32_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_i32mf2_m(...) __riscv_vsuxseg7ei32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_i64m1(...) __riscv_vsuxseg7ei32_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_i64m1_m(...) __riscv_vsuxseg7ei32_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_i8m1(...) __riscv_vsuxseg7ei32_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_i8m1_m(...) __riscv_vsuxseg7ei32_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_i8mf2(...) __riscv_vsuxseg7ei32_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_i8mf2_m(...) __riscv_vsuxseg7ei32_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_i8mf4(...) __riscv_vsuxseg7ei32_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_i8mf4_m(...) __riscv_vsuxseg7ei32_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_i8mf8(...) __riscv_vsuxseg7ei32_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_i8mf8_m(...) __riscv_vsuxseg7ei32_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_u16m1(...) __riscv_vsuxseg7ei32_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_u16m1_m(...) __riscv_vsuxseg7ei32_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_u16mf2(...) __riscv_vsuxseg7ei32_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_u16mf2_m(...) __riscv_vsuxseg7ei32_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_u16mf4(...) __riscv_vsuxseg7ei32_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_u16mf4_m(...) __riscv_vsuxseg7ei32_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_u32m1(...) __riscv_vsuxseg7ei32_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_u32m1_m(...) __riscv_vsuxseg7ei32_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_u32mf2(...) __riscv_vsuxseg7ei32_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_u32mf2_m(...) __riscv_vsuxseg7ei32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_u64m1(...) __riscv_vsuxseg7ei32_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_u64m1_m(...) __riscv_vsuxseg7ei32_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_u8m1(...) __riscv_vsuxseg7ei32_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_u8m1_m(...) __riscv_vsuxseg7ei32_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_u8mf2(...) __riscv_vsuxseg7ei32_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_u8mf2_m(...) __riscv_vsuxseg7ei32_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_u8mf4(...) __riscv_vsuxseg7ei32_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_u8mf4_m(...) __riscv_vsuxseg7ei32_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_u8mf8(...) __riscv_vsuxseg7ei32_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg7ei32_v_u8mf8_m(...) __riscv_vsuxseg7ei32_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_f16m1(...) __riscv_vsuxseg7ei64_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_f16m1_m(...) __riscv_vsuxseg7ei64_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_f16mf2(...) __riscv_vsuxseg7ei64_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_f16mf2_m(...) __riscv_vsuxseg7ei64_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_f16mf4(...) __riscv_vsuxseg7ei64_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_f16mf4_m(...) __riscv_vsuxseg7ei64_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_f32m1(...) __riscv_vsuxseg7ei64_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_f32m1_m(...) __riscv_vsuxseg7ei64_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_f32mf2(...) __riscv_vsuxseg7ei64_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_f32mf2_m(...) __riscv_vsuxseg7ei64_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_f64m1(...) __riscv_vsuxseg7ei64_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_f64m1_m(...) __riscv_vsuxseg7ei64_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_i16m1(...) __riscv_vsuxseg7ei64_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_i16m1_m(...) __riscv_vsuxseg7ei64_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_i16mf2(...) __riscv_vsuxseg7ei64_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_i16mf2_m(...) __riscv_vsuxseg7ei64_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_i16mf4(...) __riscv_vsuxseg7ei64_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_i16mf4_m(...) __riscv_vsuxseg7ei64_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_i32m1(...) __riscv_vsuxseg7ei64_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_i32m1_m(...) __riscv_vsuxseg7ei64_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_i32mf2(...) __riscv_vsuxseg7ei64_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_i32mf2_m(...) __riscv_vsuxseg7ei64_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_i64m1(...) __riscv_vsuxseg7ei64_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_i64m1_m(...) __riscv_vsuxseg7ei64_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_i8m1(...) __riscv_vsuxseg7ei64_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_i8m1_m(...) __riscv_vsuxseg7ei64_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_i8mf2(...) __riscv_vsuxseg7ei64_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_i8mf2_m(...) __riscv_vsuxseg7ei64_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_i8mf4(...) __riscv_vsuxseg7ei64_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_i8mf4_m(...) __riscv_vsuxseg7ei64_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_i8mf8(...) __riscv_vsuxseg7ei64_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_i8mf8_m(...) __riscv_vsuxseg7ei64_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_u16m1(...) __riscv_vsuxseg7ei64_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_u16m1_m(...) __riscv_vsuxseg7ei64_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_u16mf2(...) __riscv_vsuxseg7ei64_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_u16mf2_m(...) __riscv_vsuxseg7ei64_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_u16mf4(...) __riscv_vsuxseg7ei64_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_u16mf4_m(...) __riscv_vsuxseg7ei64_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_u32m1(...) __riscv_vsuxseg7ei64_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_u32m1_m(...) __riscv_vsuxseg7ei64_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_u32mf2(...) __riscv_vsuxseg7ei64_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_u32mf2_m(...) __riscv_vsuxseg7ei64_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_u64m1(...) __riscv_vsuxseg7ei64_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_u64m1_m(...) __riscv_vsuxseg7ei64_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_u8m1(...) __riscv_vsuxseg7ei64_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_u8m1_m(...) __riscv_vsuxseg7ei64_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_u8mf2(...) __riscv_vsuxseg7ei64_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_u8mf2_m(...) __riscv_vsuxseg7ei64_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_u8mf4(...) __riscv_vsuxseg7ei64_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_u8mf4_m(...) __riscv_vsuxseg7ei64_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_u8mf8(...) __riscv_vsuxseg7ei64_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg7ei64_v_u8mf8_m(...) __riscv_vsuxseg7ei64_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_f16m1(...) __riscv_vsuxseg7ei8_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_f16m1_m(...) __riscv_vsuxseg7ei8_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_f16mf2(...) __riscv_vsuxseg7ei8_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_f16mf2_m(...) __riscv_vsuxseg7ei8_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_f16mf4(...) __riscv_vsuxseg7ei8_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_f16mf4_m(...) __riscv_vsuxseg7ei8_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_f32m1(...) __riscv_vsuxseg7ei8_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_f32m1_m(...) __riscv_vsuxseg7ei8_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_f32mf2(...) __riscv_vsuxseg7ei8_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_f32mf2_m(...) __riscv_vsuxseg7ei8_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_f64m1(...) __riscv_vsuxseg7ei8_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_f64m1_m(...) __riscv_vsuxseg7ei8_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_i16m1(...) __riscv_vsuxseg7ei8_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_i16m1_m(...) __riscv_vsuxseg7ei8_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_i16mf2(...) __riscv_vsuxseg7ei8_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_i16mf2_m(...) __riscv_vsuxseg7ei8_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_i16mf4(...) __riscv_vsuxseg7ei8_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_i16mf4_m(...) __riscv_vsuxseg7ei8_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_i32m1(...) __riscv_vsuxseg7ei8_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_i32m1_m(...) __riscv_vsuxseg7ei8_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_i32mf2(...) __riscv_vsuxseg7ei8_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_i32mf2_m(...) __riscv_vsuxseg7ei8_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_i64m1(...) __riscv_vsuxseg7ei8_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_i64m1_m(...) __riscv_vsuxseg7ei8_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_i8m1(...) __riscv_vsuxseg7ei8_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_i8m1_m(...) __riscv_vsuxseg7ei8_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_i8mf2(...) __riscv_vsuxseg7ei8_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_i8mf2_m(...) __riscv_vsuxseg7ei8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_i8mf4(...) __riscv_vsuxseg7ei8_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_i8mf4_m(...) __riscv_vsuxseg7ei8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_i8mf8(...) __riscv_vsuxseg7ei8_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_i8mf8_m(...) __riscv_vsuxseg7ei8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_u16m1(...) __riscv_vsuxseg7ei8_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_u16m1_m(...) __riscv_vsuxseg7ei8_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_u16mf2(...) __riscv_vsuxseg7ei8_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_u16mf2_m(...) __riscv_vsuxseg7ei8_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_u16mf4(...) __riscv_vsuxseg7ei8_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_u16mf4_m(...) __riscv_vsuxseg7ei8_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_u32m1(...) __riscv_vsuxseg7ei8_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_u32m1_m(...) __riscv_vsuxseg7ei8_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_u32mf2(...) __riscv_vsuxseg7ei8_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_u32mf2_m(...) __riscv_vsuxseg7ei8_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_u64m1(...) __riscv_vsuxseg7ei8_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_u64m1_m(...) __riscv_vsuxseg7ei8_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_u8m1(...) __riscv_vsuxseg7ei8_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_u8m1_m(...) __riscv_vsuxseg7ei8_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_u8mf2(...) __riscv_vsuxseg7ei8_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_u8mf2_m(...) __riscv_vsuxseg7ei8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_u8mf4(...) __riscv_vsuxseg7ei8_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_u8mf4_m(...) __riscv_vsuxseg7ei8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_u8mf8(...) __riscv_vsuxseg7ei8_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg7ei8_v_u8mf8_m(...) __riscv_vsuxseg7ei8_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_f16m1(...) __riscv_vsuxseg8ei16_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_f16m1_m(...) __riscv_vsuxseg8ei16_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_f16mf2(...) __riscv_vsuxseg8ei16_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_f16mf2_m(...) __riscv_vsuxseg8ei16_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_f16mf4(...) __riscv_vsuxseg8ei16_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_f16mf4_m(...) __riscv_vsuxseg8ei16_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_f32m1(...) __riscv_vsuxseg8ei16_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_f32m1_m(...) __riscv_vsuxseg8ei16_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_f32mf2(...) __riscv_vsuxseg8ei16_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_f32mf2_m(...) __riscv_vsuxseg8ei16_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_f64m1(...) __riscv_vsuxseg8ei16_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_f64m1_m(...) __riscv_vsuxseg8ei16_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_i16m1(...) __riscv_vsuxseg8ei16_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_i16m1_m(...) __riscv_vsuxseg8ei16_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_i16mf2(...) __riscv_vsuxseg8ei16_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_i16mf2_m(...) __riscv_vsuxseg8ei16_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_i16mf4(...) __riscv_vsuxseg8ei16_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_i16mf4_m(...) __riscv_vsuxseg8ei16_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_i32m1(...) __riscv_vsuxseg8ei16_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_i32m1_m(...) __riscv_vsuxseg8ei16_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_i32mf2(...) __riscv_vsuxseg8ei16_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_i32mf2_m(...) __riscv_vsuxseg8ei16_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_i64m1(...) __riscv_vsuxseg8ei16_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_i64m1_m(...) __riscv_vsuxseg8ei16_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_i8m1(...) __riscv_vsuxseg8ei16_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_i8m1_m(...) __riscv_vsuxseg8ei16_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_i8mf2(...) __riscv_vsuxseg8ei16_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_i8mf2_m(...) __riscv_vsuxseg8ei16_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_i8mf4(...) __riscv_vsuxseg8ei16_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_i8mf4_m(...) __riscv_vsuxseg8ei16_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_i8mf8(...) __riscv_vsuxseg8ei16_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_i8mf8_m(...) __riscv_vsuxseg8ei16_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_u16m1(...) __riscv_vsuxseg8ei16_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_u16m1_m(...) __riscv_vsuxseg8ei16_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_u16mf2(...) __riscv_vsuxseg8ei16_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_u16mf2_m(...) __riscv_vsuxseg8ei16_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_u16mf4(...) __riscv_vsuxseg8ei16_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_u16mf4_m(...) __riscv_vsuxseg8ei16_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_u32m1(...) __riscv_vsuxseg8ei16_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_u32m1_m(...) __riscv_vsuxseg8ei16_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_u32mf2(...) __riscv_vsuxseg8ei16_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_u32mf2_m(...) __riscv_vsuxseg8ei16_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_u64m1(...) __riscv_vsuxseg8ei16_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_u64m1_m(...) __riscv_vsuxseg8ei16_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_u8m1(...) __riscv_vsuxseg8ei16_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_u8m1_m(...) __riscv_vsuxseg8ei16_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_u8mf2(...) __riscv_vsuxseg8ei16_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_u8mf2_m(...) __riscv_vsuxseg8ei16_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_u8mf4(...) __riscv_vsuxseg8ei16_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_u8mf4_m(...) __riscv_vsuxseg8ei16_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_u8mf8(...) __riscv_vsuxseg8ei16_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg8ei16_v_u8mf8_m(...) __riscv_vsuxseg8ei16_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_f16m1(...) __riscv_vsuxseg8ei32_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_f16m1_m(...) __riscv_vsuxseg8ei32_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_f16mf2(...) __riscv_vsuxseg8ei32_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_f16mf2_m(...) __riscv_vsuxseg8ei32_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_f16mf4(...) __riscv_vsuxseg8ei32_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_f16mf4_m(...) __riscv_vsuxseg8ei32_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_f32m1(...) __riscv_vsuxseg8ei32_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_f32m1_m(...) __riscv_vsuxseg8ei32_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_f32mf2(...) __riscv_vsuxseg8ei32_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_f32mf2_m(...) __riscv_vsuxseg8ei32_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_f64m1(...) __riscv_vsuxseg8ei32_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_f64m1_m(...) __riscv_vsuxseg8ei32_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_i16m1(...) __riscv_vsuxseg8ei32_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_i16m1_m(...) __riscv_vsuxseg8ei32_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_i16mf2(...) __riscv_vsuxseg8ei32_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_i16mf2_m(...) __riscv_vsuxseg8ei32_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_i16mf4(...) __riscv_vsuxseg8ei32_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_i16mf4_m(...) __riscv_vsuxseg8ei32_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_i32m1(...) __riscv_vsuxseg8ei32_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_i32m1_m(...) __riscv_vsuxseg8ei32_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_i32mf2(...) __riscv_vsuxseg8ei32_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_i32mf2_m(...) __riscv_vsuxseg8ei32_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_i64m1(...) __riscv_vsuxseg8ei32_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_i64m1_m(...) __riscv_vsuxseg8ei32_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_i8m1(...) __riscv_vsuxseg8ei32_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_i8m1_m(...) __riscv_vsuxseg8ei32_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_i8mf2(...) __riscv_vsuxseg8ei32_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_i8mf2_m(...) __riscv_vsuxseg8ei32_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_i8mf4(...) __riscv_vsuxseg8ei32_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_i8mf4_m(...) __riscv_vsuxseg8ei32_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_i8mf8(...) __riscv_vsuxseg8ei32_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_i8mf8_m(...) __riscv_vsuxseg8ei32_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_u16m1(...) __riscv_vsuxseg8ei32_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_u16m1_m(...) __riscv_vsuxseg8ei32_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_u16mf2(...) __riscv_vsuxseg8ei32_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_u16mf2_m(...) __riscv_vsuxseg8ei32_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_u16mf4(...) __riscv_vsuxseg8ei32_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_u16mf4_m(...) __riscv_vsuxseg8ei32_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_u32m1(...) __riscv_vsuxseg8ei32_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_u32m1_m(...) __riscv_vsuxseg8ei32_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_u32mf2(...) __riscv_vsuxseg8ei32_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_u32mf2_m(...) __riscv_vsuxseg8ei32_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_u64m1(...) __riscv_vsuxseg8ei32_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_u64m1_m(...) __riscv_vsuxseg8ei32_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_u8m1(...) __riscv_vsuxseg8ei32_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_u8m1_m(...) __riscv_vsuxseg8ei32_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_u8mf2(...) __riscv_vsuxseg8ei32_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_u8mf2_m(...) __riscv_vsuxseg8ei32_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_u8mf4(...) __riscv_vsuxseg8ei32_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_u8mf4_m(...) __riscv_vsuxseg8ei32_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_u8mf8(...) __riscv_vsuxseg8ei32_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg8ei32_v_u8mf8_m(...) __riscv_vsuxseg8ei32_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_f16m1(...) __riscv_vsuxseg8ei64_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_f16m1_m(...) __riscv_vsuxseg8ei64_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_f16mf2(...) __riscv_vsuxseg8ei64_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_f16mf2_m(...) __riscv_vsuxseg8ei64_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_f16mf4(...) __riscv_vsuxseg8ei64_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_f16mf4_m(...) __riscv_vsuxseg8ei64_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_f32m1(...) __riscv_vsuxseg8ei64_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_f32m1_m(...) __riscv_vsuxseg8ei64_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_f32mf2(...) __riscv_vsuxseg8ei64_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_f32mf2_m(...) __riscv_vsuxseg8ei64_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_f64m1(...) __riscv_vsuxseg8ei64_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_f64m1_m(...) __riscv_vsuxseg8ei64_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_i16m1(...) __riscv_vsuxseg8ei64_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_i16m1_m(...) __riscv_vsuxseg8ei64_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_i16mf2(...) __riscv_vsuxseg8ei64_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_i16mf2_m(...) __riscv_vsuxseg8ei64_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_i16mf4(...) __riscv_vsuxseg8ei64_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_i16mf4_m(...) __riscv_vsuxseg8ei64_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_i32m1(...) __riscv_vsuxseg8ei64_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_i32m1_m(...) __riscv_vsuxseg8ei64_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_i32mf2(...) __riscv_vsuxseg8ei64_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_i32mf2_m(...) __riscv_vsuxseg8ei64_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_i64m1(...) __riscv_vsuxseg8ei64_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_i64m1_m(...) __riscv_vsuxseg8ei64_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_i8m1(...) __riscv_vsuxseg8ei64_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_i8m1_m(...) __riscv_vsuxseg8ei64_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_i8mf2(...) __riscv_vsuxseg8ei64_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_i8mf2_m(...) __riscv_vsuxseg8ei64_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_i8mf4(...) __riscv_vsuxseg8ei64_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_i8mf4_m(...) __riscv_vsuxseg8ei64_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_i8mf8(...) __riscv_vsuxseg8ei64_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_i8mf8_m(...) __riscv_vsuxseg8ei64_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_u16m1(...) __riscv_vsuxseg8ei64_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_u16m1_m(...) __riscv_vsuxseg8ei64_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_u16mf2(...) __riscv_vsuxseg8ei64_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_u16mf2_m(...) __riscv_vsuxseg8ei64_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_u16mf4(...) __riscv_vsuxseg8ei64_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_u16mf4_m(...) __riscv_vsuxseg8ei64_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_u32m1(...) __riscv_vsuxseg8ei64_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_u32m1_m(...) __riscv_vsuxseg8ei64_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_u32mf2(...) __riscv_vsuxseg8ei64_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_u32mf2_m(...) __riscv_vsuxseg8ei64_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_u64m1(...) __riscv_vsuxseg8ei64_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_u64m1_m(...) __riscv_vsuxseg8ei64_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_u8m1(...) __riscv_vsuxseg8ei64_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_u8m1_m(...) __riscv_vsuxseg8ei64_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_u8mf2(...) __riscv_vsuxseg8ei64_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_u8mf2_m(...) __riscv_vsuxseg8ei64_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_u8mf4(...) __riscv_vsuxseg8ei64_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_u8mf4_m(...) __riscv_vsuxseg8ei64_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_u8mf8(...) __riscv_vsuxseg8ei64_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg8ei64_v_u8mf8_m(...) __riscv_vsuxseg8ei64_v_u8mf8_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_f16m1(...) __riscv_vsuxseg8ei8_v_f16m1(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_f16m1_m(...) __riscv_vsuxseg8ei8_v_f16m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_f16mf2(...) __riscv_vsuxseg8ei8_v_f16mf2(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_f16mf2_m(...) __riscv_vsuxseg8ei8_v_f16mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_f16mf4(...) __riscv_vsuxseg8ei8_v_f16mf4(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_f16mf4_m(...) __riscv_vsuxseg8ei8_v_f16mf4_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_f32m1(...) __riscv_vsuxseg8ei8_v_f32m1(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_f32m1_m(...) __riscv_vsuxseg8ei8_v_f32m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_f32mf2(...) __riscv_vsuxseg8ei8_v_f32mf2(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_f32mf2_m(...) __riscv_vsuxseg8ei8_v_f32mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_f64m1(...) __riscv_vsuxseg8ei8_v_f64m1(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_f64m1_m(...) __riscv_vsuxseg8ei8_v_f64m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_i16m1(...) __riscv_vsuxseg8ei8_v_i16m1(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_i16m1_m(...) __riscv_vsuxseg8ei8_v_i16m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_i16mf2(...) __riscv_vsuxseg8ei8_v_i16mf2(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_i16mf2_m(...) __riscv_vsuxseg8ei8_v_i16mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_i16mf4(...) __riscv_vsuxseg8ei8_v_i16mf4(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_i16mf4_m(...) __riscv_vsuxseg8ei8_v_i16mf4_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_i32m1(...) __riscv_vsuxseg8ei8_v_i32m1(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_i32m1_m(...) __riscv_vsuxseg8ei8_v_i32m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_i32mf2(...) __riscv_vsuxseg8ei8_v_i32mf2(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_i32mf2_m(...) __riscv_vsuxseg8ei8_v_i32mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_i64m1(...) __riscv_vsuxseg8ei8_v_i64m1(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_i64m1_m(...) __riscv_vsuxseg8ei8_v_i64m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_i8m1(...) __riscv_vsuxseg8ei8_v_i8m1(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_i8m1_m(...) __riscv_vsuxseg8ei8_v_i8m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_i8mf2(...) __riscv_vsuxseg8ei8_v_i8mf2(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_i8mf2_m(...) __riscv_vsuxseg8ei8_v_i8mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_i8mf4(...) __riscv_vsuxseg8ei8_v_i8mf4(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_i8mf4_m(...) __riscv_vsuxseg8ei8_v_i8mf4_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_i8mf8(...) __riscv_vsuxseg8ei8_v_i8mf8(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_i8mf8_m(...) __riscv_vsuxseg8ei8_v_i8mf8_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_u16m1(...) __riscv_vsuxseg8ei8_v_u16m1(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_u16m1_m(...) __riscv_vsuxseg8ei8_v_u16m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_u16mf2(...) __riscv_vsuxseg8ei8_v_u16mf2(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_u16mf2_m(...) __riscv_vsuxseg8ei8_v_u16mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_u16mf4(...) __riscv_vsuxseg8ei8_v_u16mf4(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_u16mf4_m(...) __riscv_vsuxseg8ei8_v_u16mf4_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_u32m1(...) __riscv_vsuxseg8ei8_v_u32m1(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_u32m1_m(...) __riscv_vsuxseg8ei8_v_u32m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_u32mf2(...) __riscv_vsuxseg8ei8_v_u32mf2(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_u32mf2_m(...) __riscv_vsuxseg8ei8_v_u32mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_u64m1(...) __riscv_vsuxseg8ei8_v_u64m1(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_u64m1_m(...) __riscv_vsuxseg8ei8_v_u64m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_u8m1(...) __riscv_vsuxseg8ei8_v_u8m1(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_u8m1_m(...) __riscv_vsuxseg8ei8_v_u8m1_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_u8mf2(...) __riscv_vsuxseg8ei8_v_u8mf2(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_u8mf2_m(...) __riscv_vsuxseg8ei8_v_u8mf2_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_u8mf4(...) __riscv_vsuxseg8ei8_v_u8mf4(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_u8mf4_m(...) __riscv_vsuxseg8ei8_v_u8mf4_m(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_u8mf8(...) __riscv_vsuxseg8ei8_v_u8mf8(__VA_ARGS__) |
| #define | vsuxseg8ei8_v_u8mf8_m(...) __riscv_vsuxseg8ei8_v_u8mf8_m(__VA_ARGS__) |
| #define | vundefined_f16m1(...) __riscv_vundefined_f16m1(__VA_ARGS__) |
| #define | vundefined_f16m2(...) __riscv_vundefined_f16m2(__VA_ARGS__) |
| #define | vundefined_f16m4(...) __riscv_vundefined_f16m4(__VA_ARGS__) |
| #define | vundefined_f16m8(...) __riscv_vundefined_f16m8(__VA_ARGS__) |
| #define | vundefined_f16mf2(...) __riscv_vundefined_f16mf2(__VA_ARGS__) |
| #define | vundefined_f16mf4(...) __riscv_vundefined_f16mf4(__VA_ARGS__) |
| #define | vundefined_f32m1(...) __riscv_vundefined_f32m1(__VA_ARGS__) |
| #define | vundefined_f32m2(...) __riscv_vundefined_f32m2(__VA_ARGS__) |
| #define | vundefined_f32m4(...) __riscv_vundefined_f32m4(__VA_ARGS__) |
| #define | vundefined_f32m8(...) __riscv_vundefined_f32m8(__VA_ARGS__) |
| #define | vundefined_f32mf2(...) __riscv_vundefined_f32mf2(__VA_ARGS__) |
| #define | vundefined_f64m1(...) __riscv_vundefined_f64m1(__VA_ARGS__) |
| #define | vundefined_f64m2(...) __riscv_vundefined_f64m2(__VA_ARGS__) |
| #define | vundefined_f64m4(...) __riscv_vundefined_f64m4(__VA_ARGS__) |
| #define | vundefined_f64m8(...) __riscv_vundefined_f64m8(__VA_ARGS__) |
| #define | vundefined_i16m1(...) __riscv_vundefined_i16m1(__VA_ARGS__) |
| #define | vundefined_i16m2(...) __riscv_vundefined_i16m2(__VA_ARGS__) |
| #define | vundefined_i16m4(...) __riscv_vundefined_i16m4(__VA_ARGS__) |
| #define | vundefined_i16m8(...) __riscv_vundefined_i16m8(__VA_ARGS__) |
| #define | vundefined_i16mf2(...) __riscv_vundefined_i16mf2(__VA_ARGS__) |
| #define | vundefined_i16mf4(...) __riscv_vundefined_i16mf4(__VA_ARGS__) |
| #define | vundefined_i32m1(...) __riscv_vundefined_i32m1(__VA_ARGS__) |
| #define | vundefined_i32m2(...) __riscv_vundefined_i32m2(__VA_ARGS__) |
| #define | vundefined_i32m4(...) __riscv_vundefined_i32m4(__VA_ARGS__) |
| #define | vundefined_i32m8(...) __riscv_vundefined_i32m8(__VA_ARGS__) |
| #define | vundefined_i32mf2(...) __riscv_vundefined_i32mf2(__VA_ARGS__) |
| #define | vundefined_i64m1(...) __riscv_vundefined_i64m1(__VA_ARGS__) |
| #define | vundefined_i64m2(...) __riscv_vundefined_i64m2(__VA_ARGS__) |
| #define | vundefined_i64m4(...) __riscv_vundefined_i64m4(__VA_ARGS__) |
| #define | vundefined_i64m8(...) __riscv_vundefined_i64m8(__VA_ARGS__) |
| #define | vundefined_i8m1(...) __riscv_vundefined_i8m1(__VA_ARGS__) |
| #define | vundefined_i8m2(...) __riscv_vundefined_i8m2(__VA_ARGS__) |
| #define | vundefined_i8m4(...) __riscv_vundefined_i8m4(__VA_ARGS__) |
| #define | vundefined_i8m8(...) __riscv_vundefined_i8m8(__VA_ARGS__) |
| #define | vundefined_i8mf2(...) __riscv_vundefined_i8mf2(__VA_ARGS__) |
| #define | vundefined_i8mf4(...) __riscv_vundefined_i8mf4(__VA_ARGS__) |
| #define | vundefined_i8mf8(...) __riscv_vundefined_i8mf8(__VA_ARGS__) |
| #define | vundefined_u16m1(...) __riscv_vundefined_u16m1(__VA_ARGS__) |
| #define | vundefined_u16m2(...) __riscv_vundefined_u16m2(__VA_ARGS__) |
| #define | vundefined_u16m4(...) __riscv_vundefined_u16m4(__VA_ARGS__) |
| #define | vundefined_u16m8(...) __riscv_vundefined_u16m8(__VA_ARGS__) |
| #define | vundefined_u16mf2(...) __riscv_vundefined_u16mf2(__VA_ARGS__) |
| #define | vundefined_u16mf4(...) __riscv_vundefined_u16mf4(__VA_ARGS__) |
| #define | vundefined_u32m1(...) __riscv_vundefined_u32m1(__VA_ARGS__) |
| #define | vundefined_u32m2(...) __riscv_vundefined_u32m2(__VA_ARGS__) |
| #define | vundefined_u32m4(...) __riscv_vundefined_u32m4(__VA_ARGS__) |
| #define | vundefined_u32m8(...) __riscv_vundefined_u32m8(__VA_ARGS__) |
| #define | vundefined_u32mf2(...) __riscv_vundefined_u32mf2(__VA_ARGS__) |
| #define | vundefined_u64m1(...) __riscv_vundefined_u64m1(__VA_ARGS__) |
| #define | vundefined_u64m2(...) __riscv_vundefined_u64m2(__VA_ARGS__) |
| #define | vundefined_u64m4(...) __riscv_vundefined_u64m4(__VA_ARGS__) |
| #define | vundefined_u64m8(...) __riscv_vundefined_u64m8(__VA_ARGS__) |
| #define | vundefined_u8m1(...) __riscv_vundefined_u8m1(__VA_ARGS__) |
| #define | vundefined_u8m2(...) __riscv_vundefined_u8m2(__VA_ARGS__) |
| #define | vundefined_u8m4(...) __riscv_vundefined_u8m4(__VA_ARGS__) |
| #define | vundefined_u8m8(...) __riscv_vundefined_u8m8(__VA_ARGS__) |
| #define | vundefined_u8mf2(...) __riscv_vundefined_u8mf2(__VA_ARGS__) |
| #define | vundefined_u8mf4(...) __riscv_vundefined_u8mf4(__VA_ARGS__) |
| #define | vundefined_u8mf8(...) __riscv_vundefined_u8mf8(__VA_ARGS__) |
| #define | vwadd_vv_i16m1(...) __riscv_vwadd_vv_i16m1(__VA_ARGS__) |
| #define | vwadd_vv_i16m1_m(...) __riscv_vwadd_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vwadd_vv_i16m2(...) __riscv_vwadd_vv_i16m2(__VA_ARGS__) |
| #define | vwadd_vv_i16m2_m(...) __riscv_vwadd_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vwadd_vv_i16m4(...) __riscv_vwadd_vv_i16m4(__VA_ARGS__) |
| #define | vwadd_vv_i16m4_m(...) __riscv_vwadd_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vwadd_vv_i16m8(...) __riscv_vwadd_vv_i16m8(__VA_ARGS__) |
| #define | vwadd_vv_i16m8_m(...) __riscv_vwadd_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vwadd_vv_i16mf2(...) __riscv_vwadd_vv_i16mf2(__VA_ARGS__) |
| #define | vwadd_vv_i16mf2_m(...) __riscv_vwadd_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vwadd_vv_i16mf4(...) __riscv_vwadd_vv_i16mf4(__VA_ARGS__) |
| #define | vwadd_vv_i16mf4_m(...) __riscv_vwadd_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vwadd_vv_i32m1(...) __riscv_vwadd_vv_i32m1(__VA_ARGS__) |
| #define | vwadd_vv_i32m1_m(...) __riscv_vwadd_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vwadd_vv_i32m2(...) __riscv_vwadd_vv_i32m2(__VA_ARGS__) |
| #define | vwadd_vv_i32m2_m(...) __riscv_vwadd_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vwadd_vv_i32m4(...) __riscv_vwadd_vv_i32m4(__VA_ARGS__) |
| #define | vwadd_vv_i32m4_m(...) __riscv_vwadd_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vwadd_vv_i32m8(...) __riscv_vwadd_vv_i32m8(__VA_ARGS__) |
| #define | vwadd_vv_i32m8_m(...) __riscv_vwadd_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vwadd_vv_i32mf2(...) __riscv_vwadd_vv_i32mf2(__VA_ARGS__) |
| #define | vwadd_vv_i32mf2_m(...) __riscv_vwadd_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vwadd_vv_i64m1(...) __riscv_vwadd_vv_i64m1(__VA_ARGS__) |
| #define | vwadd_vv_i64m1_m(...) __riscv_vwadd_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vwadd_vv_i64m2(...) __riscv_vwadd_vv_i64m2(__VA_ARGS__) |
| #define | vwadd_vv_i64m2_m(...) __riscv_vwadd_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vwadd_vv_i64m4(...) __riscv_vwadd_vv_i64m4(__VA_ARGS__) |
| #define | vwadd_vv_i64m4_m(...) __riscv_vwadd_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vwadd_vv_i64m8(...) __riscv_vwadd_vv_i64m8(__VA_ARGS__) |
| #define | vwadd_vv_i64m8_m(...) __riscv_vwadd_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vwadd_vx_i16m1(...) __riscv_vwadd_vx_i16m1(__VA_ARGS__) |
| #define | vwadd_vx_i16m1_m(...) __riscv_vwadd_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vwadd_vx_i16m2(...) __riscv_vwadd_vx_i16m2(__VA_ARGS__) |
| #define | vwadd_vx_i16m2_m(...) __riscv_vwadd_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vwadd_vx_i16m4(...) __riscv_vwadd_vx_i16m4(__VA_ARGS__) |
| #define | vwadd_vx_i16m4_m(...) __riscv_vwadd_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vwadd_vx_i16m8(...) __riscv_vwadd_vx_i16m8(__VA_ARGS__) |
| #define | vwadd_vx_i16m8_m(...) __riscv_vwadd_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vwadd_vx_i16mf2(...) __riscv_vwadd_vx_i16mf2(__VA_ARGS__) |
| #define | vwadd_vx_i16mf2_m(...) __riscv_vwadd_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vwadd_vx_i16mf4(...) __riscv_vwadd_vx_i16mf4(__VA_ARGS__) |
| #define | vwadd_vx_i16mf4_m(...) __riscv_vwadd_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vwadd_vx_i32m1(...) __riscv_vwadd_vx_i32m1(__VA_ARGS__) |
| #define | vwadd_vx_i32m1_m(...) __riscv_vwadd_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vwadd_vx_i32m2(...) __riscv_vwadd_vx_i32m2(__VA_ARGS__) |
| #define | vwadd_vx_i32m2_m(...) __riscv_vwadd_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vwadd_vx_i32m4(...) __riscv_vwadd_vx_i32m4(__VA_ARGS__) |
| #define | vwadd_vx_i32m4_m(...) __riscv_vwadd_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vwadd_vx_i32m8(...) __riscv_vwadd_vx_i32m8(__VA_ARGS__) |
| #define | vwadd_vx_i32m8_m(...) __riscv_vwadd_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vwadd_vx_i32mf2(...) __riscv_vwadd_vx_i32mf2(__VA_ARGS__) |
| #define | vwadd_vx_i32mf2_m(...) __riscv_vwadd_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vwadd_vx_i64m1(...) __riscv_vwadd_vx_i64m1(__VA_ARGS__) |
| #define | vwadd_vx_i64m1_m(...) __riscv_vwadd_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vwadd_vx_i64m2(...) __riscv_vwadd_vx_i64m2(__VA_ARGS__) |
| #define | vwadd_vx_i64m2_m(...) __riscv_vwadd_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vwadd_vx_i64m4(...) __riscv_vwadd_vx_i64m4(__VA_ARGS__) |
| #define | vwadd_vx_i64m4_m(...) __riscv_vwadd_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vwadd_vx_i64m8(...) __riscv_vwadd_vx_i64m8(__VA_ARGS__) |
| #define | vwadd_vx_i64m8_m(...) __riscv_vwadd_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vwadd_wv_i16m1(...) __riscv_vwadd_wv_i16m1(__VA_ARGS__) |
| #define | vwadd_wv_i16m1_m(...) __riscv_vwadd_wv_i16m1_tumu(__VA_ARGS__) |
| #define | vwadd_wv_i16m2(...) __riscv_vwadd_wv_i16m2(__VA_ARGS__) |
| #define | vwadd_wv_i16m2_m(...) __riscv_vwadd_wv_i16m2_tumu(__VA_ARGS__) |
| #define | vwadd_wv_i16m4(...) __riscv_vwadd_wv_i16m4(__VA_ARGS__) |
| #define | vwadd_wv_i16m4_m(...) __riscv_vwadd_wv_i16m4_tumu(__VA_ARGS__) |
| #define | vwadd_wv_i16m8(...) __riscv_vwadd_wv_i16m8(__VA_ARGS__) |
| #define | vwadd_wv_i16m8_m(...) __riscv_vwadd_wv_i16m8_tumu(__VA_ARGS__) |
| #define | vwadd_wv_i16mf2(...) __riscv_vwadd_wv_i16mf2(__VA_ARGS__) |
| #define | vwadd_wv_i16mf2_m(...) __riscv_vwadd_wv_i16mf2_tumu(__VA_ARGS__) |
| #define | vwadd_wv_i16mf4(...) __riscv_vwadd_wv_i16mf4(__VA_ARGS__) |
| #define | vwadd_wv_i16mf4_m(...) __riscv_vwadd_wv_i16mf4_tumu(__VA_ARGS__) |
| #define | vwadd_wv_i32m1(...) __riscv_vwadd_wv_i32m1(__VA_ARGS__) |
| #define | vwadd_wv_i32m1_m(...) __riscv_vwadd_wv_i32m1_tumu(__VA_ARGS__) |
| #define | vwadd_wv_i32m2(...) __riscv_vwadd_wv_i32m2(__VA_ARGS__) |
| #define | vwadd_wv_i32m2_m(...) __riscv_vwadd_wv_i32m2_tumu(__VA_ARGS__) |
| #define | vwadd_wv_i32m4(...) __riscv_vwadd_wv_i32m4(__VA_ARGS__) |
| #define | vwadd_wv_i32m4_m(...) __riscv_vwadd_wv_i32m4_tumu(__VA_ARGS__) |
| #define | vwadd_wv_i32m8(...) __riscv_vwadd_wv_i32m8(__VA_ARGS__) |
| #define | vwadd_wv_i32m8_m(...) __riscv_vwadd_wv_i32m8_tumu(__VA_ARGS__) |
| #define | vwadd_wv_i32mf2(...) __riscv_vwadd_wv_i32mf2(__VA_ARGS__) |
| #define | vwadd_wv_i32mf2_m(...) __riscv_vwadd_wv_i32mf2_tumu(__VA_ARGS__) |
| #define | vwadd_wv_i64m1(...) __riscv_vwadd_wv_i64m1(__VA_ARGS__) |
| #define | vwadd_wv_i64m1_m(...) __riscv_vwadd_wv_i64m1_tumu(__VA_ARGS__) |
| #define | vwadd_wv_i64m2(...) __riscv_vwadd_wv_i64m2(__VA_ARGS__) |
| #define | vwadd_wv_i64m2_m(...) __riscv_vwadd_wv_i64m2_tumu(__VA_ARGS__) |
| #define | vwadd_wv_i64m4(...) __riscv_vwadd_wv_i64m4(__VA_ARGS__) |
| #define | vwadd_wv_i64m4_m(...) __riscv_vwadd_wv_i64m4_tumu(__VA_ARGS__) |
| #define | vwadd_wv_i64m8(...) __riscv_vwadd_wv_i64m8(__VA_ARGS__) |
| #define | vwadd_wv_i64m8_m(...) __riscv_vwadd_wv_i64m8_tumu(__VA_ARGS__) |
| #define | vwadd_wx_i16m1(...) __riscv_vwadd_wx_i16m1(__VA_ARGS__) |
| #define | vwadd_wx_i16m1_m(...) __riscv_vwadd_wx_i16m1_tumu(__VA_ARGS__) |
| #define | vwadd_wx_i16m2(...) __riscv_vwadd_wx_i16m2(__VA_ARGS__) |
| #define | vwadd_wx_i16m2_m(...) __riscv_vwadd_wx_i16m2_tumu(__VA_ARGS__) |
| #define | vwadd_wx_i16m4(...) __riscv_vwadd_wx_i16m4(__VA_ARGS__) |
| #define | vwadd_wx_i16m4_m(...) __riscv_vwadd_wx_i16m4_tumu(__VA_ARGS__) |
| #define | vwadd_wx_i16m8(...) __riscv_vwadd_wx_i16m8(__VA_ARGS__) |
| #define | vwadd_wx_i16m8_m(...) __riscv_vwadd_wx_i16m8_tumu(__VA_ARGS__) |
| #define | vwadd_wx_i16mf2(...) __riscv_vwadd_wx_i16mf2(__VA_ARGS__) |
| #define | vwadd_wx_i16mf2_m(...) __riscv_vwadd_wx_i16mf2_tumu(__VA_ARGS__) |
| #define | vwadd_wx_i16mf4(...) __riscv_vwadd_wx_i16mf4(__VA_ARGS__) |
| #define | vwadd_wx_i16mf4_m(...) __riscv_vwadd_wx_i16mf4_tumu(__VA_ARGS__) |
| #define | vwadd_wx_i32m1(...) __riscv_vwadd_wx_i32m1(__VA_ARGS__) |
| #define | vwadd_wx_i32m1_m(...) __riscv_vwadd_wx_i32m1_tumu(__VA_ARGS__) |
| #define | vwadd_wx_i32m2(...) __riscv_vwadd_wx_i32m2(__VA_ARGS__) |
| #define | vwadd_wx_i32m2_m(...) __riscv_vwadd_wx_i32m2_tumu(__VA_ARGS__) |
| #define | vwadd_wx_i32m4(...) __riscv_vwadd_wx_i32m4(__VA_ARGS__) |
| #define | vwadd_wx_i32m4_m(...) __riscv_vwadd_wx_i32m4_tumu(__VA_ARGS__) |
| #define | vwadd_wx_i32m8(...) __riscv_vwadd_wx_i32m8(__VA_ARGS__) |
| #define | vwadd_wx_i32m8_m(...) __riscv_vwadd_wx_i32m8_tumu(__VA_ARGS__) |
| #define | vwadd_wx_i32mf2(...) __riscv_vwadd_wx_i32mf2(__VA_ARGS__) |
| #define | vwadd_wx_i32mf2_m(...) __riscv_vwadd_wx_i32mf2_tumu(__VA_ARGS__) |
| #define | vwadd_wx_i64m1(...) __riscv_vwadd_wx_i64m1(__VA_ARGS__) |
| #define | vwadd_wx_i64m1_m(...) __riscv_vwadd_wx_i64m1_tumu(__VA_ARGS__) |
| #define | vwadd_wx_i64m2(...) __riscv_vwadd_wx_i64m2(__VA_ARGS__) |
| #define | vwadd_wx_i64m2_m(...) __riscv_vwadd_wx_i64m2_tumu(__VA_ARGS__) |
| #define | vwadd_wx_i64m4(...) __riscv_vwadd_wx_i64m4(__VA_ARGS__) |
| #define | vwadd_wx_i64m4_m(...) __riscv_vwadd_wx_i64m4_tumu(__VA_ARGS__) |
| #define | vwadd_wx_i64m8(...) __riscv_vwadd_wx_i64m8(__VA_ARGS__) |
| #define | vwadd_wx_i64m8_m(...) __riscv_vwadd_wx_i64m8_tumu(__VA_ARGS__) |
| #define | vwaddu_vv_u16m1(...) __riscv_vwaddu_vv_u16m1(__VA_ARGS__) |
| #define | vwaddu_vv_u16m1_m(...) __riscv_vwaddu_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vwaddu_vv_u16m2(...) __riscv_vwaddu_vv_u16m2(__VA_ARGS__) |
| #define | vwaddu_vv_u16m2_m(...) __riscv_vwaddu_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vwaddu_vv_u16m4(...) __riscv_vwaddu_vv_u16m4(__VA_ARGS__) |
| #define | vwaddu_vv_u16m4_m(...) __riscv_vwaddu_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vwaddu_vv_u16m8(...) __riscv_vwaddu_vv_u16m8(__VA_ARGS__) |
| #define | vwaddu_vv_u16m8_m(...) __riscv_vwaddu_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vwaddu_vv_u16mf2(...) __riscv_vwaddu_vv_u16mf2(__VA_ARGS__) |
| #define | vwaddu_vv_u16mf2_m(...) __riscv_vwaddu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vwaddu_vv_u16mf4(...) __riscv_vwaddu_vv_u16mf4(__VA_ARGS__) |
| #define | vwaddu_vv_u16mf4_m(...) __riscv_vwaddu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vwaddu_vv_u32m1(...) __riscv_vwaddu_vv_u32m1(__VA_ARGS__) |
| #define | vwaddu_vv_u32m1_m(...) __riscv_vwaddu_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vwaddu_vv_u32m2(...) __riscv_vwaddu_vv_u32m2(__VA_ARGS__) |
| #define | vwaddu_vv_u32m2_m(...) __riscv_vwaddu_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vwaddu_vv_u32m4(...) __riscv_vwaddu_vv_u32m4(__VA_ARGS__) |
| #define | vwaddu_vv_u32m4_m(...) __riscv_vwaddu_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vwaddu_vv_u32m8(...) __riscv_vwaddu_vv_u32m8(__VA_ARGS__) |
| #define | vwaddu_vv_u32m8_m(...) __riscv_vwaddu_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vwaddu_vv_u32mf2(...) __riscv_vwaddu_vv_u32mf2(__VA_ARGS__) |
| #define | vwaddu_vv_u32mf2_m(...) __riscv_vwaddu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vwaddu_vv_u64m1(...) __riscv_vwaddu_vv_u64m1(__VA_ARGS__) |
| #define | vwaddu_vv_u64m1_m(...) __riscv_vwaddu_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vwaddu_vv_u64m2(...) __riscv_vwaddu_vv_u64m2(__VA_ARGS__) |
| #define | vwaddu_vv_u64m2_m(...) __riscv_vwaddu_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vwaddu_vv_u64m4(...) __riscv_vwaddu_vv_u64m4(__VA_ARGS__) |
| #define | vwaddu_vv_u64m4_m(...) __riscv_vwaddu_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vwaddu_vv_u64m8(...) __riscv_vwaddu_vv_u64m8(__VA_ARGS__) |
| #define | vwaddu_vv_u64m8_m(...) __riscv_vwaddu_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vwaddu_vx_u16m1(...) __riscv_vwaddu_vx_u16m1(__VA_ARGS__) |
| #define | vwaddu_vx_u16m1_m(...) __riscv_vwaddu_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vwaddu_vx_u16m2(...) __riscv_vwaddu_vx_u16m2(__VA_ARGS__) |
| #define | vwaddu_vx_u16m2_m(...) __riscv_vwaddu_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vwaddu_vx_u16m4(...) __riscv_vwaddu_vx_u16m4(__VA_ARGS__) |
| #define | vwaddu_vx_u16m4_m(...) __riscv_vwaddu_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vwaddu_vx_u16m8(...) __riscv_vwaddu_vx_u16m8(__VA_ARGS__) |
| #define | vwaddu_vx_u16m8_m(...) __riscv_vwaddu_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vwaddu_vx_u16mf2(...) __riscv_vwaddu_vx_u16mf2(__VA_ARGS__) |
| #define | vwaddu_vx_u16mf2_m(...) __riscv_vwaddu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vwaddu_vx_u16mf4(...) __riscv_vwaddu_vx_u16mf4(__VA_ARGS__) |
| #define | vwaddu_vx_u16mf4_m(...) __riscv_vwaddu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vwaddu_vx_u32m1(...) __riscv_vwaddu_vx_u32m1(__VA_ARGS__) |
| #define | vwaddu_vx_u32m1_m(...) __riscv_vwaddu_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vwaddu_vx_u32m2(...) __riscv_vwaddu_vx_u32m2(__VA_ARGS__) |
| #define | vwaddu_vx_u32m2_m(...) __riscv_vwaddu_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vwaddu_vx_u32m4(...) __riscv_vwaddu_vx_u32m4(__VA_ARGS__) |
| #define | vwaddu_vx_u32m4_m(...) __riscv_vwaddu_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vwaddu_vx_u32m8(...) __riscv_vwaddu_vx_u32m8(__VA_ARGS__) |
| #define | vwaddu_vx_u32m8_m(...) __riscv_vwaddu_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vwaddu_vx_u32mf2(...) __riscv_vwaddu_vx_u32mf2(__VA_ARGS__) |
| #define | vwaddu_vx_u32mf2_m(...) __riscv_vwaddu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vwaddu_vx_u64m1(...) __riscv_vwaddu_vx_u64m1(__VA_ARGS__) |
| #define | vwaddu_vx_u64m1_m(...) __riscv_vwaddu_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vwaddu_vx_u64m2(...) __riscv_vwaddu_vx_u64m2(__VA_ARGS__) |
| #define | vwaddu_vx_u64m2_m(...) __riscv_vwaddu_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vwaddu_vx_u64m4(...) __riscv_vwaddu_vx_u64m4(__VA_ARGS__) |
| #define | vwaddu_vx_u64m4_m(...) __riscv_vwaddu_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vwaddu_vx_u64m8(...) __riscv_vwaddu_vx_u64m8(__VA_ARGS__) |
| #define | vwaddu_vx_u64m8_m(...) __riscv_vwaddu_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vwaddu_wv_u16m1(...) __riscv_vwaddu_wv_u16m1(__VA_ARGS__) |
| #define | vwaddu_wv_u16m1_m(...) __riscv_vwaddu_wv_u16m1_tumu(__VA_ARGS__) |
| #define | vwaddu_wv_u16m2(...) __riscv_vwaddu_wv_u16m2(__VA_ARGS__) |
| #define | vwaddu_wv_u16m2_m(...) __riscv_vwaddu_wv_u16m2_tumu(__VA_ARGS__) |
| #define | vwaddu_wv_u16m4(...) __riscv_vwaddu_wv_u16m4(__VA_ARGS__) |
| #define | vwaddu_wv_u16m4_m(...) __riscv_vwaddu_wv_u16m4_tumu(__VA_ARGS__) |
| #define | vwaddu_wv_u16m8(...) __riscv_vwaddu_wv_u16m8(__VA_ARGS__) |
| #define | vwaddu_wv_u16m8_m(...) __riscv_vwaddu_wv_u16m8_tumu(__VA_ARGS__) |
| #define | vwaddu_wv_u16mf2(...) __riscv_vwaddu_wv_u16mf2(__VA_ARGS__) |
| #define | vwaddu_wv_u16mf2_m(...) __riscv_vwaddu_wv_u16mf2_tumu(__VA_ARGS__) |
| #define | vwaddu_wv_u16mf4(...) __riscv_vwaddu_wv_u16mf4(__VA_ARGS__) |
| #define | vwaddu_wv_u16mf4_m(...) __riscv_vwaddu_wv_u16mf4_tumu(__VA_ARGS__) |
| #define | vwaddu_wv_u32m1(...) __riscv_vwaddu_wv_u32m1(__VA_ARGS__) |
| #define | vwaddu_wv_u32m1_m(...) __riscv_vwaddu_wv_u32m1_tumu(__VA_ARGS__) |
| #define | vwaddu_wv_u32m2(...) __riscv_vwaddu_wv_u32m2(__VA_ARGS__) |
| #define | vwaddu_wv_u32m2_m(...) __riscv_vwaddu_wv_u32m2_tumu(__VA_ARGS__) |
| #define | vwaddu_wv_u32m4(...) __riscv_vwaddu_wv_u32m4(__VA_ARGS__) |
| #define | vwaddu_wv_u32m4_m(...) __riscv_vwaddu_wv_u32m4_tumu(__VA_ARGS__) |
| #define | vwaddu_wv_u32m8(...) __riscv_vwaddu_wv_u32m8(__VA_ARGS__) |
| #define | vwaddu_wv_u32m8_m(...) __riscv_vwaddu_wv_u32m8_tumu(__VA_ARGS__) |
| #define | vwaddu_wv_u32mf2(...) __riscv_vwaddu_wv_u32mf2(__VA_ARGS__) |
| #define | vwaddu_wv_u32mf2_m(...) __riscv_vwaddu_wv_u32mf2_tumu(__VA_ARGS__) |
| #define | vwaddu_wv_u64m1(...) __riscv_vwaddu_wv_u64m1(__VA_ARGS__) |
| #define | vwaddu_wv_u64m1_m(...) __riscv_vwaddu_wv_u64m1_tumu(__VA_ARGS__) |
| #define | vwaddu_wv_u64m2(...) __riscv_vwaddu_wv_u64m2(__VA_ARGS__) |
| #define | vwaddu_wv_u64m2_m(...) __riscv_vwaddu_wv_u64m2_tumu(__VA_ARGS__) |
| #define | vwaddu_wv_u64m4(...) __riscv_vwaddu_wv_u64m4(__VA_ARGS__) |
| #define | vwaddu_wv_u64m4_m(...) __riscv_vwaddu_wv_u64m4_tumu(__VA_ARGS__) |
| #define | vwaddu_wv_u64m8(...) __riscv_vwaddu_wv_u64m8(__VA_ARGS__) |
| #define | vwaddu_wv_u64m8_m(...) __riscv_vwaddu_wv_u64m8_tumu(__VA_ARGS__) |
| #define | vwaddu_wx_u16m1(...) __riscv_vwaddu_wx_u16m1(__VA_ARGS__) |
| #define | vwaddu_wx_u16m1_m(...) __riscv_vwaddu_wx_u16m1_tumu(__VA_ARGS__) |
| #define | vwaddu_wx_u16m2(...) __riscv_vwaddu_wx_u16m2(__VA_ARGS__) |
| #define | vwaddu_wx_u16m2_m(...) __riscv_vwaddu_wx_u16m2_tumu(__VA_ARGS__) |
| #define | vwaddu_wx_u16m4(...) __riscv_vwaddu_wx_u16m4(__VA_ARGS__) |
| #define | vwaddu_wx_u16m4_m(...) __riscv_vwaddu_wx_u16m4_tumu(__VA_ARGS__) |
| #define | vwaddu_wx_u16m8(...) __riscv_vwaddu_wx_u16m8(__VA_ARGS__) |
| #define | vwaddu_wx_u16m8_m(...) __riscv_vwaddu_wx_u16m8_tumu(__VA_ARGS__) |
| #define | vwaddu_wx_u16mf2(...) __riscv_vwaddu_wx_u16mf2(__VA_ARGS__) |
| #define | vwaddu_wx_u16mf2_m(...) __riscv_vwaddu_wx_u16mf2_tumu(__VA_ARGS__) |
| #define | vwaddu_wx_u16mf4(...) __riscv_vwaddu_wx_u16mf4(__VA_ARGS__) |
| #define | vwaddu_wx_u16mf4_m(...) __riscv_vwaddu_wx_u16mf4_tumu(__VA_ARGS__) |
| #define | vwaddu_wx_u32m1(...) __riscv_vwaddu_wx_u32m1(__VA_ARGS__) |
| #define | vwaddu_wx_u32m1_m(...) __riscv_vwaddu_wx_u32m1_tumu(__VA_ARGS__) |
| #define | vwaddu_wx_u32m2(...) __riscv_vwaddu_wx_u32m2(__VA_ARGS__) |
| #define | vwaddu_wx_u32m2_m(...) __riscv_vwaddu_wx_u32m2_tumu(__VA_ARGS__) |
| #define | vwaddu_wx_u32m4(...) __riscv_vwaddu_wx_u32m4(__VA_ARGS__) |
| #define | vwaddu_wx_u32m4_m(...) __riscv_vwaddu_wx_u32m4_tumu(__VA_ARGS__) |
| #define | vwaddu_wx_u32m8(...) __riscv_vwaddu_wx_u32m8(__VA_ARGS__) |
| #define | vwaddu_wx_u32m8_m(...) __riscv_vwaddu_wx_u32m8_tumu(__VA_ARGS__) |
| #define | vwaddu_wx_u32mf2(...) __riscv_vwaddu_wx_u32mf2(__VA_ARGS__) |
| #define | vwaddu_wx_u32mf2_m(...) __riscv_vwaddu_wx_u32mf2_tumu(__VA_ARGS__) |
| #define | vwaddu_wx_u64m1(...) __riscv_vwaddu_wx_u64m1(__VA_ARGS__) |
| #define | vwaddu_wx_u64m1_m(...) __riscv_vwaddu_wx_u64m1_tumu(__VA_ARGS__) |
| #define | vwaddu_wx_u64m2(...) __riscv_vwaddu_wx_u64m2(__VA_ARGS__) |
| #define | vwaddu_wx_u64m2_m(...) __riscv_vwaddu_wx_u64m2_tumu(__VA_ARGS__) |
| #define | vwaddu_wx_u64m4(...) __riscv_vwaddu_wx_u64m4(__VA_ARGS__) |
| #define | vwaddu_wx_u64m4_m(...) __riscv_vwaddu_wx_u64m4_tumu(__VA_ARGS__) |
| #define | vwaddu_wx_u64m8(...) __riscv_vwaddu_wx_u64m8(__VA_ARGS__) |
| #define | vwaddu_wx_u64m8_m(...) __riscv_vwaddu_wx_u64m8_tumu(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i16m1(...) __riscv_vwcvt_x_x_v_i16m1(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i16m1_m(...) __riscv_vwcvt_x_x_v_i16m1_tumu(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i16m2(...) __riscv_vwcvt_x_x_v_i16m2(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i16m2_m(...) __riscv_vwcvt_x_x_v_i16m2_tumu(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i16m4(...) __riscv_vwcvt_x_x_v_i16m4(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i16m4_m(...) __riscv_vwcvt_x_x_v_i16m4_tumu(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i16m8(...) __riscv_vwcvt_x_x_v_i16m8(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i16m8_m(...) __riscv_vwcvt_x_x_v_i16m8_tumu(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i16mf2(...) __riscv_vwcvt_x_x_v_i16mf2(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i16mf2_m(...) __riscv_vwcvt_x_x_v_i16mf2_tumu(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i16mf4(...) __riscv_vwcvt_x_x_v_i16mf4(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i16mf4_m(...) __riscv_vwcvt_x_x_v_i16mf4_tumu(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i32m1(...) __riscv_vwcvt_x_x_v_i32m1(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i32m1_m(...) __riscv_vwcvt_x_x_v_i32m1_tumu(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i32m2(...) __riscv_vwcvt_x_x_v_i32m2(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i32m2_m(...) __riscv_vwcvt_x_x_v_i32m2_tumu(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i32m4(...) __riscv_vwcvt_x_x_v_i32m4(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i32m4_m(...) __riscv_vwcvt_x_x_v_i32m4_tumu(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i32m8(...) __riscv_vwcvt_x_x_v_i32m8(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i32m8_m(...) __riscv_vwcvt_x_x_v_i32m8_tumu(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i32mf2(...) __riscv_vwcvt_x_x_v_i32mf2(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i32mf2_m(...) __riscv_vwcvt_x_x_v_i32mf2_tumu(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i64m1(...) __riscv_vwcvt_x_x_v_i64m1(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i64m1_m(...) __riscv_vwcvt_x_x_v_i64m1_tumu(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i64m2(...) __riscv_vwcvt_x_x_v_i64m2(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i64m2_m(...) __riscv_vwcvt_x_x_v_i64m2_tumu(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i64m4(...) __riscv_vwcvt_x_x_v_i64m4(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i64m4_m(...) __riscv_vwcvt_x_x_v_i64m4_tumu(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i64m8(...) __riscv_vwcvt_x_x_v_i64m8(__VA_ARGS__) |
| #define | vwcvt_x_x_v_i64m8_m(...) __riscv_vwcvt_x_x_v_i64m8_tumu(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u16m1(...) __riscv_vwcvtu_x_x_v_u16m1(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u16m1_m(...) __riscv_vwcvtu_x_x_v_u16m1_tumu(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u16m2(...) __riscv_vwcvtu_x_x_v_u16m2(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u16m2_m(...) __riscv_vwcvtu_x_x_v_u16m2_tumu(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u16m4(...) __riscv_vwcvtu_x_x_v_u16m4(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u16m4_m(...) __riscv_vwcvtu_x_x_v_u16m4_tumu(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u16m8(...) __riscv_vwcvtu_x_x_v_u16m8(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u16m8_m(...) __riscv_vwcvtu_x_x_v_u16m8_tumu(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u16mf2(...) __riscv_vwcvtu_x_x_v_u16mf2(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u16mf2_m(...) __riscv_vwcvtu_x_x_v_u16mf2_tumu(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u16mf4(...) __riscv_vwcvtu_x_x_v_u16mf4(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u16mf4_m(...) __riscv_vwcvtu_x_x_v_u16mf4_tumu(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u32m1(...) __riscv_vwcvtu_x_x_v_u32m1(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u32m1_m(...) __riscv_vwcvtu_x_x_v_u32m1_tumu(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u32m2(...) __riscv_vwcvtu_x_x_v_u32m2(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u32m2_m(...) __riscv_vwcvtu_x_x_v_u32m2_tumu(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u32m4(...) __riscv_vwcvtu_x_x_v_u32m4(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u32m4_m(...) __riscv_vwcvtu_x_x_v_u32m4_tumu(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u32m8(...) __riscv_vwcvtu_x_x_v_u32m8(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u32m8_m(...) __riscv_vwcvtu_x_x_v_u32m8_tumu(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u32mf2(...) __riscv_vwcvtu_x_x_v_u32mf2(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u32mf2_m(...) __riscv_vwcvtu_x_x_v_u32mf2_tumu(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u64m1(...) __riscv_vwcvtu_x_x_v_u64m1(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u64m1_m(...) __riscv_vwcvtu_x_x_v_u64m1_tumu(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u64m2(...) __riscv_vwcvtu_x_x_v_u64m2(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u64m2_m(...) __riscv_vwcvtu_x_x_v_u64m2_tumu(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u64m4(...) __riscv_vwcvtu_x_x_v_u64m4(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u64m4_m(...) __riscv_vwcvtu_x_x_v_u64m4_tumu(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u64m8(...) __riscv_vwcvtu_x_x_v_u64m8(__VA_ARGS__) |
| #define | vwcvtu_x_x_v_u64m8_m(...) __riscv_vwcvtu_x_x_v_u64m8_tumu(__VA_ARGS__) |
| #define | vwmacc_vv_i16m1(...) __riscv_vwmacc_vv_i16m1_tu(__VA_ARGS__) |
| #define | vwmacc_vv_i16m1_m(...) __riscv_vwmacc_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vwmacc_vv_i16m2(...) __riscv_vwmacc_vv_i16m2_tu(__VA_ARGS__) |
| #define | vwmacc_vv_i16m2_m(...) __riscv_vwmacc_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vwmacc_vv_i16m4(...) __riscv_vwmacc_vv_i16m4_tu(__VA_ARGS__) |
| #define | vwmacc_vv_i16m4_m(...) __riscv_vwmacc_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vwmacc_vv_i16m8(...) __riscv_vwmacc_vv_i16m8_tu(__VA_ARGS__) |
| #define | vwmacc_vv_i16m8_m(...) __riscv_vwmacc_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vwmacc_vv_i16mf2(...) __riscv_vwmacc_vv_i16mf2_tu(__VA_ARGS__) |
| #define | vwmacc_vv_i16mf2_m(...) __riscv_vwmacc_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vwmacc_vv_i16mf4(...) __riscv_vwmacc_vv_i16mf4_tu(__VA_ARGS__) |
| #define | vwmacc_vv_i16mf4_m(...) __riscv_vwmacc_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vwmacc_vv_i32m1(...) __riscv_vwmacc_vv_i32m1_tu(__VA_ARGS__) |
| #define | vwmacc_vv_i32m1_m(...) __riscv_vwmacc_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vwmacc_vv_i32m2(...) __riscv_vwmacc_vv_i32m2_tu(__VA_ARGS__) |
| #define | vwmacc_vv_i32m2_m(...) __riscv_vwmacc_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vwmacc_vv_i32m4(...) __riscv_vwmacc_vv_i32m4_tu(__VA_ARGS__) |
| #define | vwmacc_vv_i32m4_m(...) __riscv_vwmacc_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vwmacc_vv_i32m8(...) __riscv_vwmacc_vv_i32m8_tu(__VA_ARGS__) |
| #define | vwmacc_vv_i32m8_m(...) __riscv_vwmacc_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vwmacc_vv_i32mf2(...) __riscv_vwmacc_vv_i32mf2_tu(__VA_ARGS__) |
| #define | vwmacc_vv_i32mf2_m(...) __riscv_vwmacc_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vwmacc_vv_i64m1(...) __riscv_vwmacc_vv_i64m1_tu(__VA_ARGS__) |
| #define | vwmacc_vv_i64m1_m(...) __riscv_vwmacc_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vwmacc_vv_i64m2(...) __riscv_vwmacc_vv_i64m2_tu(__VA_ARGS__) |
| #define | vwmacc_vv_i64m2_m(...) __riscv_vwmacc_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vwmacc_vv_i64m4(...) __riscv_vwmacc_vv_i64m4_tu(__VA_ARGS__) |
| #define | vwmacc_vv_i64m4_m(...) __riscv_vwmacc_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vwmacc_vv_i64m8(...) __riscv_vwmacc_vv_i64m8_tu(__VA_ARGS__) |
| #define | vwmacc_vv_i64m8_m(...) __riscv_vwmacc_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vwmacc_vx_i16m1(...) __riscv_vwmacc_vx_i16m1_tu(__VA_ARGS__) |
| #define | vwmacc_vx_i16m1_m(...) __riscv_vwmacc_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vwmacc_vx_i16m2(...) __riscv_vwmacc_vx_i16m2_tu(__VA_ARGS__) |
| #define | vwmacc_vx_i16m2_m(...) __riscv_vwmacc_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vwmacc_vx_i16m4(...) __riscv_vwmacc_vx_i16m4_tu(__VA_ARGS__) |
| #define | vwmacc_vx_i16m4_m(...) __riscv_vwmacc_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vwmacc_vx_i16m8(...) __riscv_vwmacc_vx_i16m8_tu(__VA_ARGS__) |
| #define | vwmacc_vx_i16m8_m(...) __riscv_vwmacc_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vwmacc_vx_i16mf2(...) __riscv_vwmacc_vx_i16mf2_tu(__VA_ARGS__) |
| #define | vwmacc_vx_i16mf2_m(...) __riscv_vwmacc_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vwmacc_vx_i16mf4(...) __riscv_vwmacc_vx_i16mf4_tu(__VA_ARGS__) |
| #define | vwmacc_vx_i16mf4_m(...) __riscv_vwmacc_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vwmacc_vx_i32m1(...) __riscv_vwmacc_vx_i32m1_tu(__VA_ARGS__) |
| #define | vwmacc_vx_i32m1_m(...) __riscv_vwmacc_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vwmacc_vx_i32m2(...) __riscv_vwmacc_vx_i32m2_tu(__VA_ARGS__) |
| #define | vwmacc_vx_i32m2_m(...) __riscv_vwmacc_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vwmacc_vx_i32m4(...) __riscv_vwmacc_vx_i32m4_tu(__VA_ARGS__) |
| #define | vwmacc_vx_i32m4_m(...) __riscv_vwmacc_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vwmacc_vx_i32m8(...) __riscv_vwmacc_vx_i32m8_tu(__VA_ARGS__) |
| #define | vwmacc_vx_i32m8_m(...) __riscv_vwmacc_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vwmacc_vx_i32mf2(...) __riscv_vwmacc_vx_i32mf2_tu(__VA_ARGS__) |
| #define | vwmacc_vx_i32mf2_m(...) __riscv_vwmacc_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vwmacc_vx_i64m1(...) __riscv_vwmacc_vx_i64m1_tu(__VA_ARGS__) |
| #define | vwmacc_vx_i64m1_m(...) __riscv_vwmacc_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vwmacc_vx_i64m2(...) __riscv_vwmacc_vx_i64m2_tu(__VA_ARGS__) |
| #define | vwmacc_vx_i64m2_m(...) __riscv_vwmacc_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vwmacc_vx_i64m4(...) __riscv_vwmacc_vx_i64m4_tu(__VA_ARGS__) |
| #define | vwmacc_vx_i64m4_m(...) __riscv_vwmacc_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vwmacc_vx_i64m8(...) __riscv_vwmacc_vx_i64m8_tu(__VA_ARGS__) |
| #define | vwmacc_vx_i64m8_m(...) __riscv_vwmacc_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i16m1(...) __riscv_vwmaccsu_vv_i16m1_tu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i16m1_m(...) __riscv_vwmaccsu_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i16m2(...) __riscv_vwmaccsu_vv_i16m2_tu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i16m2_m(...) __riscv_vwmaccsu_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i16m4(...) __riscv_vwmaccsu_vv_i16m4_tu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i16m4_m(...) __riscv_vwmaccsu_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i16m8(...) __riscv_vwmaccsu_vv_i16m8_tu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i16m8_m(...) __riscv_vwmaccsu_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i16mf2(...) __riscv_vwmaccsu_vv_i16mf2_tu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i16mf2_m(...) __riscv_vwmaccsu_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i16mf4(...) __riscv_vwmaccsu_vv_i16mf4_tu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i16mf4_m(...) __riscv_vwmaccsu_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i32m1(...) __riscv_vwmaccsu_vv_i32m1_tu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i32m1_m(...) __riscv_vwmaccsu_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i32m2(...) __riscv_vwmaccsu_vv_i32m2_tu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i32m2_m(...) __riscv_vwmaccsu_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i32m4(...) __riscv_vwmaccsu_vv_i32m4_tu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i32m4_m(...) __riscv_vwmaccsu_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i32m8(...) __riscv_vwmaccsu_vv_i32m8_tu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i32m8_m(...) __riscv_vwmaccsu_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i32mf2(...) __riscv_vwmaccsu_vv_i32mf2_tu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i32mf2_m(...) __riscv_vwmaccsu_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i64m1(...) __riscv_vwmaccsu_vv_i64m1_tu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i64m1_m(...) __riscv_vwmaccsu_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i64m2(...) __riscv_vwmaccsu_vv_i64m2_tu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i64m2_m(...) __riscv_vwmaccsu_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i64m4(...) __riscv_vwmaccsu_vv_i64m4_tu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i64m4_m(...) __riscv_vwmaccsu_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i64m8(...) __riscv_vwmaccsu_vv_i64m8_tu(__VA_ARGS__) |
| #define | vwmaccsu_vv_i64m8_m(...) __riscv_vwmaccsu_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i16m1(...) __riscv_vwmaccsu_vx_i16m1_tu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i16m1_m(...) __riscv_vwmaccsu_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i16m2(...) __riscv_vwmaccsu_vx_i16m2_tu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i16m2_m(...) __riscv_vwmaccsu_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i16m4(...) __riscv_vwmaccsu_vx_i16m4_tu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i16m4_m(...) __riscv_vwmaccsu_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i16m8(...) __riscv_vwmaccsu_vx_i16m8_tu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i16m8_m(...) __riscv_vwmaccsu_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i16mf2(...) __riscv_vwmaccsu_vx_i16mf2_tu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i16mf2_m(...) __riscv_vwmaccsu_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i16mf4(...) __riscv_vwmaccsu_vx_i16mf4_tu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i16mf4_m(...) __riscv_vwmaccsu_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i32m1(...) __riscv_vwmaccsu_vx_i32m1_tu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i32m1_m(...) __riscv_vwmaccsu_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i32m2(...) __riscv_vwmaccsu_vx_i32m2_tu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i32m2_m(...) __riscv_vwmaccsu_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i32m4(...) __riscv_vwmaccsu_vx_i32m4_tu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i32m4_m(...) __riscv_vwmaccsu_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i32m8(...) __riscv_vwmaccsu_vx_i32m8_tu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i32m8_m(...) __riscv_vwmaccsu_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i32mf2(...) __riscv_vwmaccsu_vx_i32mf2_tu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i32mf2_m(...) __riscv_vwmaccsu_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i64m1(...) __riscv_vwmaccsu_vx_i64m1_tu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i64m1_m(...) __riscv_vwmaccsu_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i64m2(...) __riscv_vwmaccsu_vx_i64m2_tu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i64m2_m(...) __riscv_vwmaccsu_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i64m4(...) __riscv_vwmaccsu_vx_i64m4_tu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i64m4_m(...) __riscv_vwmaccsu_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i64m8(...) __riscv_vwmaccsu_vx_i64m8_tu(__VA_ARGS__) |
| #define | vwmaccsu_vx_i64m8_m(...) __riscv_vwmaccsu_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vwmaccu_vv_u16m1(...) __riscv_vwmaccu_vv_u16m1_tu(__VA_ARGS__) |
| #define | vwmaccu_vv_u16m1_m(...) __riscv_vwmaccu_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vwmaccu_vv_u16m2(...) __riscv_vwmaccu_vv_u16m2_tu(__VA_ARGS__) |
| #define | vwmaccu_vv_u16m2_m(...) __riscv_vwmaccu_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vwmaccu_vv_u16m4(...) __riscv_vwmaccu_vv_u16m4_tu(__VA_ARGS__) |
| #define | vwmaccu_vv_u16m4_m(...) __riscv_vwmaccu_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vwmaccu_vv_u16m8(...) __riscv_vwmaccu_vv_u16m8_tu(__VA_ARGS__) |
| #define | vwmaccu_vv_u16m8_m(...) __riscv_vwmaccu_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vwmaccu_vv_u16mf2(...) __riscv_vwmaccu_vv_u16mf2_tu(__VA_ARGS__) |
| #define | vwmaccu_vv_u16mf2_m(...) __riscv_vwmaccu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vwmaccu_vv_u16mf4(...) __riscv_vwmaccu_vv_u16mf4_tu(__VA_ARGS__) |
| #define | vwmaccu_vv_u16mf4_m(...) __riscv_vwmaccu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vwmaccu_vv_u32m1(...) __riscv_vwmaccu_vv_u32m1_tu(__VA_ARGS__) |
| #define | vwmaccu_vv_u32m1_m(...) __riscv_vwmaccu_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vwmaccu_vv_u32m2(...) __riscv_vwmaccu_vv_u32m2_tu(__VA_ARGS__) |
| #define | vwmaccu_vv_u32m2_m(...) __riscv_vwmaccu_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vwmaccu_vv_u32m4(...) __riscv_vwmaccu_vv_u32m4_tu(__VA_ARGS__) |
| #define | vwmaccu_vv_u32m4_m(...) __riscv_vwmaccu_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vwmaccu_vv_u32m8(...) __riscv_vwmaccu_vv_u32m8_tu(__VA_ARGS__) |
| #define | vwmaccu_vv_u32m8_m(...) __riscv_vwmaccu_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vwmaccu_vv_u32mf2(...) __riscv_vwmaccu_vv_u32mf2_tu(__VA_ARGS__) |
| #define | vwmaccu_vv_u32mf2_m(...) __riscv_vwmaccu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vwmaccu_vv_u64m1(...) __riscv_vwmaccu_vv_u64m1_tu(__VA_ARGS__) |
| #define | vwmaccu_vv_u64m1_m(...) __riscv_vwmaccu_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vwmaccu_vv_u64m2(...) __riscv_vwmaccu_vv_u64m2_tu(__VA_ARGS__) |
| #define | vwmaccu_vv_u64m2_m(...) __riscv_vwmaccu_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vwmaccu_vv_u64m4(...) __riscv_vwmaccu_vv_u64m4_tu(__VA_ARGS__) |
| #define | vwmaccu_vv_u64m4_m(...) __riscv_vwmaccu_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vwmaccu_vv_u64m8(...) __riscv_vwmaccu_vv_u64m8_tu(__VA_ARGS__) |
| #define | vwmaccu_vv_u64m8_m(...) __riscv_vwmaccu_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vwmaccu_vx_u16m1(...) __riscv_vwmaccu_vx_u16m1_tu(__VA_ARGS__) |
| #define | vwmaccu_vx_u16m1_m(...) __riscv_vwmaccu_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vwmaccu_vx_u16m2(...) __riscv_vwmaccu_vx_u16m2_tu(__VA_ARGS__) |
| #define | vwmaccu_vx_u16m2_m(...) __riscv_vwmaccu_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vwmaccu_vx_u16m4(...) __riscv_vwmaccu_vx_u16m4_tu(__VA_ARGS__) |
| #define | vwmaccu_vx_u16m4_m(...) __riscv_vwmaccu_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vwmaccu_vx_u16m8(...) __riscv_vwmaccu_vx_u16m8_tu(__VA_ARGS__) |
| #define | vwmaccu_vx_u16m8_m(...) __riscv_vwmaccu_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vwmaccu_vx_u16mf2(...) __riscv_vwmaccu_vx_u16mf2_tu(__VA_ARGS__) |
| #define | vwmaccu_vx_u16mf2_m(...) __riscv_vwmaccu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vwmaccu_vx_u16mf4(...) __riscv_vwmaccu_vx_u16mf4_tu(__VA_ARGS__) |
| #define | vwmaccu_vx_u16mf4_m(...) __riscv_vwmaccu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vwmaccu_vx_u32m1(...) __riscv_vwmaccu_vx_u32m1_tu(__VA_ARGS__) |
| #define | vwmaccu_vx_u32m1_m(...) __riscv_vwmaccu_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vwmaccu_vx_u32m2(...) __riscv_vwmaccu_vx_u32m2_tu(__VA_ARGS__) |
| #define | vwmaccu_vx_u32m2_m(...) __riscv_vwmaccu_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vwmaccu_vx_u32m4(...) __riscv_vwmaccu_vx_u32m4_tu(__VA_ARGS__) |
| #define | vwmaccu_vx_u32m4_m(...) __riscv_vwmaccu_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vwmaccu_vx_u32m8(...) __riscv_vwmaccu_vx_u32m8_tu(__VA_ARGS__) |
| #define | vwmaccu_vx_u32m8_m(...) __riscv_vwmaccu_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vwmaccu_vx_u32mf2(...) __riscv_vwmaccu_vx_u32mf2_tu(__VA_ARGS__) |
| #define | vwmaccu_vx_u32mf2_m(...) __riscv_vwmaccu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vwmaccu_vx_u64m1(...) __riscv_vwmaccu_vx_u64m1_tu(__VA_ARGS__) |
| #define | vwmaccu_vx_u64m1_m(...) __riscv_vwmaccu_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vwmaccu_vx_u64m2(...) __riscv_vwmaccu_vx_u64m2_tu(__VA_ARGS__) |
| #define | vwmaccu_vx_u64m2_m(...) __riscv_vwmaccu_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vwmaccu_vx_u64m4(...) __riscv_vwmaccu_vx_u64m4_tu(__VA_ARGS__) |
| #define | vwmaccu_vx_u64m4_m(...) __riscv_vwmaccu_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vwmaccu_vx_u64m8(...) __riscv_vwmaccu_vx_u64m8_tu(__VA_ARGS__) |
| #define | vwmaccu_vx_u64m8_m(...) __riscv_vwmaccu_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vwmaccus_vx_i16m1(...) __riscv_vwmaccus_vx_i16m1_tu(__VA_ARGS__) |
| #define | vwmaccus_vx_i16m1_m(...) __riscv_vwmaccus_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vwmaccus_vx_i16m2(...) __riscv_vwmaccus_vx_i16m2_tu(__VA_ARGS__) |
| #define | vwmaccus_vx_i16m2_m(...) __riscv_vwmaccus_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vwmaccus_vx_i16m4(...) __riscv_vwmaccus_vx_i16m4_tu(__VA_ARGS__) |
| #define | vwmaccus_vx_i16m4_m(...) __riscv_vwmaccus_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vwmaccus_vx_i16m8(...) __riscv_vwmaccus_vx_i16m8_tu(__VA_ARGS__) |
| #define | vwmaccus_vx_i16m8_m(...) __riscv_vwmaccus_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vwmaccus_vx_i16mf2(...) __riscv_vwmaccus_vx_i16mf2_tu(__VA_ARGS__) |
| #define | vwmaccus_vx_i16mf2_m(...) __riscv_vwmaccus_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vwmaccus_vx_i16mf4(...) __riscv_vwmaccus_vx_i16mf4_tu(__VA_ARGS__) |
| #define | vwmaccus_vx_i16mf4_m(...) __riscv_vwmaccus_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vwmaccus_vx_i32m1(...) __riscv_vwmaccus_vx_i32m1_tu(__VA_ARGS__) |
| #define | vwmaccus_vx_i32m1_m(...) __riscv_vwmaccus_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vwmaccus_vx_i32m2(...) __riscv_vwmaccus_vx_i32m2_tu(__VA_ARGS__) |
| #define | vwmaccus_vx_i32m2_m(...) __riscv_vwmaccus_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vwmaccus_vx_i32m4(...) __riscv_vwmaccus_vx_i32m4_tu(__VA_ARGS__) |
| #define | vwmaccus_vx_i32m4_m(...) __riscv_vwmaccus_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vwmaccus_vx_i32m8(...) __riscv_vwmaccus_vx_i32m8_tu(__VA_ARGS__) |
| #define | vwmaccus_vx_i32m8_m(...) __riscv_vwmaccus_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vwmaccus_vx_i32mf2(...) __riscv_vwmaccus_vx_i32mf2_tu(__VA_ARGS__) |
| #define | vwmaccus_vx_i32mf2_m(...) __riscv_vwmaccus_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vwmaccus_vx_i64m1(...) __riscv_vwmaccus_vx_i64m1_tu(__VA_ARGS__) |
| #define | vwmaccus_vx_i64m1_m(...) __riscv_vwmaccus_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vwmaccus_vx_i64m2(...) __riscv_vwmaccus_vx_i64m2_tu(__VA_ARGS__) |
| #define | vwmaccus_vx_i64m2_m(...) __riscv_vwmaccus_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vwmaccus_vx_i64m4(...) __riscv_vwmaccus_vx_i64m4_tu(__VA_ARGS__) |
| #define | vwmaccus_vx_i64m4_m(...) __riscv_vwmaccus_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vwmaccus_vx_i64m8(...) __riscv_vwmaccus_vx_i64m8_tu(__VA_ARGS__) |
| #define | vwmaccus_vx_i64m8_m(...) __riscv_vwmaccus_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vwmul_vv_i16m1(...) __riscv_vwmul_vv_i16m1(__VA_ARGS__) |
| #define | vwmul_vv_i16m1_m(...) __riscv_vwmul_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vwmul_vv_i16m2(...) __riscv_vwmul_vv_i16m2(__VA_ARGS__) |
| #define | vwmul_vv_i16m2_m(...) __riscv_vwmul_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vwmul_vv_i16m4(...) __riscv_vwmul_vv_i16m4(__VA_ARGS__) |
| #define | vwmul_vv_i16m4_m(...) __riscv_vwmul_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vwmul_vv_i16m8(...) __riscv_vwmul_vv_i16m8(__VA_ARGS__) |
| #define | vwmul_vv_i16m8_m(...) __riscv_vwmul_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vwmul_vv_i16mf2(...) __riscv_vwmul_vv_i16mf2(__VA_ARGS__) |
| #define | vwmul_vv_i16mf2_m(...) __riscv_vwmul_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vwmul_vv_i16mf4(...) __riscv_vwmul_vv_i16mf4(__VA_ARGS__) |
| #define | vwmul_vv_i16mf4_m(...) __riscv_vwmul_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vwmul_vv_i32m1(...) __riscv_vwmul_vv_i32m1(__VA_ARGS__) |
| #define | vwmul_vv_i32m1_m(...) __riscv_vwmul_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vwmul_vv_i32m2(...) __riscv_vwmul_vv_i32m2(__VA_ARGS__) |
| #define | vwmul_vv_i32m2_m(...) __riscv_vwmul_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vwmul_vv_i32m4(...) __riscv_vwmul_vv_i32m4(__VA_ARGS__) |
| #define | vwmul_vv_i32m4_m(...) __riscv_vwmul_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vwmul_vv_i32m8(...) __riscv_vwmul_vv_i32m8(__VA_ARGS__) |
| #define | vwmul_vv_i32m8_m(...) __riscv_vwmul_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vwmul_vv_i32mf2(...) __riscv_vwmul_vv_i32mf2(__VA_ARGS__) |
| #define | vwmul_vv_i32mf2_m(...) __riscv_vwmul_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vwmul_vv_i64m1(...) __riscv_vwmul_vv_i64m1(__VA_ARGS__) |
| #define | vwmul_vv_i64m1_m(...) __riscv_vwmul_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vwmul_vv_i64m2(...) __riscv_vwmul_vv_i64m2(__VA_ARGS__) |
| #define | vwmul_vv_i64m2_m(...) __riscv_vwmul_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vwmul_vv_i64m4(...) __riscv_vwmul_vv_i64m4(__VA_ARGS__) |
| #define | vwmul_vv_i64m4_m(...) __riscv_vwmul_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vwmul_vv_i64m8(...) __riscv_vwmul_vv_i64m8(__VA_ARGS__) |
| #define | vwmul_vv_i64m8_m(...) __riscv_vwmul_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vwmul_vx_i16m1(...) __riscv_vwmul_vx_i16m1(__VA_ARGS__) |
| #define | vwmul_vx_i16m1_m(...) __riscv_vwmul_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vwmul_vx_i16m2(...) __riscv_vwmul_vx_i16m2(__VA_ARGS__) |
| #define | vwmul_vx_i16m2_m(...) __riscv_vwmul_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vwmul_vx_i16m4(...) __riscv_vwmul_vx_i16m4(__VA_ARGS__) |
| #define | vwmul_vx_i16m4_m(...) __riscv_vwmul_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vwmul_vx_i16m8(...) __riscv_vwmul_vx_i16m8(__VA_ARGS__) |
| #define | vwmul_vx_i16m8_m(...) __riscv_vwmul_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vwmul_vx_i16mf2(...) __riscv_vwmul_vx_i16mf2(__VA_ARGS__) |
| #define | vwmul_vx_i16mf2_m(...) __riscv_vwmul_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vwmul_vx_i16mf4(...) __riscv_vwmul_vx_i16mf4(__VA_ARGS__) |
| #define | vwmul_vx_i16mf4_m(...) __riscv_vwmul_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vwmul_vx_i32m1(...) __riscv_vwmul_vx_i32m1(__VA_ARGS__) |
| #define | vwmul_vx_i32m1_m(...) __riscv_vwmul_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vwmul_vx_i32m2(...) __riscv_vwmul_vx_i32m2(__VA_ARGS__) |
| #define | vwmul_vx_i32m2_m(...) __riscv_vwmul_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vwmul_vx_i32m4(...) __riscv_vwmul_vx_i32m4(__VA_ARGS__) |
| #define | vwmul_vx_i32m4_m(...) __riscv_vwmul_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vwmul_vx_i32m8(...) __riscv_vwmul_vx_i32m8(__VA_ARGS__) |
| #define | vwmul_vx_i32m8_m(...) __riscv_vwmul_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vwmul_vx_i32mf2(...) __riscv_vwmul_vx_i32mf2(__VA_ARGS__) |
| #define | vwmul_vx_i32mf2_m(...) __riscv_vwmul_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vwmul_vx_i64m1(...) __riscv_vwmul_vx_i64m1(__VA_ARGS__) |
| #define | vwmul_vx_i64m1_m(...) __riscv_vwmul_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vwmul_vx_i64m2(...) __riscv_vwmul_vx_i64m2(__VA_ARGS__) |
| #define | vwmul_vx_i64m2_m(...) __riscv_vwmul_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vwmul_vx_i64m4(...) __riscv_vwmul_vx_i64m4(__VA_ARGS__) |
| #define | vwmul_vx_i64m4_m(...) __riscv_vwmul_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vwmul_vx_i64m8(...) __riscv_vwmul_vx_i64m8(__VA_ARGS__) |
| #define | vwmul_vx_i64m8_m(...) __riscv_vwmul_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vwmulsu_vv_i16m1(...) __riscv_vwmulsu_vv_i16m1(__VA_ARGS__) |
| #define | vwmulsu_vv_i16m1_m(...) __riscv_vwmulsu_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vwmulsu_vv_i16m2(...) __riscv_vwmulsu_vv_i16m2(__VA_ARGS__) |
| #define | vwmulsu_vv_i16m2_m(...) __riscv_vwmulsu_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vwmulsu_vv_i16m4(...) __riscv_vwmulsu_vv_i16m4(__VA_ARGS__) |
| #define | vwmulsu_vv_i16m4_m(...) __riscv_vwmulsu_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vwmulsu_vv_i16m8(...) __riscv_vwmulsu_vv_i16m8(__VA_ARGS__) |
| #define | vwmulsu_vv_i16m8_m(...) __riscv_vwmulsu_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vwmulsu_vv_i16mf2(...) __riscv_vwmulsu_vv_i16mf2(__VA_ARGS__) |
| #define | vwmulsu_vv_i16mf2_m(...) __riscv_vwmulsu_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vwmulsu_vv_i16mf4(...) __riscv_vwmulsu_vv_i16mf4(__VA_ARGS__) |
| #define | vwmulsu_vv_i16mf4_m(...) __riscv_vwmulsu_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vwmulsu_vv_i32m1(...) __riscv_vwmulsu_vv_i32m1(__VA_ARGS__) |
| #define | vwmulsu_vv_i32m1_m(...) __riscv_vwmulsu_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vwmulsu_vv_i32m2(...) __riscv_vwmulsu_vv_i32m2(__VA_ARGS__) |
| #define | vwmulsu_vv_i32m2_m(...) __riscv_vwmulsu_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vwmulsu_vv_i32m4(...) __riscv_vwmulsu_vv_i32m4(__VA_ARGS__) |
| #define | vwmulsu_vv_i32m4_m(...) __riscv_vwmulsu_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vwmulsu_vv_i32m8(...) __riscv_vwmulsu_vv_i32m8(__VA_ARGS__) |
| #define | vwmulsu_vv_i32m8_m(...) __riscv_vwmulsu_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vwmulsu_vv_i32mf2(...) __riscv_vwmulsu_vv_i32mf2(__VA_ARGS__) |
| #define | vwmulsu_vv_i32mf2_m(...) __riscv_vwmulsu_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vwmulsu_vv_i64m1(...) __riscv_vwmulsu_vv_i64m1(__VA_ARGS__) |
| #define | vwmulsu_vv_i64m1_m(...) __riscv_vwmulsu_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vwmulsu_vv_i64m2(...) __riscv_vwmulsu_vv_i64m2(__VA_ARGS__) |
| #define | vwmulsu_vv_i64m2_m(...) __riscv_vwmulsu_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vwmulsu_vv_i64m4(...) __riscv_vwmulsu_vv_i64m4(__VA_ARGS__) |
| #define | vwmulsu_vv_i64m4_m(...) __riscv_vwmulsu_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vwmulsu_vv_i64m8(...) __riscv_vwmulsu_vv_i64m8(__VA_ARGS__) |
| #define | vwmulsu_vv_i64m8_m(...) __riscv_vwmulsu_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vwmulsu_vx_i16m1(...) __riscv_vwmulsu_vx_i16m1(__VA_ARGS__) |
| #define | vwmulsu_vx_i16m1_m(...) __riscv_vwmulsu_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vwmulsu_vx_i16m2(...) __riscv_vwmulsu_vx_i16m2(__VA_ARGS__) |
| #define | vwmulsu_vx_i16m2_m(...) __riscv_vwmulsu_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vwmulsu_vx_i16m4(...) __riscv_vwmulsu_vx_i16m4(__VA_ARGS__) |
| #define | vwmulsu_vx_i16m4_m(...) __riscv_vwmulsu_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vwmulsu_vx_i16m8(...) __riscv_vwmulsu_vx_i16m8(__VA_ARGS__) |
| #define | vwmulsu_vx_i16m8_m(...) __riscv_vwmulsu_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vwmulsu_vx_i16mf2(...) __riscv_vwmulsu_vx_i16mf2(__VA_ARGS__) |
| #define | vwmulsu_vx_i16mf2_m(...) __riscv_vwmulsu_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vwmulsu_vx_i16mf4(...) __riscv_vwmulsu_vx_i16mf4(__VA_ARGS__) |
| #define | vwmulsu_vx_i16mf4_m(...) __riscv_vwmulsu_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vwmulsu_vx_i32m1(...) __riscv_vwmulsu_vx_i32m1(__VA_ARGS__) |
| #define | vwmulsu_vx_i32m1_m(...) __riscv_vwmulsu_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vwmulsu_vx_i32m2(...) __riscv_vwmulsu_vx_i32m2(__VA_ARGS__) |
| #define | vwmulsu_vx_i32m2_m(...) __riscv_vwmulsu_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vwmulsu_vx_i32m4(...) __riscv_vwmulsu_vx_i32m4(__VA_ARGS__) |
| #define | vwmulsu_vx_i32m4_m(...) __riscv_vwmulsu_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vwmulsu_vx_i32m8(...) __riscv_vwmulsu_vx_i32m8(__VA_ARGS__) |
| #define | vwmulsu_vx_i32m8_m(...) __riscv_vwmulsu_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vwmulsu_vx_i32mf2(...) __riscv_vwmulsu_vx_i32mf2(__VA_ARGS__) |
| #define | vwmulsu_vx_i32mf2_m(...) __riscv_vwmulsu_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vwmulsu_vx_i64m1(...) __riscv_vwmulsu_vx_i64m1(__VA_ARGS__) |
| #define | vwmulsu_vx_i64m1_m(...) __riscv_vwmulsu_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vwmulsu_vx_i64m2(...) __riscv_vwmulsu_vx_i64m2(__VA_ARGS__) |
| #define | vwmulsu_vx_i64m2_m(...) __riscv_vwmulsu_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vwmulsu_vx_i64m4(...) __riscv_vwmulsu_vx_i64m4(__VA_ARGS__) |
| #define | vwmulsu_vx_i64m4_m(...) __riscv_vwmulsu_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vwmulsu_vx_i64m8(...) __riscv_vwmulsu_vx_i64m8(__VA_ARGS__) |
| #define | vwmulsu_vx_i64m8_m(...) __riscv_vwmulsu_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vwmulu_vv_u16m1(...) __riscv_vwmulu_vv_u16m1(__VA_ARGS__) |
| #define | vwmulu_vv_u16m1_m(...) __riscv_vwmulu_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vwmulu_vv_u16m2(...) __riscv_vwmulu_vv_u16m2(__VA_ARGS__) |
| #define | vwmulu_vv_u16m2_m(...) __riscv_vwmulu_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vwmulu_vv_u16m4(...) __riscv_vwmulu_vv_u16m4(__VA_ARGS__) |
| #define | vwmulu_vv_u16m4_m(...) __riscv_vwmulu_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vwmulu_vv_u16m8(...) __riscv_vwmulu_vv_u16m8(__VA_ARGS__) |
| #define | vwmulu_vv_u16m8_m(...) __riscv_vwmulu_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vwmulu_vv_u16mf2(...) __riscv_vwmulu_vv_u16mf2(__VA_ARGS__) |
| #define | vwmulu_vv_u16mf2_m(...) __riscv_vwmulu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vwmulu_vv_u16mf4(...) __riscv_vwmulu_vv_u16mf4(__VA_ARGS__) |
| #define | vwmulu_vv_u16mf4_m(...) __riscv_vwmulu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vwmulu_vv_u32m1(...) __riscv_vwmulu_vv_u32m1(__VA_ARGS__) |
| #define | vwmulu_vv_u32m1_m(...) __riscv_vwmulu_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vwmulu_vv_u32m2(...) __riscv_vwmulu_vv_u32m2(__VA_ARGS__) |
| #define | vwmulu_vv_u32m2_m(...) __riscv_vwmulu_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vwmulu_vv_u32m4(...) __riscv_vwmulu_vv_u32m4(__VA_ARGS__) |
| #define | vwmulu_vv_u32m4_m(...) __riscv_vwmulu_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vwmulu_vv_u32m8(...) __riscv_vwmulu_vv_u32m8(__VA_ARGS__) |
| #define | vwmulu_vv_u32m8_m(...) __riscv_vwmulu_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vwmulu_vv_u32mf2(...) __riscv_vwmulu_vv_u32mf2(__VA_ARGS__) |
| #define | vwmulu_vv_u32mf2_m(...) __riscv_vwmulu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vwmulu_vv_u64m1(...) __riscv_vwmulu_vv_u64m1(__VA_ARGS__) |
| #define | vwmulu_vv_u64m1_m(...) __riscv_vwmulu_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vwmulu_vv_u64m2(...) __riscv_vwmulu_vv_u64m2(__VA_ARGS__) |
| #define | vwmulu_vv_u64m2_m(...) __riscv_vwmulu_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vwmulu_vv_u64m4(...) __riscv_vwmulu_vv_u64m4(__VA_ARGS__) |
| #define | vwmulu_vv_u64m4_m(...) __riscv_vwmulu_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vwmulu_vv_u64m8(...) __riscv_vwmulu_vv_u64m8(__VA_ARGS__) |
| #define | vwmulu_vv_u64m8_m(...) __riscv_vwmulu_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vwmulu_vx_u16m1(...) __riscv_vwmulu_vx_u16m1(__VA_ARGS__) |
| #define | vwmulu_vx_u16m1_m(...) __riscv_vwmulu_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vwmulu_vx_u16m2(...) __riscv_vwmulu_vx_u16m2(__VA_ARGS__) |
| #define | vwmulu_vx_u16m2_m(...) __riscv_vwmulu_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vwmulu_vx_u16m4(...) __riscv_vwmulu_vx_u16m4(__VA_ARGS__) |
| #define | vwmulu_vx_u16m4_m(...) __riscv_vwmulu_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vwmulu_vx_u16m8(...) __riscv_vwmulu_vx_u16m8(__VA_ARGS__) |
| #define | vwmulu_vx_u16m8_m(...) __riscv_vwmulu_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vwmulu_vx_u16mf2(...) __riscv_vwmulu_vx_u16mf2(__VA_ARGS__) |
| #define | vwmulu_vx_u16mf2_m(...) __riscv_vwmulu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vwmulu_vx_u16mf4(...) __riscv_vwmulu_vx_u16mf4(__VA_ARGS__) |
| #define | vwmulu_vx_u16mf4_m(...) __riscv_vwmulu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vwmulu_vx_u32m1(...) __riscv_vwmulu_vx_u32m1(__VA_ARGS__) |
| #define | vwmulu_vx_u32m1_m(...) __riscv_vwmulu_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vwmulu_vx_u32m2(...) __riscv_vwmulu_vx_u32m2(__VA_ARGS__) |
| #define | vwmulu_vx_u32m2_m(...) __riscv_vwmulu_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vwmulu_vx_u32m4(...) __riscv_vwmulu_vx_u32m4(__VA_ARGS__) |
| #define | vwmulu_vx_u32m4_m(...) __riscv_vwmulu_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vwmulu_vx_u32m8(...) __riscv_vwmulu_vx_u32m8(__VA_ARGS__) |
| #define | vwmulu_vx_u32m8_m(...) __riscv_vwmulu_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vwmulu_vx_u32mf2(...) __riscv_vwmulu_vx_u32mf2(__VA_ARGS__) |
| #define | vwmulu_vx_u32mf2_m(...) __riscv_vwmulu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vwmulu_vx_u64m1(...) __riscv_vwmulu_vx_u64m1(__VA_ARGS__) |
| #define | vwmulu_vx_u64m1_m(...) __riscv_vwmulu_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vwmulu_vx_u64m2(...) __riscv_vwmulu_vx_u64m2(__VA_ARGS__) |
| #define | vwmulu_vx_u64m2_m(...) __riscv_vwmulu_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vwmulu_vx_u64m4(...) __riscv_vwmulu_vx_u64m4(__VA_ARGS__) |
| #define | vwmulu_vx_u64m4_m(...) __riscv_vwmulu_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vwmulu_vx_u64m8(...) __riscv_vwmulu_vx_u64m8(__VA_ARGS__) |
| #define | vwmulu_vx_u64m8_m(...) __riscv_vwmulu_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vwredsum_vs_i16m1_i32m1(...) __riscv_vwredsum_vs_i16m1_i32m1_tu(__VA_ARGS__) |
| #define | vwredsum_vs_i16m1_i32m1_m(...) __riscv_vwredsum_vs_i16m1_i32m1_tum(__VA_ARGS__) |
| #define | vwredsum_vs_i16m2_i32m1(...) __riscv_vwredsum_vs_i16m2_i32m1_tu(__VA_ARGS__) |
| #define | vwredsum_vs_i16m2_i32m1_m(...) __riscv_vwredsum_vs_i16m2_i32m1_tum(__VA_ARGS__) |
| #define | vwredsum_vs_i16m4_i32m1(...) __riscv_vwredsum_vs_i16m4_i32m1_tu(__VA_ARGS__) |
| #define | vwredsum_vs_i16m4_i32m1_m(...) __riscv_vwredsum_vs_i16m4_i32m1_tum(__VA_ARGS__) |
| #define | vwredsum_vs_i16m8_i32m1(...) __riscv_vwredsum_vs_i16m8_i32m1_tu(__VA_ARGS__) |
| #define | vwredsum_vs_i16m8_i32m1_m(...) __riscv_vwredsum_vs_i16m8_i32m1_tum(__VA_ARGS__) |
| #define | vwredsum_vs_i16mf2_i32m1(...) __riscv_vwredsum_vs_i16mf2_i32m1_tu(__VA_ARGS__) |
| #define | vwredsum_vs_i16mf2_i32m1_m(...) __riscv_vwredsum_vs_i16mf2_i32m1_tum(__VA_ARGS__) |
| #define | vwredsum_vs_i16mf4_i32m1(...) __riscv_vwredsum_vs_i16mf4_i32m1_tu(__VA_ARGS__) |
| #define | vwredsum_vs_i16mf4_i32m1_m(...) __riscv_vwredsum_vs_i16mf4_i32m1_tum(__VA_ARGS__) |
| #define | vwredsum_vs_i32m1_i64m1(...) __riscv_vwredsum_vs_i32m1_i64m1_tu(__VA_ARGS__) |
| #define | vwredsum_vs_i32m1_i64m1_m(...) __riscv_vwredsum_vs_i32m1_i64m1_tum(__VA_ARGS__) |
| #define | vwredsum_vs_i32m2_i64m1(...) __riscv_vwredsum_vs_i32m2_i64m1_tu(__VA_ARGS__) |
| #define | vwredsum_vs_i32m2_i64m1_m(...) __riscv_vwredsum_vs_i32m2_i64m1_tum(__VA_ARGS__) |
| #define | vwredsum_vs_i32m4_i64m1(...) __riscv_vwredsum_vs_i32m4_i64m1_tu(__VA_ARGS__) |
| #define | vwredsum_vs_i32m4_i64m1_m(...) __riscv_vwredsum_vs_i32m4_i64m1_tum(__VA_ARGS__) |
| #define | vwredsum_vs_i32m8_i64m1(...) __riscv_vwredsum_vs_i32m8_i64m1_tu(__VA_ARGS__) |
| #define | vwredsum_vs_i32m8_i64m1_m(...) __riscv_vwredsum_vs_i32m8_i64m1_tum(__VA_ARGS__) |
| #define | vwredsum_vs_i32mf2_i64m1(...) __riscv_vwredsum_vs_i32mf2_i64m1_tu(__VA_ARGS__) |
| #define | vwredsum_vs_i32mf2_i64m1_m(...) __riscv_vwredsum_vs_i32mf2_i64m1_tum(__VA_ARGS__) |
| #define | vwredsum_vs_i8m1_i16m1(...) __riscv_vwredsum_vs_i8m1_i16m1_tu(__VA_ARGS__) |
| #define | vwredsum_vs_i8m1_i16m1_m(...) __riscv_vwredsum_vs_i8m1_i16m1_tum(__VA_ARGS__) |
| #define | vwredsum_vs_i8m2_i16m1(...) __riscv_vwredsum_vs_i8m2_i16m1_tu(__VA_ARGS__) |
| #define | vwredsum_vs_i8m2_i16m1_m(...) __riscv_vwredsum_vs_i8m2_i16m1_tum(__VA_ARGS__) |
| #define | vwredsum_vs_i8m4_i16m1(...) __riscv_vwredsum_vs_i8m4_i16m1_tu(__VA_ARGS__) |
| #define | vwredsum_vs_i8m4_i16m1_m(...) __riscv_vwredsum_vs_i8m4_i16m1_tum(__VA_ARGS__) |
| #define | vwredsum_vs_i8m8_i16m1(...) __riscv_vwredsum_vs_i8m8_i16m1_tu(__VA_ARGS__) |
| #define | vwredsum_vs_i8m8_i16m1_m(...) __riscv_vwredsum_vs_i8m8_i16m1_tum(__VA_ARGS__) |
| #define | vwredsum_vs_i8mf2_i16m1(...) __riscv_vwredsum_vs_i8mf2_i16m1_tu(__VA_ARGS__) |
| #define | vwredsum_vs_i8mf2_i16m1_m(...) __riscv_vwredsum_vs_i8mf2_i16m1_tum(__VA_ARGS__) |
| #define | vwredsum_vs_i8mf4_i16m1(...) __riscv_vwredsum_vs_i8mf4_i16m1_tu(__VA_ARGS__) |
| #define | vwredsum_vs_i8mf4_i16m1_m(...) __riscv_vwredsum_vs_i8mf4_i16m1_tum(__VA_ARGS__) |
| #define | vwredsum_vs_i8mf8_i16m1(...) __riscv_vwredsum_vs_i8mf8_i16m1_tu(__VA_ARGS__) |
| #define | vwredsum_vs_i8mf8_i16m1_m(...) __riscv_vwredsum_vs_i8mf8_i16m1_tum(__VA_ARGS__) |
| #define | vwredsumu_vs_u16m1_u32m1(...) __riscv_vwredsumu_vs_u16m1_u32m1_tu(__VA_ARGS__) |
| #define | vwredsumu_vs_u16m1_u32m1_m(...) __riscv_vwredsumu_vs_u16m1_u32m1_tum(__VA_ARGS__) |
| #define | vwredsumu_vs_u16m2_u32m1(...) __riscv_vwredsumu_vs_u16m2_u32m1_tu(__VA_ARGS__) |
| #define | vwredsumu_vs_u16m2_u32m1_m(...) __riscv_vwredsumu_vs_u16m2_u32m1_tum(__VA_ARGS__) |
| #define | vwredsumu_vs_u16m4_u32m1(...) __riscv_vwredsumu_vs_u16m4_u32m1_tu(__VA_ARGS__) |
| #define | vwredsumu_vs_u16m4_u32m1_m(...) __riscv_vwredsumu_vs_u16m4_u32m1_tum(__VA_ARGS__) |
| #define | vwredsumu_vs_u16m8_u32m1(...) __riscv_vwredsumu_vs_u16m8_u32m1_tu(__VA_ARGS__) |
| #define | vwredsumu_vs_u16m8_u32m1_m(...) __riscv_vwredsumu_vs_u16m8_u32m1_tum(__VA_ARGS__) |
| #define | vwredsumu_vs_u16mf2_u32m1(...) __riscv_vwredsumu_vs_u16mf2_u32m1_tu(__VA_ARGS__) |
| #define | vwredsumu_vs_u16mf2_u32m1_m(...) __riscv_vwredsumu_vs_u16mf2_u32m1_tum(__VA_ARGS__) |
| #define | vwredsumu_vs_u16mf4_u32m1(...) __riscv_vwredsumu_vs_u16mf4_u32m1_tu(__VA_ARGS__) |
| #define | vwredsumu_vs_u16mf4_u32m1_m(...) __riscv_vwredsumu_vs_u16mf4_u32m1_tum(__VA_ARGS__) |
| #define | vwredsumu_vs_u32m1_u64m1(...) __riscv_vwredsumu_vs_u32m1_u64m1_tu(__VA_ARGS__) |
| #define | vwredsumu_vs_u32m1_u64m1_m(...) __riscv_vwredsumu_vs_u32m1_u64m1_tum(__VA_ARGS__) |
| #define | vwredsumu_vs_u32m2_u64m1(...) __riscv_vwredsumu_vs_u32m2_u64m1_tu(__VA_ARGS__) |
| #define | vwredsumu_vs_u32m2_u64m1_m(...) __riscv_vwredsumu_vs_u32m2_u64m1_tum(__VA_ARGS__) |
| #define | vwredsumu_vs_u32m4_u64m1(...) __riscv_vwredsumu_vs_u32m4_u64m1_tu(__VA_ARGS__) |
| #define | vwredsumu_vs_u32m4_u64m1_m(...) __riscv_vwredsumu_vs_u32m4_u64m1_tum(__VA_ARGS__) |
| #define | vwredsumu_vs_u32m8_u64m1(...) __riscv_vwredsumu_vs_u32m8_u64m1_tu(__VA_ARGS__) |
| #define | vwredsumu_vs_u32m8_u64m1_m(...) __riscv_vwredsumu_vs_u32m8_u64m1_tum(__VA_ARGS__) |
| #define | vwredsumu_vs_u32mf2_u64m1(...) __riscv_vwredsumu_vs_u32mf2_u64m1_tu(__VA_ARGS__) |
| #define | vwredsumu_vs_u32mf2_u64m1_m(...) __riscv_vwredsumu_vs_u32mf2_u64m1_tum(__VA_ARGS__) |
| #define | vwredsumu_vs_u8m1_u16m1(...) __riscv_vwredsumu_vs_u8m1_u16m1_tu(__VA_ARGS__) |
| #define | vwredsumu_vs_u8m1_u16m1_m(...) __riscv_vwredsumu_vs_u8m1_u16m1_tum(__VA_ARGS__) |
| #define | vwredsumu_vs_u8m2_u16m1(...) __riscv_vwredsumu_vs_u8m2_u16m1_tu(__VA_ARGS__) |
| #define | vwredsumu_vs_u8m2_u16m1_m(...) __riscv_vwredsumu_vs_u8m2_u16m1_tum(__VA_ARGS__) |
| #define | vwredsumu_vs_u8m4_u16m1(...) __riscv_vwredsumu_vs_u8m4_u16m1_tu(__VA_ARGS__) |
| #define | vwredsumu_vs_u8m4_u16m1_m(...) __riscv_vwredsumu_vs_u8m4_u16m1_tum(__VA_ARGS__) |
| #define | vwredsumu_vs_u8m8_u16m1(...) __riscv_vwredsumu_vs_u8m8_u16m1_tu(__VA_ARGS__) |
| #define | vwredsumu_vs_u8m8_u16m1_m(...) __riscv_vwredsumu_vs_u8m8_u16m1_tum(__VA_ARGS__) |
| #define | vwredsumu_vs_u8mf2_u16m1(...) __riscv_vwredsumu_vs_u8mf2_u16m1_tu(__VA_ARGS__) |
| #define | vwredsumu_vs_u8mf2_u16m1_m(...) __riscv_vwredsumu_vs_u8mf2_u16m1_tum(__VA_ARGS__) |
| #define | vwredsumu_vs_u8mf4_u16m1(...) __riscv_vwredsumu_vs_u8mf4_u16m1_tu(__VA_ARGS__) |
| #define | vwredsumu_vs_u8mf4_u16m1_m(...) __riscv_vwredsumu_vs_u8mf4_u16m1_tum(__VA_ARGS__) |
| #define | vwredsumu_vs_u8mf8_u16m1(...) __riscv_vwredsumu_vs_u8mf8_u16m1_tu(__VA_ARGS__) |
| #define | vwredsumu_vs_u8mf8_u16m1_m(...) __riscv_vwredsumu_vs_u8mf8_u16m1_tum(__VA_ARGS__) |
| #define | vwsub_vv_i16m1(...) __riscv_vwsub_vv_i16m1(__VA_ARGS__) |
| #define | vwsub_vv_i16m1_m(...) __riscv_vwsub_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vwsub_vv_i16m2(...) __riscv_vwsub_vv_i16m2(__VA_ARGS__) |
| #define | vwsub_vv_i16m2_m(...) __riscv_vwsub_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vwsub_vv_i16m4(...) __riscv_vwsub_vv_i16m4(__VA_ARGS__) |
| #define | vwsub_vv_i16m4_m(...) __riscv_vwsub_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vwsub_vv_i16m8(...) __riscv_vwsub_vv_i16m8(__VA_ARGS__) |
| #define | vwsub_vv_i16m8_m(...) __riscv_vwsub_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vwsub_vv_i16mf2(...) __riscv_vwsub_vv_i16mf2(__VA_ARGS__) |
| #define | vwsub_vv_i16mf2_m(...) __riscv_vwsub_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vwsub_vv_i16mf4(...) __riscv_vwsub_vv_i16mf4(__VA_ARGS__) |
| #define | vwsub_vv_i16mf4_m(...) __riscv_vwsub_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vwsub_vv_i32m1(...) __riscv_vwsub_vv_i32m1(__VA_ARGS__) |
| #define | vwsub_vv_i32m1_m(...) __riscv_vwsub_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vwsub_vv_i32m2(...) __riscv_vwsub_vv_i32m2(__VA_ARGS__) |
| #define | vwsub_vv_i32m2_m(...) __riscv_vwsub_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vwsub_vv_i32m4(...) __riscv_vwsub_vv_i32m4(__VA_ARGS__) |
| #define | vwsub_vv_i32m4_m(...) __riscv_vwsub_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vwsub_vv_i32m8(...) __riscv_vwsub_vv_i32m8(__VA_ARGS__) |
| #define | vwsub_vv_i32m8_m(...) __riscv_vwsub_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vwsub_vv_i32mf2(...) __riscv_vwsub_vv_i32mf2(__VA_ARGS__) |
| #define | vwsub_vv_i32mf2_m(...) __riscv_vwsub_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vwsub_vv_i64m1(...) __riscv_vwsub_vv_i64m1(__VA_ARGS__) |
| #define | vwsub_vv_i64m1_m(...) __riscv_vwsub_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vwsub_vv_i64m2(...) __riscv_vwsub_vv_i64m2(__VA_ARGS__) |
| #define | vwsub_vv_i64m2_m(...) __riscv_vwsub_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vwsub_vv_i64m4(...) __riscv_vwsub_vv_i64m4(__VA_ARGS__) |
| #define | vwsub_vv_i64m4_m(...) __riscv_vwsub_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vwsub_vv_i64m8(...) __riscv_vwsub_vv_i64m8(__VA_ARGS__) |
| #define | vwsub_vv_i64m8_m(...) __riscv_vwsub_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vwsub_vx_i16m1(...) __riscv_vwsub_vx_i16m1(__VA_ARGS__) |
| #define | vwsub_vx_i16m1_m(...) __riscv_vwsub_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vwsub_vx_i16m2(...) __riscv_vwsub_vx_i16m2(__VA_ARGS__) |
| #define | vwsub_vx_i16m2_m(...) __riscv_vwsub_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vwsub_vx_i16m4(...) __riscv_vwsub_vx_i16m4(__VA_ARGS__) |
| #define | vwsub_vx_i16m4_m(...) __riscv_vwsub_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vwsub_vx_i16m8(...) __riscv_vwsub_vx_i16m8(__VA_ARGS__) |
| #define | vwsub_vx_i16m8_m(...) __riscv_vwsub_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vwsub_vx_i16mf2(...) __riscv_vwsub_vx_i16mf2(__VA_ARGS__) |
| #define | vwsub_vx_i16mf2_m(...) __riscv_vwsub_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vwsub_vx_i16mf4(...) __riscv_vwsub_vx_i16mf4(__VA_ARGS__) |
| #define | vwsub_vx_i16mf4_m(...) __riscv_vwsub_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vwsub_vx_i32m1(...) __riscv_vwsub_vx_i32m1(__VA_ARGS__) |
| #define | vwsub_vx_i32m1_m(...) __riscv_vwsub_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vwsub_vx_i32m2(...) __riscv_vwsub_vx_i32m2(__VA_ARGS__) |
| #define | vwsub_vx_i32m2_m(...) __riscv_vwsub_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vwsub_vx_i32m4(...) __riscv_vwsub_vx_i32m4(__VA_ARGS__) |
| #define | vwsub_vx_i32m4_m(...) __riscv_vwsub_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vwsub_vx_i32m8(...) __riscv_vwsub_vx_i32m8(__VA_ARGS__) |
| #define | vwsub_vx_i32m8_m(...) __riscv_vwsub_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vwsub_vx_i32mf2(...) __riscv_vwsub_vx_i32mf2(__VA_ARGS__) |
| #define | vwsub_vx_i32mf2_m(...) __riscv_vwsub_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vwsub_vx_i64m1(...) __riscv_vwsub_vx_i64m1(__VA_ARGS__) |
| #define | vwsub_vx_i64m1_m(...) __riscv_vwsub_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vwsub_vx_i64m2(...) __riscv_vwsub_vx_i64m2(__VA_ARGS__) |
| #define | vwsub_vx_i64m2_m(...) __riscv_vwsub_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vwsub_vx_i64m4(...) __riscv_vwsub_vx_i64m4(__VA_ARGS__) |
| #define | vwsub_vx_i64m4_m(...) __riscv_vwsub_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vwsub_vx_i64m8(...) __riscv_vwsub_vx_i64m8(__VA_ARGS__) |
| #define | vwsub_vx_i64m8_m(...) __riscv_vwsub_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vwsub_wv_i16m1(...) __riscv_vwsub_wv_i16m1(__VA_ARGS__) |
| #define | vwsub_wv_i16m1_m(...) __riscv_vwsub_wv_i16m1_tumu(__VA_ARGS__) |
| #define | vwsub_wv_i16m2(...) __riscv_vwsub_wv_i16m2(__VA_ARGS__) |
| #define | vwsub_wv_i16m2_m(...) __riscv_vwsub_wv_i16m2_tumu(__VA_ARGS__) |
| #define | vwsub_wv_i16m4(...) __riscv_vwsub_wv_i16m4(__VA_ARGS__) |
| #define | vwsub_wv_i16m4_m(...) __riscv_vwsub_wv_i16m4_tumu(__VA_ARGS__) |
| #define | vwsub_wv_i16m8(...) __riscv_vwsub_wv_i16m8(__VA_ARGS__) |
| #define | vwsub_wv_i16m8_m(...) __riscv_vwsub_wv_i16m8_tumu(__VA_ARGS__) |
| #define | vwsub_wv_i16mf2(...) __riscv_vwsub_wv_i16mf2(__VA_ARGS__) |
| #define | vwsub_wv_i16mf2_m(...) __riscv_vwsub_wv_i16mf2_tumu(__VA_ARGS__) |
| #define | vwsub_wv_i16mf4(...) __riscv_vwsub_wv_i16mf4(__VA_ARGS__) |
| #define | vwsub_wv_i16mf4_m(...) __riscv_vwsub_wv_i16mf4_tumu(__VA_ARGS__) |
| #define | vwsub_wv_i32m1(...) __riscv_vwsub_wv_i32m1(__VA_ARGS__) |
| #define | vwsub_wv_i32m1_m(...) __riscv_vwsub_wv_i32m1_tumu(__VA_ARGS__) |
| #define | vwsub_wv_i32m2(...) __riscv_vwsub_wv_i32m2(__VA_ARGS__) |
| #define | vwsub_wv_i32m2_m(...) __riscv_vwsub_wv_i32m2_tumu(__VA_ARGS__) |
| #define | vwsub_wv_i32m4(...) __riscv_vwsub_wv_i32m4(__VA_ARGS__) |
| #define | vwsub_wv_i32m4_m(...) __riscv_vwsub_wv_i32m4_tumu(__VA_ARGS__) |
| #define | vwsub_wv_i32m8(...) __riscv_vwsub_wv_i32m8(__VA_ARGS__) |
| #define | vwsub_wv_i32m8_m(...) __riscv_vwsub_wv_i32m8_tumu(__VA_ARGS__) |
| #define | vwsub_wv_i32mf2(...) __riscv_vwsub_wv_i32mf2(__VA_ARGS__) |
| #define | vwsub_wv_i32mf2_m(...) __riscv_vwsub_wv_i32mf2_tumu(__VA_ARGS__) |
| #define | vwsub_wv_i64m1(...) __riscv_vwsub_wv_i64m1(__VA_ARGS__) |
| #define | vwsub_wv_i64m1_m(...) __riscv_vwsub_wv_i64m1_tumu(__VA_ARGS__) |
| #define | vwsub_wv_i64m2(...) __riscv_vwsub_wv_i64m2(__VA_ARGS__) |
| #define | vwsub_wv_i64m2_m(...) __riscv_vwsub_wv_i64m2_tumu(__VA_ARGS__) |
| #define | vwsub_wv_i64m4(...) __riscv_vwsub_wv_i64m4(__VA_ARGS__) |
| #define | vwsub_wv_i64m4_m(...) __riscv_vwsub_wv_i64m4_tumu(__VA_ARGS__) |
| #define | vwsub_wv_i64m8(...) __riscv_vwsub_wv_i64m8(__VA_ARGS__) |
| #define | vwsub_wv_i64m8_m(...) __riscv_vwsub_wv_i64m8_tumu(__VA_ARGS__) |
| #define | vwsub_wx_i16m1(...) __riscv_vwsub_wx_i16m1(__VA_ARGS__) |
| #define | vwsub_wx_i16m1_m(...) __riscv_vwsub_wx_i16m1_tumu(__VA_ARGS__) |
| #define | vwsub_wx_i16m2(...) __riscv_vwsub_wx_i16m2(__VA_ARGS__) |
| #define | vwsub_wx_i16m2_m(...) __riscv_vwsub_wx_i16m2_tumu(__VA_ARGS__) |
| #define | vwsub_wx_i16m4(...) __riscv_vwsub_wx_i16m4(__VA_ARGS__) |
| #define | vwsub_wx_i16m4_m(...) __riscv_vwsub_wx_i16m4_tumu(__VA_ARGS__) |
| #define | vwsub_wx_i16m8(...) __riscv_vwsub_wx_i16m8(__VA_ARGS__) |
| #define | vwsub_wx_i16m8_m(...) __riscv_vwsub_wx_i16m8_tumu(__VA_ARGS__) |
| #define | vwsub_wx_i16mf2(...) __riscv_vwsub_wx_i16mf2(__VA_ARGS__) |
| #define | vwsub_wx_i16mf2_m(...) __riscv_vwsub_wx_i16mf2_tumu(__VA_ARGS__) |
| #define | vwsub_wx_i16mf4(...) __riscv_vwsub_wx_i16mf4(__VA_ARGS__) |
| #define | vwsub_wx_i16mf4_m(...) __riscv_vwsub_wx_i16mf4_tumu(__VA_ARGS__) |
| #define | vwsub_wx_i32m1(...) __riscv_vwsub_wx_i32m1(__VA_ARGS__) |
| #define | vwsub_wx_i32m1_m(...) __riscv_vwsub_wx_i32m1_tumu(__VA_ARGS__) |
| #define | vwsub_wx_i32m2(...) __riscv_vwsub_wx_i32m2(__VA_ARGS__) |
| #define | vwsub_wx_i32m2_m(...) __riscv_vwsub_wx_i32m2_tumu(__VA_ARGS__) |
| #define | vwsub_wx_i32m4(...) __riscv_vwsub_wx_i32m4(__VA_ARGS__) |
| #define | vwsub_wx_i32m4_m(...) __riscv_vwsub_wx_i32m4_tumu(__VA_ARGS__) |
| #define | vwsub_wx_i32m8(...) __riscv_vwsub_wx_i32m8(__VA_ARGS__) |
| #define | vwsub_wx_i32m8_m(...) __riscv_vwsub_wx_i32m8_tumu(__VA_ARGS__) |
| #define | vwsub_wx_i32mf2(...) __riscv_vwsub_wx_i32mf2(__VA_ARGS__) |
| #define | vwsub_wx_i32mf2_m(...) __riscv_vwsub_wx_i32mf2_tumu(__VA_ARGS__) |
| #define | vwsub_wx_i64m1(...) __riscv_vwsub_wx_i64m1(__VA_ARGS__) |
| #define | vwsub_wx_i64m1_m(...) __riscv_vwsub_wx_i64m1_tumu(__VA_ARGS__) |
| #define | vwsub_wx_i64m2(...) __riscv_vwsub_wx_i64m2(__VA_ARGS__) |
| #define | vwsub_wx_i64m2_m(...) __riscv_vwsub_wx_i64m2_tumu(__VA_ARGS__) |
| #define | vwsub_wx_i64m4(...) __riscv_vwsub_wx_i64m4(__VA_ARGS__) |
| #define | vwsub_wx_i64m4_m(...) __riscv_vwsub_wx_i64m4_tumu(__VA_ARGS__) |
| #define | vwsub_wx_i64m8(...) __riscv_vwsub_wx_i64m8(__VA_ARGS__) |
| #define | vwsub_wx_i64m8_m(...) __riscv_vwsub_wx_i64m8_tumu(__VA_ARGS__) |
| #define | vwsubu_vv_u16m1(...) __riscv_vwsubu_vv_u16m1(__VA_ARGS__) |
| #define | vwsubu_vv_u16m1_m(...) __riscv_vwsubu_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vwsubu_vv_u16m2(...) __riscv_vwsubu_vv_u16m2(__VA_ARGS__) |
| #define | vwsubu_vv_u16m2_m(...) __riscv_vwsubu_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vwsubu_vv_u16m4(...) __riscv_vwsubu_vv_u16m4(__VA_ARGS__) |
| #define | vwsubu_vv_u16m4_m(...) __riscv_vwsubu_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vwsubu_vv_u16m8(...) __riscv_vwsubu_vv_u16m8(__VA_ARGS__) |
| #define | vwsubu_vv_u16m8_m(...) __riscv_vwsubu_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vwsubu_vv_u16mf2(...) __riscv_vwsubu_vv_u16mf2(__VA_ARGS__) |
| #define | vwsubu_vv_u16mf2_m(...) __riscv_vwsubu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vwsubu_vv_u16mf4(...) __riscv_vwsubu_vv_u16mf4(__VA_ARGS__) |
| #define | vwsubu_vv_u16mf4_m(...) __riscv_vwsubu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vwsubu_vv_u32m1(...) __riscv_vwsubu_vv_u32m1(__VA_ARGS__) |
| #define | vwsubu_vv_u32m1_m(...) __riscv_vwsubu_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vwsubu_vv_u32m2(...) __riscv_vwsubu_vv_u32m2(__VA_ARGS__) |
| #define | vwsubu_vv_u32m2_m(...) __riscv_vwsubu_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vwsubu_vv_u32m4(...) __riscv_vwsubu_vv_u32m4(__VA_ARGS__) |
| #define | vwsubu_vv_u32m4_m(...) __riscv_vwsubu_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vwsubu_vv_u32m8(...) __riscv_vwsubu_vv_u32m8(__VA_ARGS__) |
| #define | vwsubu_vv_u32m8_m(...) __riscv_vwsubu_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vwsubu_vv_u32mf2(...) __riscv_vwsubu_vv_u32mf2(__VA_ARGS__) |
| #define | vwsubu_vv_u32mf2_m(...) __riscv_vwsubu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vwsubu_vv_u64m1(...) __riscv_vwsubu_vv_u64m1(__VA_ARGS__) |
| #define | vwsubu_vv_u64m1_m(...) __riscv_vwsubu_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vwsubu_vv_u64m2(...) __riscv_vwsubu_vv_u64m2(__VA_ARGS__) |
| #define | vwsubu_vv_u64m2_m(...) __riscv_vwsubu_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vwsubu_vv_u64m4(...) __riscv_vwsubu_vv_u64m4(__VA_ARGS__) |
| #define | vwsubu_vv_u64m4_m(...) __riscv_vwsubu_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vwsubu_vv_u64m8(...) __riscv_vwsubu_vv_u64m8(__VA_ARGS__) |
| #define | vwsubu_vv_u64m8_m(...) __riscv_vwsubu_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vwsubu_vx_u16m1(...) __riscv_vwsubu_vx_u16m1(__VA_ARGS__) |
| #define | vwsubu_vx_u16m1_m(...) __riscv_vwsubu_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vwsubu_vx_u16m2(...) __riscv_vwsubu_vx_u16m2(__VA_ARGS__) |
| #define | vwsubu_vx_u16m2_m(...) __riscv_vwsubu_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vwsubu_vx_u16m4(...) __riscv_vwsubu_vx_u16m4(__VA_ARGS__) |
| #define | vwsubu_vx_u16m4_m(...) __riscv_vwsubu_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vwsubu_vx_u16m8(...) __riscv_vwsubu_vx_u16m8(__VA_ARGS__) |
| #define | vwsubu_vx_u16m8_m(...) __riscv_vwsubu_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vwsubu_vx_u16mf2(...) __riscv_vwsubu_vx_u16mf2(__VA_ARGS__) |
| #define | vwsubu_vx_u16mf2_m(...) __riscv_vwsubu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vwsubu_vx_u16mf4(...) __riscv_vwsubu_vx_u16mf4(__VA_ARGS__) |
| #define | vwsubu_vx_u16mf4_m(...) __riscv_vwsubu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vwsubu_vx_u32m1(...) __riscv_vwsubu_vx_u32m1(__VA_ARGS__) |
| #define | vwsubu_vx_u32m1_m(...) __riscv_vwsubu_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vwsubu_vx_u32m2(...) __riscv_vwsubu_vx_u32m2(__VA_ARGS__) |
| #define | vwsubu_vx_u32m2_m(...) __riscv_vwsubu_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vwsubu_vx_u32m4(...) __riscv_vwsubu_vx_u32m4(__VA_ARGS__) |
| #define | vwsubu_vx_u32m4_m(...) __riscv_vwsubu_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vwsubu_vx_u32m8(...) __riscv_vwsubu_vx_u32m8(__VA_ARGS__) |
| #define | vwsubu_vx_u32m8_m(...) __riscv_vwsubu_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vwsubu_vx_u32mf2(...) __riscv_vwsubu_vx_u32mf2(__VA_ARGS__) |
| #define | vwsubu_vx_u32mf2_m(...) __riscv_vwsubu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vwsubu_vx_u64m1(...) __riscv_vwsubu_vx_u64m1(__VA_ARGS__) |
| #define | vwsubu_vx_u64m1_m(...) __riscv_vwsubu_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vwsubu_vx_u64m2(...) __riscv_vwsubu_vx_u64m2(__VA_ARGS__) |
| #define | vwsubu_vx_u64m2_m(...) __riscv_vwsubu_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vwsubu_vx_u64m4(...) __riscv_vwsubu_vx_u64m4(__VA_ARGS__) |
| #define | vwsubu_vx_u64m4_m(...) __riscv_vwsubu_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vwsubu_vx_u64m8(...) __riscv_vwsubu_vx_u64m8(__VA_ARGS__) |
| #define | vwsubu_vx_u64m8_m(...) __riscv_vwsubu_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vwsubu_wv_u16m1(...) __riscv_vwsubu_wv_u16m1(__VA_ARGS__) |
| #define | vwsubu_wv_u16m1_m(...) __riscv_vwsubu_wv_u16m1_tumu(__VA_ARGS__) |
| #define | vwsubu_wv_u16m2(...) __riscv_vwsubu_wv_u16m2(__VA_ARGS__) |
| #define | vwsubu_wv_u16m2_m(...) __riscv_vwsubu_wv_u16m2_tumu(__VA_ARGS__) |
| #define | vwsubu_wv_u16m4(...) __riscv_vwsubu_wv_u16m4(__VA_ARGS__) |
| #define | vwsubu_wv_u16m4_m(...) __riscv_vwsubu_wv_u16m4_tumu(__VA_ARGS__) |
| #define | vwsubu_wv_u16m8(...) __riscv_vwsubu_wv_u16m8(__VA_ARGS__) |
| #define | vwsubu_wv_u16m8_m(...) __riscv_vwsubu_wv_u16m8_tumu(__VA_ARGS__) |
| #define | vwsubu_wv_u16mf2(...) __riscv_vwsubu_wv_u16mf2(__VA_ARGS__) |
| #define | vwsubu_wv_u16mf2_m(...) __riscv_vwsubu_wv_u16mf2_tumu(__VA_ARGS__) |
| #define | vwsubu_wv_u16mf4(...) __riscv_vwsubu_wv_u16mf4(__VA_ARGS__) |
| #define | vwsubu_wv_u16mf4_m(...) __riscv_vwsubu_wv_u16mf4_tumu(__VA_ARGS__) |
| #define | vwsubu_wv_u32m1(...) __riscv_vwsubu_wv_u32m1(__VA_ARGS__) |
| #define | vwsubu_wv_u32m1_m(...) __riscv_vwsubu_wv_u32m1_tumu(__VA_ARGS__) |
| #define | vwsubu_wv_u32m2(...) __riscv_vwsubu_wv_u32m2(__VA_ARGS__) |
| #define | vwsubu_wv_u32m2_m(...) __riscv_vwsubu_wv_u32m2_tumu(__VA_ARGS__) |
| #define | vwsubu_wv_u32m4(...) __riscv_vwsubu_wv_u32m4(__VA_ARGS__) |
| #define | vwsubu_wv_u32m4_m(...) __riscv_vwsubu_wv_u32m4_tumu(__VA_ARGS__) |
| #define | vwsubu_wv_u32m8(...) __riscv_vwsubu_wv_u32m8(__VA_ARGS__) |
| #define | vwsubu_wv_u32m8_m(...) __riscv_vwsubu_wv_u32m8_tumu(__VA_ARGS__) |
| #define | vwsubu_wv_u32mf2(...) __riscv_vwsubu_wv_u32mf2(__VA_ARGS__) |
| #define | vwsubu_wv_u32mf2_m(...) __riscv_vwsubu_wv_u32mf2_tumu(__VA_ARGS__) |
| #define | vwsubu_wv_u64m1(...) __riscv_vwsubu_wv_u64m1(__VA_ARGS__) |
| #define | vwsubu_wv_u64m1_m(...) __riscv_vwsubu_wv_u64m1_tumu(__VA_ARGS__) |
| #define | vwsubu_wv_u64m2(...) __riscv_vwsubu_wv_u64m2(__VA_ARGS__) |
| #define | vwsubu_wv_u64m2_m(...) __riscv_vwsubu_wv_u64m2_tumu(__VA_ARGS__) |
| #define | vwsubu_wv_u64m4(...) __riscv_vwsubu_wv_u64m4(__VA_ARGS__) |
| #define | vwsubu_wv_u64m4_m(...) __riscv_vwsubu_wv_u64m4_tumu(__VA_ARGS__) |
| #define | vwsubu_wv_u64m8(...) __riscv_vwsubu_wv_u64m8(__VA_ARGS__) |
| #define | vwsubu_wv_u64m8_m(...) __riscv_vwsubu_wv_u64m8_tumu(__VA_ARGS__) |
| #define | vwsubu_wx_u16m1(...) __riscv_vwsubu_wx_u16m1(__VA_ARGS__) |
| #define | vwsubu_wx_u16m1_m(...) __riscv_vwsubu_wx_u16m1_tumu(__VA_ARGS__) |
| #define | vwsubu_wx_u16m2(...) __riscv_vwsubu_wx_u16m2(__VA_ARGS__) |
| #define | vwsubu_wx_u16m2_m(...) __riscv_vwsubu_wx_u16m2_tumu(__VA_ARGS__) |
| #define | vwsubu_wx_u16m4(...) __riscv_vwsubu_wx_u16m4(__VA_ARGS__) |
| #define | vwsubu_wx_u16m4_m(...) __riscv_vwsubu_wx_u16m4_tumu(__VA_ARGS__) |
| #define | vwsubu_wx_u16m8(...) __riscv_vwsubu_wx_u16m8(__VA_ARGS__) |
| #define | vwsubu_wx_u16m8_m(...) __riscv_vwsubu_wx_u16m8_tumu(__VA_ARGS__) |
| #define | vwsubu_wx_u16mf2(...) __riscv_vwsubu_wx_u16mf2(__VA_ARGS__) |
| #define | vwsubu_wx_u16mf2_m(...) __riscv_vwsubu_wx_u16mf2_tumu(__VA_ARGS__) |
| #define | vwsubu_wx_u16mf4(...) __riscv_vwsubu_wx_u16mf4(__VA_ARGS__) |
| #define | vwsubu_wx_u16mf4_m(...) __riscv_vwsubu_wx_u16mf4_tumu(__VA_ARGS__) |
| #define | vwsubu_wx_u32m1(...) __riscv_vwsubu_wx_u32m1(__VA_ARGS__) |
| #define | vwsubu_wx_u32m1_m(...) __riscv_vwsubu_wx_u32m1_tumu(__VA_ARGS__) |
| #define | vwsubu_wx_u32m2(...) __riscv_vwsubu_wx_u32m2(__VA_ARGS__) |
| #define | vwsubu_wx_u32m2_m(...) __riscv_vwsubu_wx_u32m2_tumu(__VA_ARGS__) |
| #define | vwsubu_wx_u32m4(...) __riscv_vwsubu_wx_u32m4(__VA_ARGS__) |
| #define | vwsubu_wx_u32m4_m(...) __riscv_vwsubu_wx_u32m4_tumu(__VA_ARGS__) |
| #define | vwsubu_wx_u32m8(...) __riscv_vwsubu_wx_u32m8(__VA_ARGS__) |
| #define | vwsubu_wx_u32m8_m(...) __riscv_vwsubu_wx_u32m8_tumu(__VA_ARGS__) |
| #define | vwsubu_wx_u32mf2(...) __riscv_vwsubu_wx_u32mf2(__VA_ARGS__) |
| #define | vwsubu_wx_u32mf2_m(...) __riscv_vwsubu_wx_u32mf2_tumu(__VA_ARGS__) |
| #define | vwsubu_wx_u64m1(...) __riscv_vwsubu_wx_u64m1(__VA_ARGS__) |
| #define | vwsubu_wx_u64m1_m(...) __riscv_vwsubu_wx_u64m1_tumu(__VA_ARGS__) |
| #define | vwsubu_wx_u64m2(...) __riscv_vwsubu_wx_u64m2(__VA_ARGS__) |
| #define | vwsubu_wx_u64m2_m(...) __riscv_vwsubu_wx_u64m2_tumu(__VA_ARGS__) |
| #define | vwsubu_wx_u64m4(...) __riscv_vwsubu_wx_u64m4(__VA_ARGS__) |
| #define | vwsubu_wx_u64m4_m(...) __riscv_vwsubu_wx_u64m4_tumu(__VA_ARGS__) |
| #define | vwsubu_wx_u64m8(...) __riscv_vwsubu_wx_u64m8(__VA_ARGS__) |
| #define | vwsubu_wx_u64m8_m(...) __riscv_vwsubu_wx_u64m8_tumu(__VA_ARGS__) |
| #define | vxor_vv_i16m1(...) __riscv_vxor_vv_i16m1(__VA_ARGS__) |
| #define | vxor_vv_i16m1_m(...) __riscv_vxor_vv_i16m1_tumu(__VA_ARGS__) |
| #define | vxor_vv_i16m2(...) __riscv_vxor_vv_i16m2(__VA_ARGS__) |
| #define | vxor_vv_i16m2_m(...) __riscv_vxor_vv_i16m2_tumu(__VA_ARGS__) |
| #define | vxor_vv_i16m4(...) __riscv_vxor_vv_i16m4(__VA_ARGS__) |
| #define | vxor_vv_i16m4_m(...) __riscv_vxor_vv_i16m4_tumu(__VA_ARGS__) |
| #define | vxor_vv_i16m8(...) __riscv_vxor_vv_i16m8(__VA_ARGS__) |
| #define | vxor_vv_i16m8_m(...) __riscv_vxor_vv_i16m8_tumu(__VA_ARGS__) |
| #define | vxor_vv_i16mf2(...) __riscv_vxor_vv_i16mf2(__VA_ARGS__) |
| #define | vxor_vv_i16mf2_m(...) __riscv_vxor_vv_i16mf2_tumu(__VA_ARGS__) |
| #define | vxor_vv_i16mf4(...) __riscv_vxor_vv_i16mf4(__VA_ARGS__) |
| #define | vxor_vv_i16mf4_m(...) __riscv_vxor_vv_i16mf4_tumu(__VA_ARGS__) |
| #define | vxor_vv_i32m1(...) __riscv_vxor_vv_i32m1(__VA_ARGS__) |
| #define | vxor_vv_i32m1_m(...) __riscv_vxor_vv_i32m1_tumu(__VA_ARGS__) |
| #define | vxor_vv_i32m2(...) __riscv_vxor_vv_i32m2(__VA_ARGS__) |
| #define | vxor_vv_i32m2_m(...) __riscv_vxor_vv_i32m2_tumu(__VA_ARGS__) |
| #define | vxor_vv_i32m4(...) __riscv_vxor_vv_i32m4(__VA_ARGS__) |
| #define | vxor_vv_i32m4_m(...) __riscv_vxor_vv_i32m4_tumu(__VA_ARGS__) |
| #define | vxor_vv_i32m8(...) __riscv_vxor_vv_i32m8(__VA_ARGS__) |
| #define | vxor_vv_i32m8_m(...) __riscv_vxor_vv_i32m8_tumu(__VA_ARGS__) |
| #define | vxor_vv_i32mf2(...) __riscv_vxor_vv_i32mf2(__VA_ARGS__) |
| #define | vxor_vv_i32mf2_m(...) __riscv_vxor_vv_i32mf2_tumu(__VA_ARGS__) |
| #define | vxor_vv_i64m1(...) __riscv_vxor_vv_i64m1(__VA_ARGS__) |
| #define | vxor_vv_i64m1_m(...) __riscv_vxor_vv_i64m1_tumu(__VA_ARGS__) |
| #define | vxor_vv_i64m2(...) __riscv_vxor_vv_i64m2(__VA_ARGS__) |
| #define | vxor_vv_i64m2_m(...) __riscv_vxor_vv_i64m2_tumu(__VA_ARGS__) |
| #define | vxor_vv_i64m4(...) __riscv_vxor_vv_i64m4(__VA_ARGS__) |
| #define | vxor_vv_i64m4_m(...) __riscv_vxor_vv_i64m4_tumu(__VA_ARGS__) |
| #define | vxor_vv_i64m8(...) __riscv_vxor_vv_i64m8(__VA_ARGS__) |
| #define | vxor_vv_i64m8_m(...) __riscv_vxor_vv_i64m8_tumu(__VA_ARGS__) |
| #define | vxor_vv_i8m1(...) __riscv_vxor_vv_i8m1(__VA_ARGS__) |
| #define | vxor_vv_i8m1_m(...) __riscv_vxor_vv_i8m1_tumu(__VA_ARGS__) |
| #define | vxor_vv_i8m2(...) __riscv_vxor_vv_i8m2(__VA_ARGS__) |
| #define | vxor_vv_i8m2_m(...) __riscv_vxor_vv_i8m2_tumu(__VA_ARGS__) |
| #define | vxor_vv_i8m4(...) __riscv_vxor_vv_i8m4(__VA_ARGS__) |
| #define | vxor_vv_i8m4_m(...) __riscv_vxor_vv_i8m4_tumu(__VA_ARGS__) |
| #define | vxor_vv_i8m8(...) __riscv_vxor_vv_i8m8(__VA_ARGS__) |
| #define | vxor_vv_i8m8_m(...) __riscv_vxor_vv_i8m8_tumu(__VA_ARGS__) |
| #define | vxor_vv_i8mf2(...) __riscv_vxor_vv_i8mf2(__VA_ARGS__) |
| #define | vxor_vv_i8mf2_m(...) __riscv_vxor_vv_i8mf2_tumu(__VA_ARGS__) |
| #define | vxor_vv_i8mf4(...) __riscv_vxor_vv_i8mf4(__VA_ARGS__) |
| #define | vxor_vv_i8mf4_m(...) __riscv_vxor_vv_i8mf4_tumu(__VA_ARGS__) |
| #define | vxor_vv_i8mf8(...) __riscv_vxor_vv_i8mf8(__VA_ARGS__) |
| #define | vxor_vv_i8mf8_m(...) __riscv_vxor_vv_i8mf8_tumu(__VA_ARGS__) |
| #define | vxor_vv_u16m1(...) __riscv_vxor_vv_u16m1(__VA_ARGS__) |
| #define | vxor_vv_u16m1_m(...) __riscv_vxor_vv_u16m1_tumu(__VA_ARGS__) |
| #define | vxor_vv_u16m2(...) __riscv_vxor_vv_u16m2(__VA_ARGS__) |
| #define | vxor_vv_u16m2_m(...) __riscv_vxor_vv_u16m2_tumu(__VA_ARGS__) |
| #define | vxor_vv_u16m4(...) __riscv_vxor_vv_u16m4(__VA_ARGS__) |
| #define | vxor_vv_u16m4_m(...) __riscv_vxor_vv_u16m4_tumu(__VA_ARGS__) |
| #define | vxor_vv_u16m8(...) __riscv_vxor_vv_u16m8(__VA_ARGS__) |
| #define | vxor_vv_u16m8_m(...) __riscv_vxor_vv_u16m8_tumu(__VA_ARGS__) |
| #define | vxor_vv_u16mf2(...) __riscv_vxor_vv_u16mf2(__VA_ARGS__) |
| #define | vxor_vv_u16mf2_m(...) __riscv_vxor_vv_u16mf2_tumu(__VA_ARGS__) |
| #define | vxor_vv_u16mf4(...) __riscv_vxor_vv_u16mf4(__VA_ARGS__) |
| #define | vxor_vv_u16mf4_m(...) __riscv_vxor_vv_u16mf4_tumu(__VA_ARGS__) |
| #define | vxor_vv_u32m1(...) __riscv_vxor_vv_u32m1(__VA_ARGS__) |
| #define | vxor_vv_u32m1_m(...) __riscv_vxor_vv_u32m1_tumu(__VA_ARGS__) |
| #define | vxor_vv_u32m2(...) __riscv_vxor_vv_u32m2(__VA_ARGS__) |
| #define | vxor_vv_u32m2_m(...) __riscv_vxor_vv_u32m2_tumu(__VA_ARGS__) |
| #define | vxor_vv_u32m4(...) __riscv_vxor_vv_u32m4(__VA_ARGS__) |
| #define | vxor_vv_u32m4_m(...) __riscv_vxor_vv_u32m4_tumu(__VA_ARGS__) |
| #define | vxor_vv_u32m8(...) __riscv_vxor_vv_u32m8(__VA_ARGS__) |
| #define | vxor_vv_u32m8_m(...) __riscv_vxor_vv_u32m8_tumu(__VA_ARGS__) |
| #define | vxor_vv_u32mf2(...) __riscv_vxor_vv_u32mf2(__VA_ARGS__) |
| #define | vxor_vv_u32mf2_m(...) __riscv_vxor_vv_u32mf2_tumu(__VA_ARGS__) |
| #define | vxor_vv_u64m1(...) __riscv_vxor_vv_u64m1(__VA_ARGS__) |
| #define | vxor_vv_u64m1_m(...) __riscv_vxor_vv_u64m1_tumu(__VA_ARGS__) |
| #define | vxor_vv_u64m2(...) __riscv_vxor_vv_u64m2(__VA_ARGS__) |
| #define | vxor_vv_u64m2_m(...) __riscv_vxor_vv_u64m2_tumu(__VA_ARGS__) |
| #define | vxor_vv_u64m4(...) __riscv_vxor_vv_u64m4(__VA_ARGS__) |
| #define | vxor_vv_u64m4_m(...) __riscv_vxor_vv_u64m4_tumu(__VA_ARGS__) |
| #define | vxor_vv_u64m8(...) __riscv_vxor_vv_u64m8(__VA_ARGS__) |
| #define | vxor_vv_u64m8_m(...) __riscv_vxor_vv_u64m8_tumu(__VA_ARGS__) |
| #define | vxor_vv_u8m1(...) __riscv_vxor_vv_u8m1(__VA_ARGS__) |
| #define | vxor_vv_u8m1_m(...) __riscv_vxor_vv_u8m1_tumu(__VA_ARGS__) |
| #define | vxor_vv_u8m2(...) __riscv_vxor_vv_u8m2(__VA_ARGS__) |
| #define | vxor_vv_u8m2_m(...) __riscv_vxor_vv_u8m2_tumu(__VA_ARGS__) |
| #define | vxor_vv_u8m4(...) __riscv_vxor_vv_u8m4(__VA_ARGS__) |
| #define | vxor_vv_u8m4_m(...) __riscv_vxor_vv_u8m4_tumu(__VA_ARGS__) |
| #define | vxor_vv_u8m8(...) __riscv_vxor_vv_u8m8(__VA_ARGS__) |
| #define | vxor_vv_u8m8_m(...) __riscv_vxor_vv_u8m8_tumu(__VA_ARGS__) |
| #define | vxor_vv_u8mf2(...) __riscv_vxor_vv_u8mf2(__VA_ARGS__) |
| #define | vxor_vv_u8mf2_m(...) __riscv_vxor_vv_u8mf2_tumu(__VA_ARGS__) |
| #define | vxor_vv_u8mf4(...) __riscv_vxor_vv_u8mf4(__VA_ARGS__) |
| #define | vxor_vv_u8mf4_m(...) __riscv_vxor_vv_u8mf4_tumu(__VA_ARGS__) |
| #define | vxor_vv_u8mf8(...) __riscv_vxor_vv_u8mf8(__VA_ARGS__) |
| #define | vxor_vv_u8mf8_m(...) __riscv_vxor_vv_u8mf8_tumu(__VA_ARGS__) |
| #define | vxor_vx_i16m1(...) __riscv_vxor_vx_i16m1(__VA_ARGS__) |
| #define | vxor_vx_i16m1_m(...) __riscv_vxor_vx_i16m1_tumu(__VA_ARGS__) |
| #define | vxor_vx_i16m2(...) __riscv_vxor_vx_i16m2(__VA_ARGS__) |
| #define | vxor_vx_i16m2_m(...) __riscv_vxor_vx_i16m2_tumu(__VA_ARGS__) |
| #define | vxor_vx_i16m4(...) __riscv_vxor_vx_i16m4(__VA_ARGS__) |
| #define | vxor_vx_i16m4_m(...) __riscv_vxor_vx_i16m4_tumu(__VA_ARGS__) |
| #define | vxor_vx_i16m8(...) __riscv_vxor_vx_i16m8(__VA_ARGS__) |
| #define | vxor_vx_i16m8_m(...) __riscv_vxor_vx_i16m8_tumu(__VA_ARGS__) |
| #define | vxor_vx_i16mf2(...) __riscv_vxor_vx_i16mf2(__VA_ARGS__) |
| #define | vxor_vx_i16mf2_m(...) __riscv_vxor_vx_i16mf2_tumu(__VA_ARGS__) |
| #define | vxor_vx_i16mf4(...) __riscv_vxor_vx_i16mf4(__VA_ARGS__) |
| #define | vxor_vx_i16mf4_m(...) __riscv_vxor_vx_i16mf4_tumu(__VA_ARGS__) |
| #define | vxor_vx_i32m1(...) __riscv_vxor_vx_i32m1(__VA_ARGS__) |
| #define | vxor_vx_i32m1_m(...) __riscv_vxor_vx_i32m1_tumu(__VA_ARGS__) |
| #define | vxor_vx_i32m2(...) __riscv_vxor_vx_i32m2(__VA_ARGS__) |
| #define | vxor_vx_i32m2_m(...) __riscv_vxor_vx_i32m2_tumu(__VA_ARGS__) |
| #define | vxor_vx_i32m4(...) __riscv_vxor_vx_i32m4(__VA_ARGS__) |
| #define | vxor_vx_i32m4_m(...) __riscv_vxor_vx_i32m4_tumu(__VA_ARGS__) |
| #define | vxor_vx_i32m8(...) __riscv_vxor_vx_i32m8(__VA_ARGS__) |
| #define | vxor_vx_i32m8_m(...) __riscv_vxor_vx_i32m8_tumu(__VA_ARGS__) |
| #define | vxor_vx_i32mf2(...) __riscv_vxor_vx_i32mf2(__VA_ARGS__) |
| #define | vxor_vx_i32mf2_m(...) __riscv_vxor_vx_i32mf2_tumu(__VA_ARGS__) |
| #define | vxor_vx_i64m1(...) __riscv_vxor_vx_i64m1(__VA_ARGS__) |
| #define | vxor_vx_i64m1_m(...) __riscv_vxor_vx_i64m1_tumu(__VA_ARGS__) |
| #define | vxor_vx_i64m2(...) __riscv_vxor_vx_i64m2(__VA_ARGS__) |
| #define | vxor_vx_i64m2_m(...) __riscv_vxor_vx_i64m2_tumu(__VA_ARGS__) |
| #define | vxor_vx_i64m4(...) __riscv_vxor_vx_i64m4(__VA_ARGS__) |
| #define | vxor_vx_i64m4_m(...) __riscv_vxor_vx_i64m4_tumu(__VA_ARGS__) |
| #define | vxor_vx_i64m8(...) __riscv_vxor_vx_i64m8(__VA_ARGS__) |
| #define | vxor_vx_i64m8_m(...) __riscv_vxor_vx_i64m8_tumu(__VA_ARGS__) |
| #define | vxor_vx_i8m1(...) __riscv_vxor_vx_i8m1(__VA_ARGS__) |
| #define | vxor_vx_i8m1_m(...) __riscv_vxor_vx_i8m1_tumu(__VA_ARGS__) |
| #define | vxor_vx_i8m2(...) __riscv_vxor_vx_i8m2(__VA_ARGS__) |
| #define | vxor_vx_i8m2_m(...) __riscv_vxor_vx_i8m2_tumu(__VA_ARGS__) |
| #define | vxor_vx_i8m4(...) __riscv_vxor_vx_i8m4(__VA_ARGS__) |
| #define | vxor_vx_i8m4_m(...) __riscv_vxor_vx_i8m4_tumu(__VA_ARGS__) |
| #define | vxor_vx_i8m8(...) __riscv_vxor_vx_i8m8(__VA_ARGS__) |
| #define | vxor_vx_i8m8_m(...) __riscv_vxor_vx_i8m8_tumu(__VA_ARGS__) |
| #define | vxor_vx_i8mf2(...) __riscv_vxor_vx_i8mf2(__VA_ARGS__) |
| #define | vxor_vx_i8mf2_m(...) __riscv_vxor_vx_i8mf2_tumu(__VA_ARGS__) |
| #define | vxor_vx_i8mf4(...) __riscv_vxor_vx_i8mf4(__VA_ARGS__) |
| #define | vxor_vx_i8mf4_m(...) __riscv_vxor_vx_i8mf4_tumu(__VA_ARGS__) |
| #define | vxor_vx_i8mf8(...) __riscv_vxor_vx_i8mf8(__VA_ARGS__) |
| #define | vxor_vx_i8mf8_m(...) __riscv_vxor_vx_i8mf8_tumu(__VA_ARGS__) |
| #define | vxor_vx_u16m1(...) __riscv_vxor_vx_u16m1(__VA_ARGS__) |
| #define | vxor_vx_u16m1_m(...) __riscv_vxor_vx_u16m1_tumu(__VA_ARGS__) |
| #define | vxor_vx_u16m2(...) __riscv_vxor_vx_u16m2(__VA_ARGS__) |
| #define | vxor_vx_u16m2_m(...) __riscv_vxor_vx_u16m2_tumu(__VA_ARGS__) |
| #define | vxor_vx_u16m4(...) __riscv_vxor_vx_u16m4(__VA_ARGS__) |
| #define | vxor_vx_u16m4_m(...) __riscv_vxor_vx_u16m4_tumu(__VA_ARGS__) |
| #define | vxor_vx_u16m8(...) __riscv_vxor_vx_u16m8(__VA_ARGS__) |
| #define | vxor_vx_u16m8_m(...) __riscv_vxor_vx_u16m8_tumu(__VA_ARGS__) |
| #define | vxor_vx_u16mf2(...) __riscv_vxor_vx_u16mf2(__VA_ARGS__) |
| #define | vxor_vx_u16mf2_m(...) __riscv_vxor_vx_u16mf2_tumu(__VA_ARGS__) |
| #define | vxor_vx_u16mf4(...) __riscv_vxor_vx_u16mf4(__VA_ARGS__) |
| #define | vxor_vx_u16mf4_m(...) __riscv_vxor_vx_u16mf4_tumu(__VA_ARGS__) |
| #define | vxor_vx_u32m1(...) __riscv_vxor_vx_u32m1(__VA_ARGS__) |
| #define | vxor_vx_u32m1_m(...) __riscv_vxor_vx_u32m1_tumu(__VA_ARGS__) |
| #define | vxor_vx_u32m2(...) __riscv_vxor_vx_u32m2(__VA_ARGS__) |
| #define | vxor_vx_u32m2_m(...) __riscv_vxor_vx_u32m2_tumu(__VA_ARGS__) |
| #define | vxor_vx_u32m4(...) __riscv_vxor_vx_u32m4(__VA_ARGS__) |
| #define | vxor_vx_u32m4_m(...) __riscv_vxor_vx_u32m4_tumu(__VA_ARGS__) |
| #define | vxor_vx_u32m8(...) __riscv_vxor_vx_u32m8(__VA_ARGS__) |
| #define | vxor_vx_u32m8_m(...) __riscv_vxor_vx_u32m8_tumu(__VA_ARGS__) |
| #define | vxor_vx_u32mf2(...) __riscv_vxor_vx_u32mf2(__VA_ARGS__) |
| #define | vxor_vx_u32mf2_m(...) __riscv_vxor_vx_u32mf2_tumu(__VA_ARGS__) |
| #define | vxor_vx_u64m1(...) __riscv_vxor_vx_u64m1(__VA_ARGS__) |
| #define | vxor_vx_u64m1_m(...) __riscv_vxor_vx_u64m1_tumu(__VA_ARGS__) |
| #define | vxor_vx_u64m2(...) __riscv_vxor_vx_u64m2(__VA_ARGS__) |
| #define | vxor_vx_u64m2_m(...) __riscv_vxor_vx_u64m2_tumu(__VA_ARGS__) |
| #define | vxor_vx_u64m4(...) __riscv_vxor_vx_u64m4(__VA_ARGS__) |
| #define | vxor_vx_u64m4_m(...) __riscv_vxor_vx_u64m4_tumu(__VA_ARGS__) |
| #define | vxor_vx_u64m8(...) __riscv_vxor_vx_u64m8(__VA_ARGS__) |
| #define | vxor_vx_u64m8_m(...) __riscv_vxor_vx_u64m8_tumu(__VA_ARGS__) |
| #define | vxor_vx_u8m1(...) __riscv_vxor_vx_u8m1(__VA_ARGS__) |
| #define | vxor_vx_u8m1_m(...) __riscv_vxor_vx_u8m1_tumu(__VA_ARGS__) |
| #define | vxor_vx_u8m2(...) __riscv_vxor_vx_u8m2(__VA_ARGS__) |
| #define | vxor_vx_u8m2_m(...) __riscv_vxor_vx_u8m2_tumu(__VA_ARGS__) |
| #define | vxor_vx_u8m4(...) __riscv_vxor_vx_u8m4(__VA_ARGS__) |
| #define | vxor_vx_u8m4_m(...) __riscv_vxor_vx_u8m4_tumu(__VA_ARGS__) |
| #define | vxor_vx_u8m8(...) __riscv_vxor_vx_u8m8(__VA_ARGS__) |
| #define | vxor_vx_u8m8_m(...) __riscv_vxor_vx_u8m8_tumu(__VA_ARGS__) |
| #define | vxor_vx_u8mf2(...) __riscv_vxor_vx_u8mf2(__VA_ARGS__) |
| #define | vxor_vx_u8mf2_m(...) __riscv_vxor_vx_u8mf2_tumu(__VA_ARGS__) |
| #define | vxor_vx_u8mf4(...) __riscv_vxor_vx_u8mf4(__VA_ARGS__) |
| #define | vxor_vx_u8mf4_m(...) __riscv_vxor_vx_u8mf4_tumu(__VA_ARGS__) |
| #define | vxor_vx_u8mf8(...) __riscv_vxor_vx_u8mf8(__VA_ARGS__) |
| #define | vxor_vx_u8mf8_m(...) __riscv_vxor_vx_u8mf8_tumu(__VA_ARGS__) |
| #define | vzext_vf2_u16m1(...) __riscv_vzext_vf2_u16m1(__VA_ARGS__) |
| #define | vzext_vf2_u16m1_m(...) __riscv_vzext_vf2_u16m1_tumu(__VA_ARGS__) |
| #define | vzext_vf2_u16m2(...) __riscv_vzext_vf2_u16m2(__VA_ARGS__) |
| #define | vzext_vf2_u16m2_m(...) __riscv_vzext_vf2_u16m2_tumu(__VA_ARGS__) |
| #define | vzext_vf2_u16m4(...) __riscv_vzext_vf2_u16m4(__VA_ARGS__) |
| #define | vzext_vf2_u16m4_m(...) __riscv_vzext_vf2_u16m4_tumu(__VA_ARGS__) |
| #define | vzext_vf2_u16m8(...) __riscv_vzext_vf2_u16m8(__VA_ARGS__) |
| #define | vzext_vf2_u16m8_m(...) __riscv_vzext_vf2_u16m8_tumu(__VA_ARGS__) |
| #define | vzext_vf2_u16mf2(...) __riscv_vzext_vf2_u16mf2(__VA_ARGS__) |
| #define | vzext_vf2_u16mf2_m(...) __riscv_vzext_vf2_u16mf2_tumu(__VA_ARGS__) |
| #define | vzext_vf2_u16mf4(...) __riscv_vzext_vf2_u16mf4(__VA_ARGS__) |
| #define | vzext_vf2_u16mf4_m(...) __riscv_vzext_vf2_u16mf4_tumu(__VA_ARGS__) |
| #define | vzext_vf2_u32m1(...) __riscv_vzext_vf2_u32m1(__VA_ARGS__) |
| #define | vzext_vf2_u32m1_m(...) __riscv_vzext_vf2_u32m1_tumu(__VA_ARGS__) |
| #define | vzext_vf2_u32m2(...) __riscv_vzext_vf2_u32m2(__VA_ARGS__) |
| #define | vzext_vf2_u32m2_m(...) __riscv_vzext_vf2_u32m2_tumu(__VA_ARGS__) |
| #define | vzext_vf2_u32m4(...) __riscv_vzext_vf2_u32m4(__VA_ARGS__) |
| #define | vzext_vf2_u32m4_m(...) __riscv_vzext_vf2_u32m4_tumu(__VA_ARGS__) |
| #define | vzext_vf2_u32m8(...) __riscv_vzext_vf2_u32m8(__VA_ARGS__) |
| #define | vzext_vf2_u32m8_m(...) __riscv_vzext_vf2_u32m8_tumu(__VA_ARGS__) |
| #define | vzext_vf2_u32mf2(...) __riscv_vzext_vf2_u32mf2(__VA_ARGS__) |
| #define | vzext_vf2_u32mf2_m(...) __riscv_vzext_vf2_u32mf2_tumu(__VA_ARGS__) |
| #define | vzext_vf2_u64m1(...) __riscv_vzext_vf2_u64m1(__VA_ARGS__) |
| #define | vzext_vf2_u64m1_m(...) __riscv_vzext_vf2_u64m1_tumu(__VA_ARGS__) |
| #define | vzext_vf2_u64m2(...) __riscv_vzext_vf2_u64m2(__VA_ARGS__) |
| #define | vzext_vf2_u64m2_m(...) __riscv_vzext_vf2_u64m2_tumu(__VA_ARGS__) |
| #define | vzext_vf2_u64m4(...) __riscv_vzext_vf2_u64m4(__VA_ARGS__) |
| #define | vzext_vf2_u64m4_m(...) __riscv_vzext_vf2_u64m4_tumu(__VA_ARGS__) |
| #define | vzext_vf2_u64m8(...) __riscv_vzext_vf2_u64m8(__VA_ARGS__) |
| #define | vzext_vf2_u64m8_m(...) __riscv_vzext_vf2_u64m8_tumu(__VA_ARGS__) |
| #define | vzext_vf4_u32m1(...) __riscv_vzext_vf4_u32m1(__VA_ARGS__) |
| #define | vzext_vf4_u32m1_m(...) __riscv_vzext_vf4_u32m1_tumu(__VA_ARGS__) |
| #define | vzext_vf4_u32m2(...) __riscv_vzext_vf4_u32m2(__VA_ARGS__) |
| #define | vzext_vf4_u32m2_m(...) __riscv_vzext_vf4_u32m2_tumu(__VA_ARGS__) |
| #define | vzext_vf4_u32m4(...) __riscv_vzext_vf4_u32m4(__VA_ARGS__) |
| #define | vzext_vf4_u32m4_m(...) __riscv_vzext_vf4_u32m4_tumu(__VA_ARGS__) |
| #define | vzext_vf4_u32m8(...) __riscv_vzext_vf4_u32m8(__VA_ARGS__) |
| #define | vzext_vf4_u32m8_m(...) __riscv_vzext_vf4_u32m8_tumu(__VA_ARGS__) |
| #define | vzext_vf4_u32mf2(...) __riscv_vzext_vf4_u32mf2(__VA_ARGS__) |
| #define | vzext_vf4_u32mf2_m(...) __riscv_vzext_vf4_u32mf2_tumu(__VA_ARGS__) |
| #define | vzext_vf4_u64m1(...) __riscv_vzext_vf4_u64m1(__VA_ARGS__) |
| #define | vzext_vf4_u64m1_m(...) __riscv_vzext_vf4_u64m1_tumu(__VA_ARGS__) |
| #define | vzext_vf4_u64m2(...) __riscv_vzext_vf4_u64m2(__VA_ARGS__) |
| #define | vzext_vf4_u64m2_m(...) __riscv_vzext_vf4_u64m2_tumu(__VA_ARGS__) |
| #define | vzext_vf4_u64m4(...) __riscv_vzext_vf4_u64m4(__VA_ARGS__) |
| #define | vzext_vf4_u64m4_m(...) __riscv_vzext_vf4_u64m4_tumu(__VA_ARGS__) |
| #define | vzext_vf4_u64m8(...) __riscv_vzext_vf4_u64m8(__VA_ARGS__) |
| #define | vzext_vf4_u64m8_m(...) __riscv_vzext_vf4_u64m8_tumu(__VA_ARGS__) |
| #define | vzext_vf8_u64m1(...) __riscv_vzext_vf8_u64m1(__VA_ARGS__) |
| #define | vzext_vf8_u64m1_m(...) __riscv_vzext_vf8_u64m1_tumu(__VA_ARGS__) |
| #define | vzext_vf8_u64m2(...) __riscv_vzext_vf8_u64m2(__VA_ARGS__) |
| #define | vzext_vf8_u64m2_m(...) __riscv_vzext_vf8_u64m2_tumu(__VA_ARGS__) |
| #define | vzext_vf8_u64m4(...) __riscv_vzext_vf8_u64m4(__VA_ARGS__) |
| #define | vzext_vf8_u64m4_m(...) __riscv_vzext_vf8_u64m4_tumu(__VA_ARGS__) |
| #define | vzext_vf8_u64m8(...) __riscv_vzext_vf8_u64m8(__VA_ARGS__) |
| #define | vzext_vf8_u64m8_m(...) __riscv_vzext_vf8_u64m8_tumu(__VA_ARGS__) |
| #define vaadd_vv_i16m1 | ( | ... | ) | __riscv_vaadd_vv_i16m1(__VA_ARGS__) |
| #define vaadd_vv_i16m1_m | ( | ... | ) | __riscv_vaadd_vv_i16m1_tumu(__VA_ARGS__) |
| #define vaadd_vv_i16m2 | ( | ... | ) | __riscv_vaadd_vv_i16m2(__VA_ARGS__) |
| #define vaadd_vv_i16m2_m | ( | ... | ) | __riscv_vaadd_vv_i16m2_tumu(__VA_ARGS__) |
| #define vaadd_vv_i16m4 | ( | ... | ) | __riscv_vaadd_vv_i16m4(__VA_ARGS__) |
| #define vaadd_vv_i16m4_m | ( | ... | ) | __riscv_vaadd_vv_i16m4_tumu(__VA_ARGS__) |
| #define vaadd_vv_i16m8 | ( | ... | ) | __riscv_vaadd_vv_i16m8(__VA_ARGS__) |
| #define vaadd_vv_i16m8_m | ( | ... | ) | __riscv_vaadd_vv_i16m8_tumu(__VA_ARGS__) |
| #define vaadd_vv_i16mf2 | ( | ... | ) | __riscv_vaadd_vv_i16mf2(__VA_ARGS__) |
| #define vaadd_vv_i16mf2_m | ( | ... | ) | __riscv_vaadd_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vaadd_vv_i16mf4 | ( | ... | ) | __riscv_vaadd_vv_i16mf4(__VA_ARGS__) |
| #define vaadd_vv_i16mf4_m | ( | ... | ) | __riscv_vaadd_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vaadd_vv_i32m1 | ( | ... | ) | __riscv_vaadd_vv_i32m1(__VA_ARGS__) |
| #define vaadd_vv_i32m1_m | ( | ... | ) | __riscv_vaadd_vv_i32m1_tumu(__VA_ARGS__) |
| #define vaadd_vv_i32m2 | ( | ... | ) | __riscv_vaadd_vv_i32m2(__VA_ARGS__) |
| #define vaadd_vv_i32m2_m | ( | ... | ) | __riscv_vaadd_vv_i32m2_tumu(__VA_ARGS__) |
| #define vaadd_vv_i32m4 | ( | ... | ) | __riscv_vaadd_vv_i32m4(__VA_ARGS__) |
| #define vaadd_vv_i32m4_m | ( | ... | ) | __riscv_vaadd_vv_i32m4_tumu(__VA_ARGS__) |
| #define vaadd_vv_i32m8 | ( | ... | ) | __riscv_vaadd_vv_i32m8(__VA_ARGS__) |
| #define vaadd_vv_i32m8_m | ( | ... | ) | __riscv_vaadd_vv_i32m8_tumu(__VA_ARGS__) |
| #define vaadd_vv_i32mf2 | ( | ... | ) | __riscv_vaadd_vv_i32mf2(__VA_ARGS__) |
| #define vaadd_vv_i32mf2_m | ( | ... | ) | __riscv_vaadd_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vaadd_vv_i64m1 | ( | ... | ) | __riscv_vaadd_vv_i64m1(__VA_ARGS__) |
| #define vaadd_vv_i64m1_m | ( | ... | ) | __riscv_vaadd_vv_i64m1_tumu(__VA_ARGS__) |
| #define vaadd_vv_i64m2 | ( | ... | ) | __riscv_vaadd_vv_i64m2(__VA_ARGS__) |
| #define vaadd_vv_i64m2_m | ( | ... | ) | __riscv_vaadd_vv_i64m2_tumu(__VA_ARGS__) |
| #define vaadd_vv_i64m4 | ( | ... | ) | __riscv_vaadd_vv_i64m4(__VA_ARGS__) |
| #define vaadd_vv_i64m4_m | ( | ... | ) | __riscv_vaadd_vv_i64m4_tumu(__VA_ARGS__) |
| #define vaadd_vv_i64m8 | ( | ... | ) | __riscv_vaadd_vv_i64m8(__VA_ARGS__) |
| #define vaadd_vv_i64m8_m | ( | ... | ) | __riscv_vaadd_vv_i64m8_tumu(__VA_ARGS__) |
| #define vaadd_vv_i8m1 | ( | ... | ) | __riscv_vaadd_vv_i8m1(__VA_ARGS__) |
| #define vaadd_vv_i8m1_m | ( | ... | ) | __riscv_vaadd_vv_i8m1_tumu(__VA_ARGS__) |
| #define vaadd_vv_i8m2 | ( | ... | ) | __riscv_vaadd_vv_i8m2(__VA_ARGS__) |
| #define vaadd_vv_i8m2_m | ( | ... | ) | __riscv_vaadd_vv_i8m2_tumu(__VA_ARGS__) |
| #define vaadd_vv_i8m4 | ( | ... | ) | __riscv_vaadd_vv_i8m4(__VA_ARGS__) |
| #define vaadd_vv_i8m4_m | ( | ... | ) | __riscv_vaadd_vv_i8m4_tumu(__VA_ARGS__) |
| #define vaadd_vv_i8m8 | ( | ... | ) | __riscv_vaadd_vv_i8m8(__VA_ARGS__) |
| #define vaadd_vv_i8m8_m | ( | ... | ) | __riscv_vaadd_vv_i8m8_tumu(__VA_ARGS__) |
| #define vaadd_vv_i8mf2 | ( | ... | ) | __riscv_vaadd_vv_i8mf2(__VA_ARGS__) |
| #define vaadd_vv_i8mf2_m | ( | ... | ) | __riscv_vaadd_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vaadd_vv_i8mf4 | ( | ... | ) | __riscv_vaadd_vv_i8mf4(__VA_ARGS__) |
| #define vaadd_vv_i8mf4_m | ( | ... | ) | __riscv_vaadd_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vaadd_vv_i8mf8 | ( | ... | ) | __riscv_vaadd_vv_i8mf8(__VA_ARGS__) |
| #define vaadd_vv_i8mf8_m | ( | ... | ) | __riscv_vaadd_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vaadd_vx_i16m1 | ( | ... | ) | __riscv_vaadd_vx_i16m1(__VA_ARGS__) |
| #define vaadd_vx_i16m1_m | ( | ... | ) | __riscv_vaadd_vx_i16m1_tumu(__VA_ARGS__) |
| #define vaadd_vx_i16m2 | ( | ... | ) | __riscv_vaadd_vx_i16m2(__VA_ARGS__) |
| #define vaadd_vx_i16m2_m | ( | ... | ) | __riscv_vaadd_vx_i16m2_tumu(__VA_ARGS__) |
| #define vaadd_vx_i16m4 | ( | ... | ) | __riscv_vaadd_vx_i16m4(__VA_ARGS__) |
| #define vaadd_vx_i16m4_m | ( | ... | ) | __riscv_vaadd_vx_i16m4_tumu(__VA_ARGS__) |
| #define vaadd_vx_i16m8 | ( | ... | ) | __riscv_vaadd_vx_i16m8(__VA_ARGS__) |
| #define vaadd_vx_i16m8_m | ( | ... | ) | __riscv_vaadd_vx_i16m8_tumu(__VA_ARGS__) |
| #define vaadd_vx_i16mf2 | ( | ... | ) | __riscv_vaadd_vx_i16mf2(__VA_ARGS__) |
| #define vaadd_vx_i16mf2_m | ( | ... | ) | __riscv_vaadd_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vaadd_vx_i16mf4 | ( | ... | ) | __riscv_vaadd_vx_i16mf4(__VA_ARGS__) |
| #define vaadd_vx_i16mf4_m | ( | ... | ) | __riscv_vaadd_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vaadd_vx_i32m1 | ( | ... | ) | __riscv_vaadd_vx_i32m1(__VA_ARGS__) |
| #define vaadd_vx_i32m1_m | ( | ... | ) | __riscv_vaadd_vx_i32m1_tumu(__VA_ARGS__) |
| #define vaadd_vx_i32m2 | ( | ... | ) | __riscv_vaadd_vx_i32m2(__VA_ARGS__) |
| #define vaadd_vx_i32m2_m | ( | ... | ) | __riscv_vaadd_vx_i32m2_tumu(__VA_ARGS__) |
| #define vaadd_vx_i32m4 | ( | ... | ) | __riscv_vaadd_vx_i32m4(__VA_ARGS__) |
| #define vaadd_vx_i32m4_m | ( | ... | ) | __riscv_vaadd_vx_i32m4_tumu(__VA_ARGS__) |
| #define vaadd_vx_i32m8 | ( | ... | ) | __riscv_vaadd_vx_i32m8(__VA_ARGS__) |
| #define vaadd_vx_i32m8_m | ( | ... | ) | __riscv_vaadd_vx_i32m8_tumu(__VA_ARGS__) |
| #define vaadd_vx_i32mf2 | ( | ... | ) | __riscv_vaadd_vx_i32mf2(__VA_ARGS__) |
| #define vaadd_vx_i32mf2_m | ( | ... | ) | __riscv_vaadd_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vaadd_vx_i64m1 | ( | ... | ) | __riscv_vaadd_vx_i64m1(__VA_ARGS__) |
| #define vaadd_vx_i64m1_m | ( | ... | ) | __riscv_vaadd_vx_i64m1_tumu(__VA_ARGS__) |
| #define vaadd_vx_i64m2 | ( | ... | ) | __riscv_vaadd_vx_i64m2(__VA_ARGS__) |
| #define vaadd_vx_i64m2_m | ( | ... | ) | __riscv_vaadd_vx_i64m2_tumu(__VA_ARGS__) |
| #define vaadd_vx_i64m4 | ( | ... | ) | __riscv_vaadd_vx_i64m4(__VA_ARGS__) |
| #define vaadd_vx_i64m4_m | ( | ... | ) | __riscv_vaadd_vx_i64m4_tumu(__VA_ARGS__) |
| #define vaadd_vx_i64m8 | ( | ... | ) | __riscv_vaadd_vx_i64m8(__VA_ARGS__) |
| #define vaadd_vx_i64m8_m | ( | ... | ) | __riscv_vaadd_vx_i64m8_tumu(__VA_ARGS__) |
| #define vaadd_vx_i8m1 | ( | ... | ) | __riscv_vaadd_vx_i8m1(__VA_ARGS__) |
| #define vaadd_vx_i8m1_m | ( | ... | ) | __riscv_vaadd_vx_i8m1_tumu(__VA_ARGS__) |
| #define vaadd_vx_i8m2 | ( | ... | ) | __riscv_vaadd_vx_i8m2(__VA_ARGS__) |
| #define vaadd_vx_i8m2_m | ( | ... | ) | __riscv_vaadd_vx_i8m2_tumu(__VA_ARGS__) |
| #define vaadd_vx_i8m4 | ( | ... | ) | __riscv_vaadd_vx_i8m4(__VA_ARGS__) |
| #define vaadd_vx_i8m4_m | ( | ... | ) | __riscv_vaadd_vx_i8m4_tumu(__VA_ARGS__) |
| #define vaadd_vx_i8m8 | ( | ... | ) | __riscv_vaadd_vx_i8m8(__VA_ARGS__) |
| #define vaadd_vx_i8m8_m | ( | ... | ) | __riscv_vaadd_vx_i8m8_tumu(__VA_ARGS__) |
| #define vaadd_vx_i8mf2 | ( | ... | ) | __riscv_vaadd_vx_i8mf2(__VA_ARGS__) |
| #define vaadd_vx_i8mf2_m | ( | ... | ) | __riscv_vaadd_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vaadd_vx_i8mf4 | ( | ... | ) | __riscv_vaadd_vx_i8mf4(__VA_ARGS__) |
| #define vaadd_vx_i8mf4_m | ( | ... | ) | __riscv_vaadd_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vaadd_vx_i8mf8 | ( | ... | ) | __riscv_vaadd_vx_i8mf8(__VA_ARGS__) |
| #define vaadd_vx_i8mf8_m | ( | ... | ) | __riscv_vaadd_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u16m1 | ( | ... | ) | __riscv_vaaddu_vv_u16m1(__VA_ARGS__) |
| #define vaaddu_vv_u16m1_m | ( | ... | ) | __riscv_vaaddu_vv_u16m1_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u16m2 | ( | ... | ) | __riscv_vaaddu_vv_u16m2(__VA_ARGS__) |
| #define vaaddu_vv_u16m2_m | ( | ... | ) | __riscv_vaaddu_vv_u16m2_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u16m4 | ( | ... | ) | __riscv_vaaddu_vv_u16m4(__VA_ARGS__) |
| #define vaaddu_vv_u16m4_m | ( | ... | ) | __riscv_vaaddu_vv_u16m4_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u16m8 | ( | ... | ) | __riscv_vaaddu_vv_u16m8(__VA_ARGS__) |
| #define vaaddu_vv_u16m8_m | ( | ... | ) | __riscv_vaaddu_vv_u16m8_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u16mf2 | ( | ... | ) | __riscv_vaaddu_vv_u16mf2(__VA_ARGS__) |
| #define vaaddu_vv_u16mf2_m | ( | ... | ) | __riscv_vaaddu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u16mf4 | ( | ... | ) | __riscv_vaaddu_vv_u16mf4(__VA_ARGS__) |
| #define vaaddu_vv_u16mf4_m | ( | ... | ) | __riscv_vaaddu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u32m1 | ( | ... | ) | __riscv_vaaddu_vv_u32m1(__VA_ARGS__) |
| #define vaaddu_vv_u32m1_m | ( | ... | ) | __riscv_vaaddu_vv_u32m1_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u32m2 | ( | ... | ) | __riscv_vaaddu_vv_u32m2(__VA_ARGS__) |
| #define vaaddu_vv_u32m2_m | ( | ... | ) | __riscv_vaaddu_vv_u32m2_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u32m4 | ( | ... | ) | __riscv_vaaddu_vv_u32m4(__VA_ARGS__) |
| #define vaaddu_vv_u32m4_m | ( | ... | ) | __riscv_vaaddu_vv_u32m4_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u32m8 | ( | ... | ) | __riscv_vaaddu_vv_u32m8(__VA_ARGS__) |
| #define vaaddu_vv_u32m8_m | ( | ... | ) | __riscv_vaaddu_vv_u32m8_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u32mf2 | ( | ... | ) | __riscv_vaaddu_vv_u32mf2(__VA_ARGS__) |
| #define vaaddu_vv_u32mf2_m | ( | ... | ) | __riscv_vaaddu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u64m1 | ( | ... | ) | __riscv_vaaddu_vv_u64m1(__VA_ARGS__) |
| #define vaaddu_vv_u64m1_m | ( | ... | ) | __riscv_vaaddu_vv_u64m1_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u64m2 | ( | ... | ) | __riscv_vaaddu_vv_u64m2(__VA_ARGS__) |
| #define vaaddu_vv_u64m2_m | ( | ... | ) | __riscv_vaaddu_vv_u64m2_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u64m4 | ( | ... | ) | __riscv_vaaddu_vv_u64m4(__VA_ARGS__) |
| #define vaaddu_vv_u64m4_m | ( | ... | ) | __riscv_vaaddu_vv_u64m4_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u64m8 | ( | ... | ) | __riscv_vaaddu_vv_u64m8(__VA_ARGS__) |
| #define vaaddu_vv_u64m8_m | ( | ... | ) | __riscv_vaaddu_vv_u64m8_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u8m1 | ( | ... | ) | __riscv_vaaddu_vv_u8m1(__VA_ARGS__) |
| #define vaaddu_vv_u8m1_m | ( | ... | ) | __riscv_vaaddu_vv_u8m1_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u8m2 | ( | ... | ) | __riscv_vaaddu_vv_u8m2(__VA_ARGS__) |
| #define vaaddu_vv_u8m2_m | ( | ... | ) | __riscv_vaaddu_vv_u8m2_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u8m4 | ( | ... | ) | __riscv_vaaddu_vv_u8m4(__VA_ARGS__) |
| #define vaaddu_vv_u8m4_m | ( | ... | ) | __riscv_vaaddu_vv_u8m4_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u8m8 | ( | ... | ) | __riscv_vaaddu_vv_u8m8(__VA_ARGS__) |
| #define vaaddu_vv_u8m8_m | ( | ... | ) | __riscv_vaaddu_vv_u8m8_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u8mf2 | ( | ... | ) | __riscv_vaaddu_vv_u8mf2(__VA_ARGS__) |
| #define vaaddu_vv_u8mf2_m | ( | ... | ) | __riscv_vaaddu_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u8mf4 | ( | ... | ) | __riscv_vaaddu_vv_u8mf4(__VA_ARGS__) |
| #define vaaddu_vv_u8mf4_m | ( | ... | ) | __riscv_vaaddu_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vaaddu_vv_u8mf8 | ( | ... | ) | __riscv_vaaddu_vv_u8mf8(__VA_ARGS__) |
| #define vaaddu_vv_u8mf8_m | ( | ... | ) | __riscv_vaaddu_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u16m1 | ( | ... | ) | __riscv_vaaddu_vx_u16m1(__VA_ARGS__) |
| #define vaaddu_vx_u16m1_m | ( | ... | ) | __riscv_vaaddu_vx_u16m1_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u16m2 | ( | ... | ) | __riscv_vaaddu_vx_u16m2(__VA_ARGS__) |
| #define vaaddu_vx_u16m2_m | ( | ... | ) | __riscv_vaaddu_vx_u16m2_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u16m4 | ( | ... | ) | __riscv_vaaddu_vx_u16m4(__VA_ARGS__) |
| #define vaaddu_vx_u16m4_m | ( | ... | ) | __riscv_vaaddu_vx_u16m4_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u16m8 | ( | ... | ) | __riscv_vaaddu_vx_u16m8(__VA_ARGS__) |
| #define vaaddu_vx_u16m8_m | ( | ... | ) | __riscv_vaaddu_vx_u16m8_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u16mf2 | ( | ... | ) | __riscv_vaaddu_vx_u16mf2(__VA_ARGS__) |
| #define vaaddu_vx_u16mf2_m | ( | ... | ) | __riscv_vaaddu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u16mf4 | ( | ... | ) | __riscv_vaaddu_vx_u16mf4(__VA_ARGS__) |
| #define vaaddu_vx_u16mf4_m | ( | ... | ) | __riscv_vaaddu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u32m1 | ( | ... | ) | __riscv_vaaddu_vx_u32m1(__VA_ARGS__) |
| #define vaaddu_vx_u32m1_m | ( | ... | ) | __riscv_vaaddu_vx_u32m1_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u32m2 | ( | ... | ) | __riscv_vaaddu_vx_u32m2(__VA_ARGS__) |
| #define vaaddu_vx_u32m2_m | ( | ... | ) | __riscv_vaaddu_vx_u32m2_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u32m4 | ( | ... | ) | __riscv_vaaddu_vx_u32m4(__VA_ARGS__) |
| #define vaaddu_vx_u32m4_m | ( | ... | ) | __riscv_vaaddu_vx_u32m4_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u32m8 | ( | ... | ) | __riscv_vaaddu_vx_u32m8(__VA_ARGS__) |
| #define vaaddu_vx_u32m8_m | ( | ... | ) | __riscv_vaaddu_vx_u32m8_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u32mf2 | ( | ... | ) | __riscv_vaaddu_vx_u32mf2(__VA_ARGS__) |
| #define vaaddu_vx_u32mf2_m | ( | ... | ) | __riscv_vaaddu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u64m1 | ( | ... | ) | __riscv_vaaddu_vx_u64m1(__VA_ARGS__) |
| #define vaaddu_vx_u64m1_m | ( | ... | ) | __riscv_vaaddu_vx_u64m1_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u64m2 | ( | ... | ) | __riscv_vaaddu_vx_u64m2(__VA_ARGS__) |
| #define vaaddu_vx_u64m2_m | ( | ... | ) | __riscv_vaaddu_vx_u64m2_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u64m4 | ( | ... | ) | __riscv_vaaddu_vx_u64m4(__VA_ARGS__) |
| #define vaaddu_vx_u64m4_m | ( | ... | ) | __riscv_vaaddu_vx_u64m4_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u64m8 | ( | ... | ) | __riscv_vaaddu_vx_u64m8(__VA_ARGS__) |
| #define vaaddu_vx_u64m8_m | ( | ... | ) | __riscv_vaaddu_vx_u64m8_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u8m1 | ( | ... | ) | __riscv_vaaddu_vx_u8m1(__VA_ARGS__) |
| #define vaaddu_vx_u8m1_m | ( | ... | ) | __riscv_vaaddu_vx_u8m1_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u8m2 | ( | ... | ) | __riscv_vaaddu_vx_u8m2(__VA_ARGS__) |
| #define vaaddu_vx_u8m2_m | ( | ... | ) | __riscv_vaaddu_vx_u8m2_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u8m4 | ( | ... | ) | __riscv_vaaddu_vx_u8m4(__VA_ARGS__) |
| #define vaaddu_vx_u8m4_m | ( | ... | ) | __riscv_vaaddu_vx_u8m4_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u8m8 | ( | ... | ) | __riscv_vaaddu_vx_u8m8(__VA_ARGS__) |
| #define vaaddu_vx_u8m8_m | ( | ... | ) | __riscv_vaaddu_vx_u8m8_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u8mf2 | ( | ... | ) | __riscv_vaaddu_vx_u8mf2(__VA_ARGS__) |
| #define vaaddu_vx_u8mf2_m | ( | ... | ) | __riscv_vaaddu_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u8mf4 | ( | ... | ) | __riscv_vaaddu_vx_u8mf4(__VA_ARGS__) |
| #define vaaddu_vx_u8mf4_m | ( | ... | ) | __riscv_vaaddu_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vaaddu_vx_u8mf8 | ( | ... | ) | __riscv_vaaddu_vx_u8mf8(__VA_ARGS__) |
| #define vaaddu_vx_u8mf8_m | ( | ... | ) | __riscv_vaaddu_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vadc_vvm_i16m1 | ( | ... | ) | __riscv_vadc_vvm_i16m1(__VA_ARGS__) |
| #define vadc_vvm_i16m2 | ( | ... | ) | __riscv_vadc_vvm_i16m2(__VA_ARGS__) |
| #define vadc_vvm_i16m4 | ( | ... | ) | __riscv_vadc_vvm_i16m4(__VA_ARGS__) |
| #define vadc_vvm_i16m8 | ( | ... | ) | __riscv_vadc_vvm_i16m8(__VA_ARGS__) |
| #define vadc_vvm_i16mf2 | ( | ... | ) | __riscv_vadc_vvm_i16mf2(__VA_ARGS__) |
| #define vadc_vvm_i16mf4 | ( | ... | ) | __riscv_vadc_vvm_i16mf4(__VA_ARGS__) |
| #define vadc_vvm_i32m1 | ( | ... | ) | __riscv_vadc_vvm_i32m1(__VA_ARGS__) |
| #define vadc_vvm_i32m2 | ( | ... | ) | __riscv_vadc_vvm_i32m2(__VA_ARGS__) |
| #define vadc_vvm_i32m4 | ( | ... | ) | __riscv_vadc_vvm_i32m4(__VA_ARGS__) |
| #define vadc_vvm_i32m8 | ( | ... | ) | __riscv_vadc_vvm_i32m8(__VA_ARGS__) |
| #define vadc_vvm_i32mf2 | ( | ... | ) | __riscv_vadc_vvm_i32mf2(__VA_ARGS__) |
| #define vadc_vvm_i64m1 | ( | ... | ) | __riscv_vadc_vvm_i64m1(__VA_ARGS__) |
| #define vadc_vvm_i64m2 | ( | ... | ) | __riscv_vadc_vvm_i64m2(__VA_ARGS__) |
| #define vadc_vvm_i64m4 | ( | ... | ) | __riscv_vadc_vvm_i64m4(__VA_ARGS__) |
| #define vadc_vvm_i64m8 | ( | ... | ) | __riscv_vadc_vvm_i64m8(__VA_ARGS__) |
| #define vadc_vvm_i8m1 | ( | ... | ) | __riscv_vadc_vvm_i8m1(__VA_ARGS__) |
| #define vadc_vvm_i8m2 | ( | ... | ) | __riscv_vadc_vvm_i8m2(__VA_ARGS__) |
| #define vadc_vvm_i8m4 | ( | ... | ) | __riscv_vadc_vvm_i8m4(__VA_ARGS__) |
| #define vadc_vvm_i8m8 | ( | ... | ) | __riscv_vadc_vvm_i8m8(__VA_ARGS__) |
| #define vadc_vvm_i8mf2 | ( | ... | ) | __riscv_vadc_vvm_i8mf2(__VA_ARGS__) |
| #define vadc_vvm_i8mf4 | ( | ... | ) | __riscv_vadc_vvm_i8mf4(__VA_ARGS__) |
| #define vadc_vvm_i8mf8 | ( | ... | ) | __riscv_vadc_vvm_i8mf8(__VA_ARGS__) |
| #define vadc_vvm_u16m1 | ( | ... | ) | __riscv_vadc_vvm_u16m1(__VA_ARGS__) |
| #define vadc_vvm_u16m2 | ( | ... | ) | __riscv_vadc_vvm_u16m2(__VA_ARGS__) |
| #define vadc_vvm_u16m4 | ( | ... | ) | __riscv_vadc_vvm_u16m4(__VA_ARGS__) |
| #define vadc_vvm_u16m8 | ( | ... | ) | __riscv_vadc_vvm_u16m8(__VA_ARGS__) |
| #define vadc_vvm_u16mf2 | ( | ... | ) | __riscv_vadc_vvm_u16mf2(__VA_ARGS__) |
| #define vadc_vvm_u16mf4 | ( | ... | ) | __riscv_vadc_vvm_u16mf4(__VA_ARGS__) |
| #define vadc_vvm_u32m1 | ( | ... | ) | __riscv_vadc_vvm_u32m1(__VA_ARGS__) |
| #define vadc_vvm_u32m2 | ( | ... | ) | __riscv_vadc_vvm_u32m2(__VA_ARGS__) |
| #define vadc_vvm_u32m4 | ( | ... | ) | __riscv_vadc_vvm_u32m4(__VA_ARGS__) |
| #define vadc_vvm_u32m8 | ( | ... | ) | __riscv_vadc_vvm_u32m8(__VA_ARGS__) |
| #define vadc_vvm_u32mf2 | ( | ... | ) | __riscv_vadc_vvm_u32mf2(__VA_ARGS__) |
| #define vadc_vvm_u64m1 | ( | ... | ) | __riscv_vadc_vvm_u64m1(__VA_ARGS__) |
| #define vadc_vvm_u64m2 | ( | ... | ) | __riscv_vadc_vvm_u64m2(__VA_ARGS__) |
| #define vadc_vvm_u64m4 | ( | ... | ) | __riscv_vadc_vvm_u64m4(__VA_ARGS__) |
| #define vadc_vvm_u64m8 | ( | ... | ) | __riscv_vadc_vvm_u64m8(__VA_ARGS__) |
| #define vadc_vvm_u8m1 | ( | ... | ) | __riscv_vadc_vvm_u8m1(__VA_ARGS__) |
| #define vadc_vvm_u8m2 | ( | ... | ) | __riscv_vadc_vvm_u8m2(__VA_ARGS__) |
| #define vadc_vvm_u8m4 | ( | ... | ) | __riscv_vadc_vvm_u8m4(__VA_ARGS__) |
| #define vadc_vvm_u8m8 | ( | ... | ) | __riscv_vadc_vvm_u8m8(__VA_ARGS__) |
| #define vadc_vvm_u8mf2 | ( | ... | ) | __riscv_vadc_vvm_u8mf2(__VA_ARGS__) |
| #define vadc_vvm_u8mf4 | ( | ... | ) | __riscv_vadc_vvm_u8mf4(__VA_ARGS__) |
| #define vadc_vvm_u8mf8 | ( | ... | ) | __riscv_vadc_vvm_u8mf8(__VA_ARGS__) |
| #define vadc_vxm_i16m1 | ( | ... | ) | __riscv_vadc_vxm_i16m1(__VA_ARGS__) |
| #define vadc_vxm_i16m2 | ( | ... | ) | __riscv_vadc_vxm_i16m2(__VA_ARGS__) |
| #define vadc_vxm_i16m4 | ( | ... | ) | __riscv_vadc_vxm_i16m4(__VA_ARGS__) |
| #define vadc_vxm_i16m8 | ( | ... | ) | __riscv_vadc_vxm_i16m8(__VA_ARGS__) |
| #define vadc_vxm_i16mf2 | ( | ... | ) | __riscv_vadc_vxm_i16mf2(__VA_ARGS__) |
| #define vadc_vxm_i16mf4 | ( | ... | ) | __riscv_vadc_vxm_i16mf4(__VA_ARGS__) |
| #define vadc_vxm_i32m1 | ( | ... | ) | __riscv_vadc_vxm_i32m1(__VA_ARGS__) |
| #define vadc_vxm_i32m2 | ( | ... | ) | __riscv_vadc_vxm_i32m2(__VA_ARGS__) |
| #define vadc_vxm_i32m4 | ( | ... | ) | __riscv_vadc_vxm_i32m4(__VA_ARGS__) |
| #define vadc_vxm_i32m8 | ( | ... | ) | __riscv_vadc_vxm_i32m8(__VA_ARGS__) |
| #define vadc_vxm_i32mf2 | ( | ... | ) | __riscv_vadc_vxm_i32mf2(__VA_ARGS__) |
| #define vadc_vxm_i64m1 | ( | ... | ) | __riscv_vadc_vxm_i64m1(__VA_ARGS__) |
| #define vadc_vxm_i64m2 | ( | ... | ) | __riscv_vadc_vxm_i64m2(__VA_ARGS__) |
| #define vadc_vxm_i64m4 | ( | ... | ) | __riscv_vadc_vxm_i64m4(__VA_ARGS__) |
| #define vadc_vxm_i64m8 | ( | ... | ) | __riscv_vadc_vxm_i64m8(__VA_ARGS__) |
| #define vadc_vxm_i8m1 | ( | ... | ) | __riscv_vadc_vxm_i8m1(__VA_ARGS__) |
| #define vadc_vxm_i8m2 | ( | ... | ) | __riscv_vadc_vxm_i8m2(__VA_ARGS__) |
| #define vadc_vxm_i8m4 | ( | ... | ) | __riscv_vadc_vxm_i8m4(__VA_ARGS__) |
| #define vadc_vxm_i8m8 | ( | ... | ) | __riscv_vadc_vxm_i8m8(__VA_ARGS__) |
| #define vadc_vxm_i8mf2 | ( | ... | ) | __riscv_vadc_vxm_i8mf2(__VA_ARGS__) |
| #define vadc_vxm_i8mf4 | ( | ... | ) | __riscv_vadc_vxm_i8mf4(__VA_ARGS__) |
| #define vadc_vxm_i8mf8 | ( | ... | ) | __riscv_vadc_vxm_i8mf8(__VA_ARGS__) |
| #define vadc_vxm_u16m1 | ( | ... | ) | __riscv_vadc_vxm_u16m1(__VA_ARGS__) |
| #define vadc_vxm_u16m2 | ( | ... | ) | __riscv_vadc_vxm_u16m2(__VA_ARGS__) |
| #define vadc_vxm_u16m4 | ( | ... | ) | __riscv_vadc_vxm_u16m4(__VA_ARGS__) |
| #define vadc_vxm_u16m8 | ( | ... | ) | __riscv_vadc_vxm_u16m8(__VA_ARGS__) |
| #define vadc_vxm_u16mf2 | ( | ... | ) | __riscv_vadc_vxm_u16mf2(__VA_ARGS__) |
| #define vadc_vxm_u16mf4 | ( | ... | ) | __riscv_vadc_vxm_u16mf4(__VA_ARGS__) |
| #define vadc_vxm_u32m1 | ( | ... | ) | __riscv_vadc_vxm_u32m1(__VA_ARGS__) |
| #define vadc_vxm_u32m2 | ( | ... | ) | __riscv_vadc_vxm_u32m2(__VA_ARGS__) |
| #define vadc_vxm_u32m4 | ( | ... | ) | __riscv_vadc_vxm_u32m4(__VA_ARGS__) |
| #define vadc_vxm_u32m8 | ( | ... | ) | __riscv_vadc_vxm_u32m8(__VA_ARGS__) |
| #define vadc_vxm_u32mf2 | ( | ... | ) | __riscv_vadc_vxm_u32mf2(__VA_ARGS__) |
| #define vadc_vxm_u64m1 | ( | ... | ) | __riscv_vadc_vxm_u64m1(__VA_ARGS__) |
| #define vadc_vxm_u64m2 | ( | ... | ) | __riscv_vadc_vxm_u64m2(__VA_ARGS__) |
| #define vadc_vxm_u64m4 | ( | ... | ) | __riscv_vadc_vxm_u64m4(__VA_ARGS__) |
| #define vadc_vxm_u64m8 | ( | ... | ) | __riscv_vadc_vxm_u64m8(__VA_ARGS__) |
| #define vadc_vxm_u8m1 | ( | ... | ) | __riscv_vadc_vxm_u8m1(__VA_ARGS__) |
| #define vadc_vxm_u8m2 | ( | ... | ) | __riscv_vadc_vxm_u8m2(__VA_ARGS__) |
| #define vadc_vxm_u8m4 | ( | ... | ) | __riscv_vadc_vxm_u8m4(__VA_ARGS__) |
| #define vadc_vxm_u8m8 | ( | ... | ) | __riscv_vadc_vxm_u8m8(__VA_ARGS__) |
| #define vadc_vxm_u8mf2 | ( | ... | ) | __riscv_vadc_vxm_u8mf2(__VA_ARGS__) |
| #define vadc_vxm_u8mf4 | ( | ... | ) | __riscv_vadc_vxm_u8mf4(__VA_ARGS__) |
| #define vadc_vxm_u8mf8 | ( | ... | ) | __riscv_vadc_vxm_u8mf8(__VA_ARGS__) |
| #define vadd_vv_i16m1 | ( | ... | ) | __riscv_vadd_vv_i16m1(__VA_ARGS__) |
| #define vadd_vv_i16m1_m | ( | ... | ) | __riscv_vadd_vv_i16m1_tumu(__VA_ARGS__) |
| #define vadd_vv_i16m2 | ( | ... | ) | __riscv_vadd_vv_i16m2(__VA_ARGS__) |
| #define vadd_vv_i16m2_m | ( | ... | ) | __riscv_vadd_vv_i16m2_tumu(__VA_ARGS__) |
| #define vadd_vv_i16m4 | ( | ... | ) | __riscv_vadd_vv_i16m4(__VA_ARGS__) |
| #define vadd_vv_i16m4_m | ( | ... | ) | __riscv_vadd_vv_i16m4_tumu(__VA_ARGS__) |
| #define vadd_vv_i16m8 | ( | ... | ) | __riscv_vadd_vv_i16m8(__VA_ARGS__) |
| #define vadd_vv_i16m8_m | ( | ... | ) | __riscv_vadd_vv_i16m8_tumu(__VA_ARGS__) |
| #define vadd_vv_i16mf2 | ( | ... | ) | __riscv_vadd_vv_i16mf2(__VA_ARGS__) |
| #define vadd_vv_i16mf2_m | ( | ... | ) | __riscv_vadd_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vadd_vv_i16mf4 | ( | ... | ) | __riscv_vadd_vv_i16mf4(__VA_ARGS__) |
| #define vadd_vv_i16mf4_m | ( | ... | ) | __riscv_vadd_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vadd_vv_i32m1 | ( | ... | ) | __riscv_vadd_vv_i32m1(__VA_ARGS__) |
| #define vadd_vv_i32m1_m | ( | ... | ) | __riscv_vadd_vv_i32m1_tumu(__VA_ARGS__) |
| #define vadd_vv_i32m2 | ( | ... | ) | __riscv_vadd_vv_i32m2(__VA_ARGS__) |
| #define vadd_vv_i32m2_m | ( | ... | ) | __riscv_vadd_vv_i32m2_tumu(__VA_ARGS__) |
| #define vadd_vv_i32m4 | ( | ... | ) | __riscv_vadd_vv_i32m4(__VA_ARGS__) |
| #define vadd_vv_i32m4_m | ( | ... | ) | __riscv_vadd_vv_i32m4_tumu(__VA_ARGS__) |
| #define vadd_vv_i32m8 | ( | ... | ) | __riscv_vadd_vv_i32m8(__VA_ARGS__) |
| #define vadd_vv_i32m8_m | ( | ... | ) | __riscv_vadd_vv_i32m8_tumu(__VA_ARGS__) |
| #define vadd_vv_i32mf2 | ( | ... | ) | __riscv_vadd_vv_i32mf2(__VA_ARGS__) |
| #define vadd_vv_i32mf2_m | ( | ... | ) | __riscv_vadd_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vadd_vv_i64m1 | ( | ... | ) | __riscv_vadd_vv_i64m1(__VA_ARGS__) |
| #define vadd_vv_i64m1_m | ( | ... | ) | __riscv_vadd_vv_i64m1_tumu(__VA_ARGS__) |
| #define vadd_vv_i64m2 | ( | ... | ) | __riscv_vadd_vv_i64m2(__VA_ARGS__) |
| #define vadd_vv_i64m2_m | ( | ... | ) | __riscv_vadd_vv_i64m2_tumu(__VA_ARGS__) |
| #define vadd_vv_i64m4 | ( | ... | ) | __riscv_vadd_vv_i64m4(__VA_ARGS__) |
| #define vadd_vv_i64m4_m | ( | ... | ) | __riscv_vadd_vv_i64m4_tumu(__VA_ARGS__) |
| #define vadd_vv_i64m8 | ( | ... | ) | __riscv_vadd_vv_i64m8(__VA_ARGS__) |
| #define vadd_vv_i64m8_m | ( | ... | ) | __riscv_vadd_vv_i64m8_tumu(__VA_ARGS__) |
| #define vadd_vv_i8m1 | ( | ... | ) | __riscv_vadd_vv_i8m1(__VA_ARGS__) |
| #define vadd_vv_i8m1_m | ( | ... | ) | __riscv_vadd_vv_i8m1_tumu(__VA_ARGS__) |
| #define vadd_vv_i8m2 | ( | ... | ) | __riscv_vadd_vv_i8m2(__VA_ARGS__) |
| #define vadd_vv_i8m2_m | ( | ... | ) | __riscv_vadd_vv_i8m2_tumu(__VA_ARGS__) |
| #define vadd_vv_i8m4 | ( | ... | ) | __riscv_vadd_vv_i8m4(__VA_ARGS__) |
| #define vadd_vv_i8m4_m | ( | ... | ) | __riscv_vadd_vv_i8m4_tumu(__VA_ARGS__) |
| #define vadd_vv_i8m8 | ( | ... | ) | __riscv_vadd_vv_i8m8(__VA_ARGS__) |
| #define vadd_vv_i8m8_m | ( | ... | ) | __riscv_vadd_vv_i8m8_tumu(__VA_ARGS__) |
| #define vadd_vv_i8mf2 | ( | ... | ) | __riscv_vadd_vv_i8mf2(__VA_ARGS__) |
| #define vadd_vv_i8mf2_m | ( | ... | ) | __riscv_vadd_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vadd_vv_i8mf4 | ( | ... | ) | __riscv_vadd_vv_i8mf4(__VA_ARGS__) |
| #define vadd_vv_i8mf4_m | ( | ... | ) | __riscv_vadd_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vadd_vv_i8mf8 | ( | ... | ) | __riscv_vadd_vv_i8mf8(__VA_ARGS__) |
| #define vadd_vv_i8mf8_m | ( | ... | ) | __riscv_vadd_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vadd_vv_u16m1 | ( | ... | ) | __riscv_vadd_vv_u16m1(__VA_ARGS__) |
| #define vadd_vv_u16m1_m | ( | ... | ) | __riscv_vadd_vv_u16m1_tumu(__VA_ARGS__) |
| #define vadd_vv_u16m2 | ( | ... | ) | __riscv_vadd_vv_u16m2(__VA_ARGS__) |
| #define vadd_vv_u16m2_m | ( | ... | ) | __riscv_vadd_vv_u16m2_tumu(__VA_ARGS__) |
| #define vadd_vv_u16m4 | ( | ... | ) | __riscv_vadd_vv_u16m4(__VA_ARGS__) |
| #define vadd_vv_u16m4_m | ( | ... | ) | __riscv_vadd_vv_u16m4_tumu(__VA_ARGS__) |
| #define vadd_vv_u16m8 | ( | ... | ) | __riscv_vadd_vv_u16m8(__VA_ARGS__) |
| #define vadd_vv_u16m8_m | ( | ... | ) | __riscv_vadd_vv_u16m8_tumu(__VA_ARGS__) |
| #define vadd_vv_u16mf2 | ( | ... | ) | __riscv_vadd_vv_u16mf2(__VA_ARGS__) |
| #define vadd_vv_u16mf2_m | ( | ... | ) | __riscv_vadd_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vadd_vv_u16mf4 | ( | ... | ) | __riscv_vadd_vv_u16mf4(__VA_ARGS__) |
| #define vadd_vv_u16mf4_m | ( | ... | ) | __riscv_vadd_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vadd_vv_u32m1 | ( | ... | ) | __riscv_vadd_vv_u32m1(__VA_ARGS__) |
| #define vadd_vv_u32m1_m | ( | ... | ) | __riscv_vadd_vv_u32m1_tumu(__VA_ARGS__) |
| #define vadd_vv_u32m2 | ( | ... | ) | __riscv_vadd_vv_u32m2(__VA_ARGS__) |
| #define vadd_vv_u32m2_m | ( | ... | ) | __riscv_vadd_vv_u32m2_tumu(__VA_ARGS__) |
| #define vadd_vv_u32m4 | ( | ... | ) | __riscv_vadd_vv_u32m4(__VA_ARGS__) |
| #define vadd_vv_u32m4_m | ( | ... | ) | __riscv_vadd_vv_u32m4_tumu(__VA_ARGS__) |
| #define vadd_vv_u32m8 | ( | ... | ) | __riscv_vadd_vv_u32m8(__VA_ARGS__) |
| #define vadd_vv_u32m8_m | ( | ... | ) | __riscv_vadd_vv_u32m8_tumu(__VA_ARGS__) |
| #define vadd_vv_u32mf2 | ( | ... | ) | __riscv_vadd_vv_u32mf2(__VA_ARGS__) |
| #define vadd_vv_u32mf2_m | ( | ... | ) | __riscv_vadd_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vadd_vv_u64m1 | ( | ... | ) | __riscv_vadd_vv_u64m1(__VA_ARGS__) |
| #define vadd_vv_u64m1_m | ( | ... | ) | __riscv_vadd_vv_u64m1_tumu(__VA_ARGS__) |
| #define vadd_vv_u64m2 | ( | ... | ) | __riscv_vadd_vv_u64m2(__VA_ARGS__) |
| #define vadd_vv_u64m2_m | ( | ... | ) | __riscv_vadd_vv_u64m2_tumu(__VA_ARGS__) |
| #define vadd_vv_u64m4 | ( | ... | ) | __riscv_vadd_vv_u64m4(__VA_ARGS__) |
| #define vadd_vv_u64m4_m | ( | ... | ) | __riscv_vadd_vv_u64m4_tumu(__VA_ARGS__) |
| #define vadd_vv_u64m8 | ( | ... | ) | __riscv_vadd_vv_u64m8(__VA_ARGS__) |
| #define vadd_vv_u64m8_m | ( | ... | ) | __riscv_vadd_vv_u64m8_tumu(__VA_ARGS__) |
| #define vadd_vv_u8m1 | ( | ... | ) | __riscv_vadd_vv_u8m1(__VA_ARGS__) |
| #define vadd_vv_u8m1_m | ( | ... | ) | __riscv_vadd_vv_u8m1_tumu(__VA_ARGS__) |
| #define vadd_vv_u8m2 | ( | ... | ) | __riscv_vadd_vv_u8m2(__VA_ARGS__) |
| #define vadd_vv_u8m2_m | ( | ... | ) | __riscv_vadd_vv_u8m2_tumu(__VA_ARGS__) |
| #define vadd_vv_u8m4 | ( | ... | ) | __riscv_vadd_vv_u8m4(__VA_ARGS__) |
| #define vadd_vv_u8m4_m | ( | ... | ) | __riscv_vadd_vv_u8m4_tumu(__VA_ARGS__) |
| #define vadd_vv_u8m8 | ( | ... | ) | __riscv_vadd_vv_u8m8(__VA_ARGS__) |
| #define vadd_vv_u8m8_m | ( | ... | ) | __riscv_vadd_vv_u8m8_tumu(__VA_ARGS__) |
| #define vadd_vv_u8mf2 | ( | ... | ) | __riscv_vadd_vv_u8mf2(__VA_ARGS__) |
| #define vadd_vv_u8mf2_m | ( | ... | ) | __riscv_vadd_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vadd_vv_u8mf4 | ( | ... | ) | __riscv_vadd_vv_u8mf4(__VA_ARGS__) |
| #define vadd_vv_u8mf4_m | ( | ... | ) | __riscv_vadd_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vadd_vv_u8mf8 | ( | ... | ) | __riscv_vadd_vv_u8mf8(__VA_ARGS__) |
| #define vadd_vv_u8mf8_m | ( | ... | ) | __riscv_vadd_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vadd_vx_i16m1 | ( | ... | ) | __riscv_vadd_vx_i16m1(__VA_ARGS__) |
| #define vadd_vx_i16m1_m | ( | ... | ) | __riscv_vadd_vx_i16m1_tumu(__VA_ARGS__) |
| #define vadd_vx_i16m2 | ( | ... | ) | __riscv_vadd_vx_i16m2(__VA_ARGS__) |
| #define vadd_vx_i16m2_m | ( | ... | ) | __riscv_vadd_vx_i16m2_tumu(__VA_ARGS__) |
| #define vadd_vx_i16m4 | ( | ... | ) | __riscv_vadd_vx_i16m4(__VA_ARGS__) |
| #define vadd_vx_i16m4_m | ( | ... | ) | __riscv_vadd_vx_i16m4_tumu(__VA_ARGS__) |
| #define vadd_vx_i16m8 | ( | ... | ) | __riscv_vadd_vx_i16m8(__VA_ARGS__) |
| #define vadd_vx_i16m8_m | ( | ... | ) | __riscv_vadd_vx_i16m8_tumu(__VA_ARGS__) |
| #define vadd_vx_i16mf2 | ( | ... | ) | __riscv_vadd_vx_i16mf2(__VA_ARGS__) |
| #define vadd_vx_i16mf2_m | ( | ... | ) | __riscv_vadd_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vadd_vx_i16mf4 | ( | ... | ) | __riscv_vadd_vx_i16mf4(__VA_ARGS__) |
| #define vadd_vx_i16mf4_m | ( | ... | ) | __riscv_vadd_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vadd_vx_i32m1 | ( | ... | ) | __riscv_vadd_vx_i32m1(__VA_ARGS__) |
| #define vadd_vx_i32m1_m | ( | ... | ) | __riscv_vadd_vx_i32m1_tumu(__VA_ARGS__) |
| #define vadd_vx_i32m2 | ( | ... | ) | __riscv_vadd_vx_i32m2(__VA_ARGS__) |
| #define vadd_vx_i32m2_m | ( | ... | ) | __riscv_vadd_vx_i32m2_tumu(__VA_ARGS__) |
| #define vadd_vx_i32m4 | ( | ... | ) | __riscv_vadd_vx_i32m4(__VA_ARGS__) |
| #define vadd_vx_i32m4_m | ( | ... | ) | __riscv_vadd_vx_i32m4_tumu(__VA_ARGS__) |
| #define vadd_vx_i32m8 | ( | ... | ) | __riscv_vadd_vx_i32m8(__VA_ARGS__) |
| #define vadd_vx_i32m8_m | ( | ... | ) | __riscv_vadd_vx_i32m8_tumu(__VA_ARGS__) |
| #define vadd_vx_i32mf2 | ( | ... | ) | __riscv_vadd_vx_i32mf2(__VA_ARGS__) |
| #define vadd_vx_i32mf2_m | ( | ... | ) | __riscv_vadd_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vadd_vx_i64m1 | ( | ... | ) | __riscv_vadd_vx_i64m1(__VA_ARGS__) |
| #define vadd_vx_i64m1_m | ( | ... | ) | __riscv_vadd_vx_i64m1_tumu(__VA_ARGS__) |
| #define vadd_vx_i64m2 | ( | ... | ) | __riscv_vadd_vx_i64m2(__VA_ARGS__) |
| #define vadd_vx_i64m2_m | ( | ... | ) | __riscv_vadd_vx_i64m2_tumu(__VA_ARGS__) |
| #define vadd_vx_i64m4 | ( | ... | ) | __riscv_vadd_vx_i64m4(__VA_ARGS__) |
| #define vadd_vx_i64m4_m | ( | ... | ) | __riscv_vadd_vx_i64m4_tumu(__VA_ARGS__) |
| #define vadd_vx_i64m8 | ( | ... | ) | __riscv_vadd_vx_i64m8(__VA_ARGS__) |
| #define vadd_vx_i64m8_m | ( | ... | ) | __riscv_vadd_vx_i64m8_tumu(__VA_ARGS__) |
| #define vadd_vx_i8m1 | ( | ... | ) | __riscv_vadd_vx_i8m1(__VA_ARGS__) |
| #define vadd_vx_i8m1_m | ( | ... | ) | __riscv_vadd_vx_i8m1_tumu(__VA_ARGS__) |
| #define vadd_vx_i8m2 | ( | ... | ) | __riscv_vadd_vx_i8m2(__VA_ARGS__) |
| #define vadd_vx_i8m2_m | ( | ... | ) | __riscv_vadd_vx_i8m2_tumu(__VA_ARGS__) |
| #define vadd_vx_i8m4 | ( | ... | ) | __riscv_vadd_vx_i8m4(__VA_ARGS__) |
| #define vadd_vx_i8m4_m | ( | ... | ) | __riscv_vadd_vx_i8m4_tumu(__VA_ARGS__) |
| #define vadd_vx_i8m8 | ( | ... | ) | __riscv_vadd_vx_i8m8(__VA_ARGS__) |
| #define vadd_vx_i8m8_m | ( | ... | ) | __riscv_vadd_vx_i8m8_tumu(__VA_ARGS__) |
| #define vadd_vx_i8mf2 | ( | ... | ) | __riscv_vadd_vx_i8mf2(__VA_ARGS__) |
| #define vadd_vx_i8mf2_m | ( | ... | ) | __riscv_vadd_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vadd_vx_i8mf4 | ( | ... | ) | __riscv_vadd_vx_i8mf4(__VA_ARGS__) |
| #define vadd_vx_i8mf4_m | ( | ... | ) | __riscv_vadd_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vadd_vx_i8mf8 | ( | ... | ) | __riscv_vadd_vx_i8mf8(__VA_ARGS__) |
| #define vadd_vx_i8mf8_m | ( | ... | ) | __riscv_vadd_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vadd_vx_u16m1 | ( | ... | ) | __riscv_vadd_vx_u16m1(__VA_ARGS__) |
| #define vadd_vx_u16m1_m | ( | ... | ) | __riscv_vadd_vx_u16m1_tumu(__VA_ARGS__) |
| #define vadd_vx_u16m2 | ( | ... | ) | __riscv_vadd_vx_u16m2(__VA_ARGS__) |
| #define vadd_vx_u16m2_m | ( | ... | ) | __riscv_vadd_vx_u16m2_tumu(__VA_ARGS__) |
| #define vadd_vx_u16m4 | ( | ... | ) | __riscv_vadd_vx_u16m4(__VA_ARGS__) |
| #define vadd_vx_u16m4_m | ( | ... | ) | __riscv_vadd_vx_u16m4_tumu(__VA_ARGS__) |
| #define vadd_vx_u16m8 | ( | ... | ) | __riscv_vadd_vx_u16m8(__VA_ARGS__) |
| #define vadd_vx_u16m8_m | ( | ... | ) | __riscv_vadd_vx_u16m8_tumu(__VA_ARGS__) |
| #define vadd_vx_u16mf2 | ( | ... | ) | __riscv_vadd_vx_u16mf2(__VA_ARGS__) |
| #define vadd_vx_u16mf2_m | ( | ... | ) | __riscv_vadd_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vadd_vx_u16mf4 | ( | ... | ) | __riscv_vadd_vx_u16mf4(__VA_ARGS__) |
| #define vadd_vx_u16mf4_m | ( | ... | ) | __riscv_vadd_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vadd_vx_u32m1 | ( | ... | ) | __riscv_vadd_vx_u32m1(__VA_ARGS__) |
| #define vadd_vx_u32m1_m | ( | ... | ) | __riscv_vadd_vx_u32m1_tumu(__VA_ARGS__) |
| #define vadd_vx_u32m2 | ( | ... | ) | __riscv_vadd_vx_u32m2(__VA_ARGS__) |
| #define vadd_vx_u32m2_m | ( | ... | ) | __riscv_vadd_vx_u32m2_tumu(__VA_ARGS__) |
| #define vadd_vx_u32m4 | ( | ... | ) | __riscv_vadd_vx_u32m4(__VA_ARGS__) |
| #define vadd_vx_u32m4_m | ( | ... | ) | __riscv_vadd_vx_u32m4_tumu(__VA_ARGS__) |
| #define vadd_vx_u32m8 | ( | ... | ) | __riscv_vadd_vx_u32m8(__VA_ARGS__) |
| #define vadd_vx_u32m8_m | ( | ... | ) | __riscv_vadd_vx_u32m8_tumu(__VA_ARGS__) |
| #define vadd_vx_u32mf2 | ( | ... | ) | __riscv_vadd_vx_u32mf2(__VA_ARGS__) |
| #define vadd_vx_u32mf2_m | ( | ... | ) | __riscv_vadd_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vadd_vx_u64m1 | ( | ... | ) | __riscv_vadd_vx_u64m1(__VA_ARGS__) |
| #define vadd_vx_u64m1_m | ( | ... | ) | __riscv_vadd_vx_u64m1_tumu(__VA_ARGS__) |
| #define vadd_vx_u64m2 | ( | ... | ) | __riscv_vadd_vx_u64m2(__VA_ARGS__) |
| #define vadd_vx_u64m2_m | ( | ... | ) | __riscv_vadd_vx_u64m2_tumu(__VA_ARGS__) |
| #define vadd_vx_u64m4 | ( | ... | ) | __riscv_vadd_vx_u64m4(__VA_ARGS__) |
| #define vadd_vx_u64m4_m | ( | ... | ) | __riscv_vadd_vx_u64m4_tumu(__VA_ARGS__) |
| #define vadd_vx_u64m8 | ( | ... | ) | __riscv_vadd_vx_u64m8(__VA_ARGS__) |
| #define vadd_vx_u64m8_m | ( | ... | ) | __riscv_vadd_vx_u64m8_tumu(__VA_ARGS__) |
| #define vadd_vx_u8m1 | ( | ... | ) | __riscv_vadd_vx_u8m1(__VA_ARGS__) |
| #define vadd_vx_u8m1_m | ( | ... | ) | __riscv_vadd_vx_u8m1_tumu(__VA_ARGS__) |
| #define vadd_vx_u8m2 | ( | ... | ) | __riscv_vadd_vx_u8m2(__VA_ARGS__) |
| #define vadd_vx_u8m2_m | ( | ... | ) | __riscv_vadd_vx_u8m2_tumu(__VA_ARGS__) |
| #define vadd_vx_u8m4 | ( | ... | ) | __riscv_vadd_vx_u8m4(__VA_ARGS__) |
| #define vadd_vx_u8m4_m | ( | ... | ) | __riscv_vadd_vx_u8m4_tumu(__VA_ARGS__) |
| #define vadd_vx_u8m8 | ( | ... | ) | __riscv_vadd_vx_u8m8(__VA_ARGS__) |
| #define vadd_vx_u8m8_m | ( | ... | ) | __riscv_vadd_vx_u8m8_tumu(__VA_ARGS__) |
| #define vadd_vx_u8mf2 | ( | ... | ) | __riscv_vadd_vx_u8mf2(__VA_ARGS__) |
| #define vadd_vx_u8mf2_m | ( | ... | ) | __riscv_vadd_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vadd_vx_u8mf4 | ( | ... | ) | __riscv_vadd_vx_u8mf4(__VA_ARGS__) |
| #define vadd_vx_u8mf4_m | ( | ... | ) | __riscv_vadd_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vadd_vx_u8mf8 | ( | ... | ) | __riscv_vadd_vx_u8mf8(__VA_ARGS__) |
| #define vadd_vx_u8mf8_m | ( | ... | ) | __riscv_vadd_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vand_vv_i16m1 | ( | ... | ) | __riscv_vand_vv_i16m1(__VA_ARGS__) |
| #define vand_vv_i16m1_m | ( | ... | ) | __riscv_vand_vv_i16m1_tumu(__VA_ARGS__) |
| #define vand_vv_i16m2 | ( | ... | ) | __riscv_vand_vv_i16m2(__VA_ARGS__) |
| #define vand_vv_i16m2_m | ( | ... | ) | __riscv_vand_vv_i16m2_tumu(__VA_ARGS__) |
| #define vand_vv_i16m4 | ( | ... | ) | __riscv_vand_vv_i16m4(__VA_ARGS__) |
| #define vand_vv_i16m4_m | ( | ... | ) | __riscv_vand_vv_i16m4_tumu(__VA_ARGS__) |
| #define vand_vv_i16m8 | ( | ... | ) | __riscv_vand_vv_i16m8(__VA_ARGS__) |
| #define vand_vv_i16m8_m | ( | ... | ) | __riscv_vand_vv_i16m8_tumu(__VA_ARGS__) |
| #define vand_vv_i16mf2 | ( | ... | ) | __riscv_vand_vv_i16mf2(__VA_ARGS__) |
| #define vand_vv_i16mf2_m | ( | ... | ) | __riscv_vand_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vand_vv_i16mf4 | ( | ... | ) | __riscv_vand_vv_i16mf4(__VA_ARGS__) |
| #define vand_vv_i16mf4_m | ( | ... | ) | __riscv_vand_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vand_vv_i32m1 | ( | ... | ) | __riscv_vand_vv_i32m1(__VA_ARGS__) |
| #define vand_vv_i32m1_m | ( | ... | ) | __riscv_vand_vv_i32m1_tumu(__VA_ARGS__) |
| #define vand_vv_i32m2 | ( | ... | ) | __riscv_vand_vv_i32m2(__VA_ARGS__) |
| #define vand_vv_i32m2_m | ( | ... | ) | __riscv_vand_vv_i32m2_tumu(__VA_ARGS__) |
| #define vand_vv_i32m4 | ( | ... | ) | __riscv_vand_vv_i32m4(__VA_ARGS__) |
| #define vand_vv_i32m4_m | ( | ... | ) | __riscv_vand_vv_i32m4_tumu(__VA_ARGS__) |
| #define vand_vv_i32m8 | ( | ... | ) | __riscv_vand_vv_i32m8(__VA_ARGS__) |
| #define vand_vv_i32m8_m | ( | ... | ) | __riscv_vand_vv_i32m8_tumu(__VA_ARGS__) |
| #define vand_vv_i32mf2 | ( | ... | ) | __riscv_vand_vv_i32mf2(__VA_ARGS__) |
| #define vand_vv_i32mf2_m | ( | ... | ) | __riscv_vand_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vand_vv_i64m1 | ( | ... | ) | __riscv_vand_vv_i64m1(__VA_ARGS__) |
| #define vand_vv_i64m1_m | ( | ... | ) | __riscv_vand_vv_i64m1_tumu(__VA_ARGS__) |
| #define vand_vv_i64m2 | ( | ... | ) | __riscv_vand_vv_i64m2(__VA_ARGS__) |
| #define vand_vv_i64m2_m | ( | ... | ) | __riscv_vand_vv_i64m2_tumu(__VA_ARGS__) |
| #define vand_vv_i64m4 | ( | ... | ) | __riscv_vand_vv_i64m4(__VA_ARGS__) |
| #define vand_vv_i64m4_m | ( | ... | ) | __riscv_vand_vv_i64m4_tumu(__VA_ARGS__) |
| #define vand_vv_i64m8 | ( | ... | ) | __riscv_vand_vv_i64m8(__VA_ARGS__) |
| #define vand_vv_i64m8_m | ( | ... | ) | __riscv_vand_vv_i64m8_tumu(__VA_ARGS__) |
| #define vand_vv_i8m1 | ( | ... | ) | __riscv_vand_vv_i8m1(__VA_ARGS__) |
| #define vand_vv_i8m1_m | ( | ... | ) | __riscv_vand_vv_i8m1_tumu(__VA_ARGS__) |
| #define vand_vv_i8m2 | ( | ... | ) | __riscv_vand_vv_i8m2(__VA_ARGS__) |
| #define vand_vv_i8m2_m | ( | ... | ) | __riscv_vand_vv_i8m2_tumu(__VA_ARGS__) |
| #define vand_vv_i8m4 | ( | ... | ) | __riscv_vand_vv_i8m4(__VA_ARGS__) |
| #define vand_vv_i8m4_m | ( | ... | ) | __riscv_vand_vv_i8m4_tumu(__VA_ARGS__) |
| #define vand_vv_i8m8 | ( | ... | ) | __riscv_vand_vv_i8m8(__VA_ARGS__) |
| #define vand_vv_i8m8_m | ( | ... | ) | __riscv_vand_vv_i8m8_tumu(__VA_ARGS__) |
| #define vand_vv_i8mf2 | ( | ... | ) | __riscv_vand_vv_i8mf2(__VA_ARGS__) |
| #define vand_vv_i8mf2_m | ( | ... | ) | __riscv_vand_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vand_vv_i8mf4 | ( | ... | ) | __riscv_vand_vv_i8mf4(__VA_ARGS__) |
| #define vand_vv_i8mf4_m | ( | ... | ) | __riscv_vand_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vand_vv_i8mf8 | ( | ... | ) | __riscv_vand_vv_i8mf8(__VA_ARGS__) |
| #define vand_vv_i8mf8_m | ( | ... | ) | __riscv_vand_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vand_vv_u16m1 | ( | ... | ) | __riscv_vand_vv_u16m1(__VA_ARGS__) |
| #define vand_vv_u16m1_m | ( | ... | ) | __riscv_vand_vv_u16m1_tumu(__VA_ARGS__) |
| #define vand_vv_u16m2 | ( | ... | ) | __riscv_vand_vv_u16m2(__VA_ARGS__) |
| #define vand_vv_u16m2_m | ( | ... | ) | __riscv_vand_vv_u16m2_tumu(__VA_ARGS__) |
| #define vand_vv_u16m4 | ( | ... | ) | __riscv_vand_vv_u16m4(__VA_ARGS__) |
| #define vand_vv_u16m4_m | ( | ... | ) | __riscv_vand_vv_u16m4_tumu(__VA_ARGS__) |
| #define vand_vv_u16m8 | ( | ... | ) | __riscv_vand_vv_u16m8(__VA_ARGS__) |
| #define vand_vv_u16m8_m | ( | ... | ) | __riscv_vand_vv_u16m8_tumu(__VA_ARGS__) |
| #define vand_vv_u16mf2 | ( | ... | ) | __riscv_vand_vv_u16mf2(__VA_ARGS__) |
| #define vand_vv_u16mf2_m | ( | ... | ) | __riscv_vand_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vand_vv_u16mf4 | ( | ... | ) | __riscv_vand_vv_u16mf4(__VA_ARGS__) |
| #define vand_vv_u16mf4_m | ( | ... | ) | __riscv_vand_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vand_vv_u32m1 | ( | ... | ) | __riscv_vand_vv_u32m1(__VA_ARGS__) |
| #define vand_vv_u32m1_m | ( | ... | ) | __riscv_vand_vv_u32m1_tumu(__VA_ARGS__) |
| #define vand_vv_u32m2 | ( | ... | ) | __riscv_vand_vv_u32m2(__VA_ARGS__) |
| #define vand_vv_u32m2_m | ( | ... | ) | __riscv_vand_vv_u32m2_tumu(__VA_ARGS__) |
| #define vand_vv_u32m4 | ( | ... | ) | __riscv_vand_vv_u32m4(__VA_ARGS__) |
| #define vand_vv_u32m4_m | ( | ... | ) | __riscv_vand_vv_u32m4_tumu(__VA_ARGS__) |
| #define vand_vv_u32m8 | ( | ... | ) | __riscv_vand_vv_u32m8(__VA_ARGS__) |
| #define vand_vv_u32m8_m | ( | ... | ) | __riscv_vand_vv_u32m8_tumu(__VA_ARGS__) |
| #define vand_vv_u32mf2 | ( | ... | ) | __riscv_vand_vv_u32mf2(__VA_ARGS__) |
| #define vand_vv_u32mf2_m | ( | ... | ) | __riscv_vand_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vand_vv_u64m1 | ( | ... | ) | __riscv_vand_vv_u64m1(__VA_ARGS__) |
| #define vand_vv_u64m1_m | ( | ... | ) | __riscv_vand_vv_u64m1_tumu(__VA_ARGS__) |
| #define vand_vv_u64m2 | ( | ... | ) | __riscv_vand_vv_u64m2(__VA_ARGS__) |
| #define vand_vv_u64m2_m | ( | ... | ) | __riscv_vand_vv_u64m2_tumu(__VA_ARGS__) |
| #define vand_vv_u64m4 | ( | ... | ) | __riscv_vand_vv_u64m4(__VA_ARGS__) |
| #define vand_vv_u64m4_m | ( | ... | ) | __riscv_vand_vv_u64m4_tumu(__VA_ARGS__) |
| #define vand_vv_u64m8 | ( | ... | ) | __riscv_vand_vv_u64m8(__VA_ARGS__) |
| #define vand_vv_u64m8_m | ( | ... | ) | __riscv_vand_vv_u64m8_tumu(__VA_ARGS__) |
| #define vand_vv_u8m1 | ( | ... | ) | __riscv_vand_vv_u8m1(__VA_ARGS__) |
| #define vand_vv_u8m1_m | ( | ... | ) | __riscv_vand_vv_u8m1_tumu(__VA_ARGS__) |
| #define vand_vv_u8m2 | ( | ... | ) | __riscv_vand_vv_u8m2(__VA_ARGS__) |
| #define vand_vv_u8m2_m | ( | ... | ) | __riscv_vand_vv_u8m2_tumu(__VA_ARGS__) |
| #define vand_vv_u8m4 | ( | ... | ) | __riscv_vand_vv_u8m4(__VA_ARGS__) |
| #define vand_vv_u8m4_m | ( | ... | ) | __riscv_vand_vv_u8m4_tumu(__VA_ARGS__) |
| #define vand_vv_u8m8 | ( | ... | ) | __riscv_vand_vv_u8m8(__VA_ARGS__) |
| #define vand_vv_u8m8_m | ( | ... | ) | __riscv_vand_vv_u8m8_tumu(__VA_ARGS__) |
| #define vand_vv_u8mf2 | ( | ... | ) | __riscv_vand_vv_u8mf2(__VA_ARGS__) |
| #define vand_vv_u8mf2_m | ( | ... | ) | __riscv_vand_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vand_vv_u8mf4 | ( | ... | ) | __riscv_vand_vv_u8mf4(__VA_ARGS__) |
| #define vand_vv_u8mf4_m | ( | ... | ) | __riscv_vand_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vand_vv_u8mf8 | ( | ... | ) | __riscv_vand_vv_u8mf8(__VA_ARGS__) |
| #define vand_vv_u8mf8_m | ( | ... | ) | __riscv_vand_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vand_vx_i16m1 | ( | ... | ) | __riscv_vand_vx_i16m1(__VA_ARGS__) |
| #define vand_vx_i16m1_m | ( | ... | ) | __riscv_vand_vx_i16m1_tumu(__VA_ARGS__) |
| #define vand_vx_i16m2 | ( | ... | ) | __riscv_vand_vx_i16m2(__VA_ARGS__) |
| #define vand_vx_i16m2_m | ( | ... | ) | __riscv_vand_vx_i16m2_tumu(__VA_ARGS__) |
| #define vand_vx_i16m4 | ( | ... | ) | __riscv_vand_vx_i16m4(__VA_ARGS__) |
| #define vand_vx_i16m4_m | ( | ... | ) | __riscv_vand_vx_i16m4_tumu(__VA_ARGS__) |
| #define vand_vx_i16m8 | ( | ... | ) | __riscv_vand_vx_i16m8(__VA_ARGS__) |
| #define vand_vx_i16m8_m | ( | ... | ) | __riscv_vand_vx_i16m8_tumu(__VA_ARGS__) |
| #define vand_vx_i16mf2 | ( | ... | ) | __riscv_vand_vx_i16mf2(__VA_ARGS__) |
| #define vand_vx_i16mf2_m | ( | ... | ) | __riscv_vand_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vand_vx_i16mf4 | ( | ... | ) | __riscv_vand_vx_i16mf4(__VA_ARGS__) |
| #define vand_vx_i16mf4_m | ( | ... | ) | __riscv_vand_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vand_vx_i32m1 | ( | ... | ) | __riscv_vand_vx_i32m1(__VA_ARGS__) |
| #define vand_vx_i32m1_m | ( | ... | ) | __riscv_vand_vx_i32m1_tumu(__VA_ARGS__) |
| #define vand_vx_i32m2 | ( | ... | ) | __riscv_vand_vx_i32m2(__VA_ARGS__) |
| #define vand_vx_i32m2_m | ( | ... | ) | __riscv_vand_vx_i32m2_tumu(__VA_ARGS__) |
| #define vand_vx_i32m4 | ( | ... | ) | __riscv_vand_vx_i32m4(__VA_ARGS__) |
| #define vand_vx_i32m4_m | ( | ... | ) | __riscv_vand_vx_i32m4_tumu(__VA_ARGS__) |
| #define vand_vx_i32m8 | ( | ... | ) | __riscv_vand_vx_i32m8(__VA_ARGS__) |
| #define vand_vx_i32m8_m | ( | ... | ) | __riscv_vand_vx_i32m8_tumu(__VA_ARGS__) |
| #define vand_vx_i32mf2 | ( | ... | ) | __riscv_vand_vx_i32mf2(__VA_ARGS__) |
| #define vand_vx_i32mf2_m | ( | ... | ) | __riscv_vand_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vand_vx_i64m1 | ( | ... | ) | __riscv_vand_vx_i64m1(__VA_ARGS__) |
| #define vand_vx_i64m1_m | ( | ... | ) | __riscv_vand_vx_i64m1_tumu(__VA_ARGS__) |
| #define vand_vx_i64m2 | ( | ... | ) | __riscv_vand_vx_i64m2(__VA_ARGS__) |
| #define vand_vx_i64m2_m | ( | ... | ) | __riscv_vand_vx_i64m2_tumu(__VA_ARGS__) |
| #define vand_vx_i64m4 | ( | ... | ) | __riscv_vand_vx_i64m4(__VA_ARGS__) |
| #define vand_vx_i64m4_m | ( | ... | ) | __riscv_vand_vx_i64m4_tumu(__VA_ARGS__) |
| #define vand_vx_i64m8 | ( | ... | ) | __riscv_vand_vx_i64m8(__VA_ARGS__) |
| #define vand_vx_i64m8_m | ( | ... | ) | __riscv_vand_vx_i64m8_tumu(__VA_ARGS__) |
| #define vand_vx_i8m1 | ( | ... | ) | __riscv_vand_vx_i8m1(__VA_ARGS__) |
| #define vand_vx_i8m1_m | ( | ... | ) | __riscv_vand_vx_i8m1_tumu(__VA_ARGS__) |
| #define vand_vx_i8m2 | ( | ... | ) | __riscv_vand_vx_i8m2(__VA_ARGS__) |
| #define vand_vx_i8m2_m | ( | ... | ) | __riscv_vand_vx_i8m2_tumu(__VA_ARGS__) |
| #define vand_vx_i8m4 | ( | ... | ) | __riscv_vand_vx_i8m4(__VA_ARGS__) |
| #define vand_vx_i8m4_m | ( | ... | ) | __riscv_vand_vx_i8m4_tumu(__VA_ARGS__) |
| #define vand_vx_i8m8 | ( | ... | ) | __riscv_vand_vx_i8m8(__VA_ARGS__) |
| #define vand_vx_i8m8_m | ( | ... | ) | __riscv_vand_vx_i8m8_tumu(__VA_ARGS__) |
| #define vand_vx_i8mf2 | ( | ... | ) | __riscv_vand_vx_i8mf2(__VA_ARGS__) |
| #define vand_vx_i8mf2_m | ( | ... | ) | __riscv_vand_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vand_vx_i8mf4 | ( | ... | ) | __riscv_vand_vx_i8mf4(__VA_ARGS__) |
| #define vand_vx_i8mf4_m | ( | ... | ) | __riscv_vand_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vand_vx_i8mf8 | ( | ... | ) | __riscv_vand_vx_i8mf8(__VA_ARGS__) |
| #define vand_vx_i8mf8_m | ( | ... | ) | __riscv_vand_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vand_vx_u16m1 | ( | ... | ) | __riscv_vand_vx_u16m1(__VA_ARGS__) |
| #define vand_vx_u16m1_m | ( | ... | ) | __riscv_vand_vx_u16m1_tumu(__VA_ARGS__) |
| #define vand_vx_u16m2 | ( | ... | ) | __riscv_vand_vx_u16m2(__VA_ARGS__) |
| #define vand_vx_u16m2_m | ( | ... | ) | __riscv_vand_vx_u16m2_tumu(__VA_ARGS__) |
| #define vand_vx_u16m4 | ( | ... | ) | __riscv_vand_vx_u16m4(__VA_ARGS__) |
| #define vand_vx_u16m4_m | ( | ... | ) | __riscv_vand_vx_u16m4_tumu(__VA_ARGS__) |
| #define vand_vx_u16m8 | ( | ... | ) | __riscv_vand_vx_u16m8(__VA_ARGS__) |
| #define vand_vx_u16m8_m | ( | ... | ) | __riscv_vand_vx_u16m8_tumu(__VA_ARGS__) |
| #define vand_vx_u16mf2 | ( | ... | ) | __riscv_vand_vx_u16mf2(__VA_ARGS__) |
| #define vand_vx_u16mf2_m | ( | ... | ) | __riscv_vand_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vand_vx_u16mf4 | ( | ... | ) | __riscv_vand_vx_u16mf4(__VA_ARGS__) |
| #define vand_vx_u16mf4_m | ( | ... | ) | __riscv_vand_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vand_vx_u32m1 | ( | ... | ) | __riscv_vand_vx_u32m1(__VA_ARGS__) |
| #define vand_vx_u32m1_m | ( | ... | ) | __riscv_vand_vx_u32m1_tumu(__VA_ARGS__) |
| #define vand_vx_u32m2 | ( | ... | ) | __riscv_vand_vx_u32m2(__VA_ARGS__) |
| #define vand_vx_u32m2_m | ( | ... | ) | __riscv_vand_vx_u32m2_tumu(__VA_ARGS__) |
| #define vand_vx_u32m4 | ( | ... | ) | __riscv_vand_vx_u32m4(__VA_ARGS__) |
| #define vand_vx_u32m4_m | ( | ... | ) | __riscv_vand_vx_u32m4_tumu(__VA_ARGS__) |
| #define vand_vx_u32m8 | ( | ... | ) | __riscv_vand_vx_u32m8(__VA_ARGS__) |
| #define vand_vx_u32m8_m | ( | ... | ) | __riscv_vand_vx_u32m8_tumu(__VA_ARGS__) |
| #define vand_vx_u32mf2 | ( | ... | ) | __riscv_vand_vx_u32mf2(__VA_ARGS__) |
| #define vand_vx_u32mf2_m | ( | ... | ) | __riscv_vand_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vand_vx_u64m1 | ( | ... | ) | __riscv_vand_vx_u64m1(__VA_ARGS__) |
| #define vand_vx_u64m1_m | ( | ... | ) | __riscv_vand_vx_u64m1_tumu(__VA_ARGS__) |
| #define vand_vx_u64m2 | ( | ... | ) | __riscv_vand_vx_u64m2(__VA_ARGS__) |
| #define vand_vx_u64m2_m | ( | ... | ) | __riscv_vand_vx_u64m2_tumu(__VA_ARGS__) |
| #define vand_vx_u64m4 | ( | ... | ) | __riscv_vand_vx_u64m4(__VA_ARGS__) |
| #define vand_vx_u64m4_m | ( | ... | ) | __riscv_vand_vx_u64m4_tumu(__VA_ARGS__) |
| #define vand_vx_u64m8 | ( | ... | ) | __riscv_vand_vx_u64m8(__VA_ARGS__) |
| #define vand_vx_u64m8_m | ( | ... | ) | __riscv_vand_vx_u64m8_tumu(__VA_ARGS__) |
| #define vand_vx_u8m1 | ( | ... | ) | __riscv_vand_vx_u8m1(__VA_ARGS__) |
| #define vand_vx_u8m1_m | ( | ... | ) | __riscv_vand_vx_u8m1_tumu(__VA_ARGS__) |
| #define vand_vx_u8m2 | ( | ... | ) | __riscv_vand_vx_u8m2(__VA_ARGS__) |
| #define vand_vx_u8m2_m | ( | ... | ) | __riscv_vand_vx_u8m2_tumu(__VA_ARGS__) |
| #define vand_vx_u8m4 | ( | ... | ) | __riscv_vand_vx_u8m4(__VA_ARGS__) |
| #define vand_vx_u8m4_m | ( | ... | ) | __riscv_vand_vx_u8m4_tumu(__VA_ARGS__) |
| #define vand_vx_u8m8 | ( | ... | ) | __riscv_vand_vx_u8m8(__VA_ARGS__) |
| #define vand_vx_u8m8_m | ( | ... | ) | __riscv_vand_vx_u8m8_tumu(__VA_ARGS__) |
| #define vand_vx_u8mf2 | ( | ... | ) | __riscv_vand_vx_u8mf2(__VA_ARGS__) |
| #define vand_vx_u8mf2_m | ( | ... | ) | __riscv_vand_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vand_vx_u8mf4 | ( | ... | ) | __riscv_vand_vx_u8mf4(__VA_ARGS__) |
| #define vand_vx_u8mf4_m | ( | ... | ) | __riscv_vand_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vand_vx_u8mf8 | ( | ... | ) | __riscv_vand_vx_u8mf8(__VA_ARGS__) |
| #define vand_vx_u8mf8_m | ( | ... | ) | __riscv_vand_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vasub_vv_i16m1 | ( | ... | ) | __riscv_vasub_vv_i16m1(__VA_ARGS__) |
| #define vasub_vv_i16m1_m | ( | ... | ) | __riscv_vasub_vv_i16m1_tumu(__VA_ARGS__) |
| #define vasub_vv_i16m2 | ( | ... | ) | __riscv_vasub_vv_i16m2(__VA_ARGS__) |
| #define vasub_vv_i16m2_m | ( | ... | ) | __riscv_vasub_vv_i16m2_tumu(__VA_ARGS__) |
| #define vasub_vv_i16m4 | ( | ... | ) | __riscv_vasub_vv_i16m4(__VA_ARGS__) |
| #define vasub_vv_i16m4_m | ( | ... | ) | __riscv_vasub_vv_i16m4_tumu(__VA_ARGS__) |
| #define vasub_vv_i16m8 | ( | ... | ) | __riscv_vasub_vv_i16m8(__VA_ARGS__) |
| #define vasub_vv_i16m8_m | ( | ... | ) | __riscv_vasub_vv_i16m8_tumu(__VA_ARGS__) |
| #define vasub_vv_i16mf2 | ( | ... | ) | __riscv_vasub_vv_i16mf2(__VA_ARGS__) |
| #define vasub_vv_i16mf2_m | ( | ... | ) | __riscv_vasub_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vasub_vv_i16mf4 | ( | ... | ) | __riscv_vasub_vv_i16mf4(__VA_ARGS__) |
| #define vasub_vv_i16mf4_m | ( | ... | ) | __riscv_vasub_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vasub_vv_i32m1 | ( | ... | ) | __riscv_vasub_vv_i32m1(__VA_ARGS__) |
| #define vasub_vv_i32m1_m | ( | ... | ) | __riscv_vasub_vv_i32m1_tumu(__VA_ARGS__) |
| #define vasub_vv_i32m2 | ( | ... | ) | __riscv_vasub_vv_i32m2(__VA_ARGS__) |
| #define vasub_vv_i32m2_m | ( | ... | ) | __riscv_vasub_vv_i32m2_tumu(__VA_ARGS__) |
| #define vasub_vv_i32m4 | ( | ... | ) | __riscv_vasub_vv_i32m4(__VA_ARGS__) |
| #define vasub_vv_i32m4_m | ( | ... | ) | __riscv_vasub_vv_i32m4_tumu(__VA_ARGS__) |
| #define vasub_vv_i32m8 | ( | ... | ) | __riscv_vasub_vv_i32m8(__VA_ARGS__) |
| #define vasub_vv_i32m8_m | ( | ... | ) | __riscv_vasub_vv_i32m8_tumu(__VA_ARGS__) |
| #define vasub_vv_i32mf2 | ( | ... | ) | __riscv_vasub_vv_i32mf2(__VA_ARGS__) |
| #define vasub_vv_i32mf2_m | ( | ... | ) | __riscv_vasub_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vasub_vv_i64m1 | ( | ... | ) | __riscv_vasub_vv_i64m1(__VA_ARGS__) |
| #define vasub_vv_i64m1_m | ( | ... | ) | __riscv_vasub_vv_i64m1_tumu(__VA_ARGS__) |
| #define vasub_vv_i64m2 | ( | ... | ) | __riscv_vasub_vv_i64m2(__VA_ARGS__) |
| #define vasub_vv_i64m2_m | ( | ... | ) | __riscv_vasub_vv_i64m2_tumu(__VA_ARGS__) |
| #define vasub_vv_i64m4 | ( | ... | ) | __riscv_vasub_vv_i64m4(__VA_ARGS__) |
| #define vasub_vv_i64m4_m | ( | ... | ) | __riscv_vasub_vv_i64m4_tumu(__VA_ARGS__) |
| #define vasub_vv_i64m8 | ( | ... | ) | __riscv_vasub_vv_i64m8(__VA_ARGS__) |
| #define vasub_vv_i64m8_m | ( | ... | ) | __riscv_vasub_vv_i64m8_tumu(__VA_ARGS__) |
| #define vasub_vv_i8m1 | ( | ... | ) | __riscv_vasub_vv_i8m1(__VA_ARGS__) |
| #define vasub_vv_i8m1_m | ( | ... | ) | __riscv_vasub_vv_i8m1_tumu(__VA_ARGS__) |
| #define vasub_vv_i8m2 | ( | ... | ) | __riscv_vasub_vv_i8m2(__VA_ARGS__) |
| #define vasub_vv_i8m2_m | ( | ... | ) | __riscv_vasub_vv_i8m2_tumu(__VA_ARGS__) |
| #define vasub_vv_i8m4 | ( | ... | ) | __riscv_vasub_vv_i8m4(__VA_ARGS__) |
| #define vasub_vv_i8m4_m | ( | ... | ) | __riscv_vasub_vv_i8m4_tumu(__VA_ARGS__) |
| #define vasub_vv_i8m8 | ( | ... | ) | __riscv_vasub_vv_i8m8(__VA_ARGS__) |
| #define vasub_vv_i8m8_m | ( | ... | ) | __riscv_vasub_vv_i8m8_tumu(__VA_ARGS__) |
| #define vasub_vv_i8mf2 | ( | ... | ) | __riscv_vasub_vv_i8mf2(__VA_ARGS__) |
| #define vasub_vv_i8mf2_m | ( | ... | ) | __riscv_vasub_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vasub_vv_i8mf4 | ( | ... | ) | __riscv_vasub_vv_i8mf4(__VA_ARGS__) |
| #define vasub_vv_i8mf4_m | ( | ... | ) | __riscv_vasub_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vasub_vv_i8mf8 | ( | ... | ) | __riscv_vasub_vv_i8mf8(__VA_ARGS__) |
| #define vasub_vv_i8mf8_m | ( | ... | ) | __riscv_vasub_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vasub_vx_i16m1 | ( | ... | ) | __riscv_vasub_vx_i16m1(__VA_ARGS__) |
| #define vasub_vx_i16m1_m | ( | ... | ) | __riscv_vasub_vx_i16m1_tumu(__VA_ARGS__) |
| #define vasub_vx_i16m2 | ( | ... | ) | __riscv_vasub_vx_i16m2(__VA_ARGS__) |
| #define vasub_vx_i16m2_m | ( | ... | ) | __riscv_vasub_vx_i16m2_tumu(__VA_ARGS__) |
| #define vasub_vx_i16m4 | ( | ... | ) | __riscv_vasub_vx_i16m4(__VA_ARGS__) |
| #define vasub_vx_i16m4_m | ( | ... | ) | __riscv_vasub_vx_i16m4_tumu(__VA_ARGS__) |
| #define vasub_vx_i16m8 | ( | ... | ) | __riscv_vasub_vx_i16m8(__VA_ARGS__) |
| #define vasub_vx_i16m8_m | ( | ... | ) | __riscv_vasub_vx_i16m8_tumu(__VA_ARGS__) |
| #define vasub_vx_i16mf2 | ( | ... | ) | __riscv_vasub_vx_i16mf2(__VA_ARGS__) |
| #define vasub_vx_i16mf2_m | ( | ... | ) | __riscv_vasub_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vasub_vx_i16mf4 | ( | ... | ) | __riscv_vasub_vx_i16mf4(__VA_ARGS__) |
| #define vasub_vx_i16mf4_m | ( | ... | ) | __riscv_vasub_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vasub_vx_i32m1 | ( | ... | ) | __riscv_vasub_vx_i32m1(__VA_ARGS__) |
| #define vasub_vx_i32m1_m | ( | ... | ) | __riscv_vasub_vx_i32m1_tumu(__VA_ARGS__) |
| #define vasub_vx_i32m2 | ( | ... | ) | __riscv_vasub_vx_i32m2(__VA_ARGS__) |
| #define vasub_vx_i32m2_m | ( | ... | ) | __riscv_vasub_vx_i32m2_tumu(__VA_ARGS__) |
| #define vasub_vx_i32m4 | ( | ... | ) | __riscv_vasub_vx_i32m4(__VA_ARGS__) |
| #define vasub_vx_i32m4_m | ( | ... | ) | __riscv_vasub_vx_i32m4_tumu(__VA_ARGS__) |
| #define vasub_vx_i32m8 | ( | ... | ) | __riscv_vasub_vx_i32m8(__VA_ARGS__) |
| #define vasub_vx_i32m8_m | ( | ... | ) | __riscv_vasub_vx_i32m8_tumu(__VA_ARGS__) |
| #define vasub_vx_i32mf2 | ( | ... | ) | __riscv_vasub_vx_i32mf2(__VA_ARGS__) |
| #define vasub_vx_i32mf2_m | ( | ... | ) | __riscv_vasub_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vasub_vx_i64m1 | ( | ... | ) | __riscv_vasub_vx_i64m1(__VA_ARGS__) |
| #define vasub_vx_i64m1_m | ( | ... | ) | __riscv_vasub_vx_i64m1_tumu(__VA_ARGS__) |
| #define vasub_vx_i64m2 | ( | ... | ) | __riscv_vasub_vx_i64m2(__VA_ARGS__) |
| #define vasub_vx_i64m2_m | ( | ... | ) | __riscv_vasub_vx_i64m2_tumu(__VA_ARGS__) |
| #define vasub_vx_i64m4 | ( | ... | ) | __riscv_vasub_vx_i64m4(__VA_ARGS__) |
| #define vasub_vx_i64m4_m | ( | ... | ) | __riscv_vasub_vx_i64m4_tumu(__VA_ARGS__) |
| #define vasub_vx_i64m8 | ( | ... | ) | __riscv_vasub_vx_i64m8(__VA_ARGS__) |
| #define vasub_vx_i64m8_m | ( | ... | ) | __riscv_vasub_vx_i64m8_tumu(__VA_ARGS__) |
| #define vasub_vx_i8m1 | ( | ... | ) | __riscv_vasub_vx_i8m1(__VA_ARGS__) |
| #define vasub_vx_i8m1_m | ( | ... | ) | __riscv_vasub_vx_i8m1_tumu(__VA_ARGS__) |
| #define vasub_vx_i8m2 | ( | ... | ) | __riscv_vasub_vx_i8m2(__VA_ARGS__) |
| #define vasub_vx_i8m2_m | ( | ... | ) | __riscv_vasub_vx_i8m2_tumu(__VA_ARGS__) |
| #define vasub_vx_i8m4 | ( | ... | ) | __riscv_vasub_vx_i8m4(__VA_ARGS__) |
| #define vasub_vx_i8m4_m | ( | ... | ) | __riscv_vasub_vx_i8m4_tumu(__VA_ARGS__) |
| #define vasub_vx_i8m8 | ( | ... | ) | __riscv_vasub_vx_i8m8(__VA_ARGS__) |
| #define vasub_vx_i8m8_m | ( | ... | ) | __riscv_vasub_vx_i8m8_tumu(__VA_ARGS__) |
| #define vasub_vx_i8mf2 | ( | ... | ) | __riscv_vasub_vx_i8mf2(__VA_ARGS__) |
| #define vasub_vx_i8mf2_m | ( | ... | ) | __riscv_vasub_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vasub_vx_i8mf4 | ( | ... | ) | __riscv_vasub_vx_i8mf4(__VA_ARGS__) |
| #define vasub_vx_i8mf4_m | ( | ... | ) | __riscv_vasub_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vasub_vx_i8mf8 | ( | ... | ) | __riscv_vasub_vx_i8mf8(__VA_ARGS__) |
| #define vasub_vx_i8mf8_m | ( | ... | ) | __riscv_vasub_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vasubu_vv_u16m1 | ( | ... | ) | __riscv_vasubu_vv_u16m1(__VA_ARGS__) |
| #define vasubu_vv_u16m1_m | ( | ... | ) | __riscv_vasubu_vv_u16m1_tumu(__VA_ARGS__) |
| #define vasubu_vv_u16m2 | ( | ... | ) | __riscv_vasubu_vv_u16m2(__VA_ARGS__) |
| #define vasubu_vv_u16m2_m | ( | ... | ) | __riscv_vasubu_vv_u16m2_tumu(__VA_ARGS__) |
| #define vasubu_vv_u16m4 | ( | ... | ) | __riscv_vasubu_vv_u16m4(__VA_ARGS__) |
| #define vasubu_vv_u16m4_m | ( | ... | ) | __riscv_vasubu_vv_u16m4_tumu(__VA_ARGS__) |
| #define vasubu_vv_u16m8 | ( | ... | ) | __riscv_vasubu_vv_u16m8(__VA_ARGS__) |
| #define vasubu_vv_u16m8_m | ( | ... | ) | __riscv_vasubu_vv_u16m8_tumu(__VA_ARGS__) |
| #define vasubu_vv_u16mf2 | ( | ... | ) | __riscv_vasubu_vv_u16mf2(__VA_ARGS__) |
| #define vasubu_vv_u16mf2_m | ( | ... | ) | __riscv_vasubu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vasubu_vv_u16mf4 | ( | ... | ) | __riscv_vasubu_vv_u16mf4(__VA_ARGS__) |
| #define vasubu_vv_u16mf4_m | ( | ... | ) | __riscv_vasubu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vasubu_vv_u32m1 | ( | ... | ) | __riscv_vasubu_vv_u32m1(__VA_ARGS__) |
| #define vasubu_vv_u32m1_m | ( | ... | ) | __riscv_vasubu_vv_u32m1_tumu(__VA_ARGS__) |
| #define vasubu_vv_u32m2 | ( | ... | ) | __riscv_vasubu_vv_u32m2(__VA_ARGS__) |
| #define vasubu_vv_u32m2_m | ( | ... | ) | __riscv_vasubu_vv_u32m2_tumu(__VA_ARGS__) |
| #define vasubu_vv_u32m4 | ( | ... | ) | __riscv_vasubu_vv_u32m4(__VA_ARGS__) |
| #define vasubu_vv_u32m4_m | ( | ... | ) | __riscv_vasubu_vv_u32m4_tumu(__VA_ARGS__) |
| #define vasubu_vv_u32m8 | ( | ... | ) | __riscv_vasubu_vv_u32m8(__VA_ARGS__) |
| #define vasubu_vv_u32m8_m | ( | ... | ) | __riscv_vasubu_vv_u32m8_tumu(__VA_ARGS__) |
| #define vasubu_vv_u32mf2 | ( | ... | ) | __riscv_vasubu_vv_u32mf2(__VA_ARGS__) |
| #define vasubu_vv_u32mf2_m | ( | ... | ) | __riscv_vasubu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vasubu_vv_u64m1 | ( | ... | ) | __riscv_vasubu_vv_u64m1(__VA_ARGS__) |
| #define vasubu_vv_u64m1_m | ( | ... | ) | __riscv_vasubu_vv_u64m1_tumu(__VA_ARGS__) |
| #define vasubu_vv_u64m2 | ( | ... | ) | __riscv_vasubu_vv_u64m2(__VA_ARGS__) |
| #define vasubu_vv_u64m2_m | ( | ... | ) | __riscv_vasubu_vv_u64m2_tumu(__VA_ARGS__) |
| #define vasubu_vv_u64m4 | ( | ... | ) | __riscv_vasubu_vv_u64m4(__VA_ARGS__) |
| #define vasubu_vv_u64m4_m | ( | ... | ) | __riscv_vasubu_vv_u64m4_tumu(__VA_ARGS__) |
| #define vasubu_vv_u64m8 | ( | ... | ) | __riscv_vasubu_vv_u64m8(__VA_ARGS__) |
| #define vasubu_vv_u64m8_m | ( | ... | ) | __riscv_vasubu_vv_u64m8_tumu(__VA_ARGS__) |
| #define vasubu_vv_u8m1 | ( | ... | ) | __riscv_vasubu_vv_u8m1(__VA_ARGS__) |
| #define vasubu_vv_u8m1_m | ( | ... | ) | __riscv_vasubu_vv_u8m1_tumu(__VA_ARGS__) |
| #define vasubu_vv_u8m2 | ( | ... | ) | __riscv_vasubu_vv_u8m2(__VA_ARGS__) |
| #define vasubu_vv_u8m2_m | ( | ... | ) | __riscv_vasubu_vv_u8m2_tumu(__VA_ARGS__) |
| #define vasubu_vv_u8m4 | ( | ... | ) | __riscv_vasubu_vv_u8m4(__VA_ARGS__) |
| #define vasubu_vv_u8m4_m | ( | ... | ) | __riscv_vasubu_vv_u8m4_tumu(__VA_ARGS__) |
| #define vasubu_vv_u8m8 | ( | ... | ) | __riscv_vasubu_vv_u8m8(__VA_ARGS__) |
| #define vasubu_vv_u8m8_m | ( | ... | ) | __riscv_vasubu_vv_u8m8_tumu(__VA_ARGS__) |
| #define vasubu_vv_u8mf2 | ( | ... | ) | __riscv_vasubu_vv_u8mf2(__VA_ARGS__) |
| #define vasubu_vv_u8mf2_m | ( | ... | ) | __riscv_vasubu_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vasubu_vv_u8mf4 | ( | ... | ) | __riscv_vasubu_vv_u8mf4(__VA_ARGS__) |
| #define vasubu_vv_u8mf4_m | ( | ... | ) | __riscv_vasubu_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vasubu_vv_u8mf8 | ( | ... | ) | __riscv_vasubu_vv_u8mf8(__VA_ARGS__) |
| #define vasubu_vv_u8mf8_m | ( | ... | ) | __riscv_vasubu_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vasubu_vx_u16m1 | ( | ... | ) | __riscv_vasubu_vx_u16m1(__VA_ARGS__) |
| #define vasubu_vx_u16m1_m | ( | ... | ) | __riscv_vasubu_vx_u16m1_tumu(__VA_ARGS__) |
| #define vasubu_vx_u16m2 | ( | ... | ) | __riscv_vasubu_vx_u16m2(__VA_ARGS__) |
| #define vasubu_vx_u16m2_m | ( | ... | ) | __riscv_vasubu_vx_u16m2_tumu(__VA_ARGS__) |
| #define vasubu_vx_u16m4 | ( | ... | ) | __riscv_vasubu_vx_u16m4(__VA_ARGS__) |
| #define vasubu_vx_u16m4_m | ( | ... | ) | __riscv_vasubu_vx_u16m4_tumu(__VA_ARGS__) |
| #define vasubu_vx_u16m8 | ( | ... | ) | __riscv_vasubu_vx_u16m8(__VA_ARGS__) |
| #define vasubu_vx_u16m8_m | ( | ... | ) | __riscv_vasubu_vx_u16m8_tumu(__VA_ARGS__) |
| #define vasubu_vx_u16mf2 | ( | ... | ) | __riscv_vasubu_vx_u16mf2(__VA_ARGS__) |
| #define vasubu_vx_u16mf2_m | ( | ... | ) | __riscv_vasubu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vasubu_vx_u16mf4 | ( | ... | ) | __riscv_vasubu_vx_u16mf4(__VA_ARGS__) |
| #define vasubu_vx_u16mf4_m | ( | ... | ) | __riscv_vasubu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vasubu_vx_u32m1 | ( | ... | ) | __riscv_vasubu_vx_u32m1(__VA_ARGS__) |
| #define vasubu_vx_u32m1_m | ( | ... | ) | __riscv_vasubu_vx_u32m1_tumu(__VA_ARGS__) |
| #define vasubu_vx_u32m2 | ( | ... | ) | __riscv_vasubu_vx_u32m2(__VA_ARGS__) |
| #define vasubu_vx_u32m2_m | ( | ... | ) | __riscv_vasubu_vx_u32m2_tumu(__VA_ARGS__) |
| #define vasubu_vx_u32m4 | ( | ... | ) | __riscv_vasubu_vx_u32m4(__VA_ARGS__) |
| #define vasubu_vx_u32m4_m | ( | ... | ) | __riscv_vasubu_vx_u32m4_tumu(__VA_ARGS__) |
| #define vasubu_vx_u32m8 | ( | ... | ) | __riscv_vasubu_vx_u32m8(__VA_ARGS__) |
| #define vasubu_vx_u32m8_m | ( | ... | ) | __riscv_vasubu_vx_u32m8_tumu(__VA_ARGS__) |
| #define vasubu_vx_u32mf2 | ( | ... | ) | __riscv_vasubu_vx_u32mf2(__VA_ARGS__) |
| #define vasubu_vx_u32mf2_m | ( | ... | ) | __riscv_vasubu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vasubu_vx_u64m1 | ( | ... | ) | __riscv_vasubu_vx_u64m1(__VA_ARGS__) |
| #define vasubu_vx_u64m1_m | ( | ... | ) | __riscv_vasubu_vx_u64m1_tumu(__VA_ARGS__) |
| #define vasubu_vx_u64m2 | ( | ... | ) | __riscv_vasubu_vx_u64m2(__VA_ARGS__) |
| #define vasubu_vx_u64m2_m | ( | ... | ) | __riscv_vasubu_vx_u64m2_tumu(__VA_ARGS__) |
| #define vasubu_vx_u64m4 | ( | ... | ) | __riscv_vasubu_vx_u64m4(__VA_ARGS__) |
| #define vasubu_vx_u64m4_m | ( | ... | ) | __riscv_vasubu_vx_u64m4_tumu(__VA_ARGS__) |
| #define vasubu_vx_u64m8 | ( | ... | ) | __riscv_vasubu_vx_u64m8(__VA_ARGS__) |
| #define vasubu_vx_u64m8_m | ( | ... | ) | __riscv_vasubu_vx_u64m8_tumu(__VA_ARGS__) |
| #define vasubu_vx_u8m1 | ( | ... | ) | __riscv_vasubu_vx_u8m1(__VA_ARGS__) |
| #define vasubu_vx_u8m1_m | ( | ... | ) | __riscv_vasubu_vx_u8m1_tumu(__VA_ARGS__) |
| #define vasubu_vx_u8m2 | ( | ... | ) | __riscv_vasubu_vx_u8m2(__VA_ARGS__) |
| #define vasubu_vx_u8m2_m | ( | ... | ) | __riscv_vasubu_vx_u8m2_tumu(__VA_ARGS__) |
| #define vasubu_vx_u8m4 | ( | ... | ) | __riscv_vasubu_vx_u8m4(__VA_ARGS__) |
| #define vasubu_vx_u8m4_m | ( | ... | ) | __riscv_vasubu_vx_u8m4_tumu(__VA_ARGS__) |
| #define vasubu_vx_u8m8 | ( | ... | ) | __riscv_vasubu_vx_u8m8(__VA_ARGS__) |
| #define vasubu_vx_u8m8_m | ( | ... | ) | __riscv_vasubu_vx_u8m8_tumu(__VA_ARGS__) |
| #define vasubu_vx_u8mf2 | ( | ... | ) | __riscv_vasubu_vx_u8mf2(__VA_ARGS__) |
| #define vasubu_vx_u8mf2_m | ( | ... | ) | __riscv_vasubu_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vasubu_vx_u8mf4 | ( | ... | ) | __riscv_vasubu_vx_u8mf4(__VA_ARGS__) |
| #define vasubu_vx_u8mf4_m | ( | ... | ) | __riscv_vasubu_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vasubu_vx_u8mf8 | ( | ... | ) | __riscv_vasubu_vx_u8mf8(__VA_ARGS__) |
| #define vasubu_vx_u8mf8_m | ( | ... | ) | __riscv_vasubu_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vcompress_vm_f16m1 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_f16m1_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_f16m2 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_f16m2_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_f16m4 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_f16m4_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_f16m8 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_f16m8_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_f16mf2 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_f16mf2_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_f16mf4 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_f16mf4_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_f32m1 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_f32m1_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_f32m2 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_f32m2_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_f32m4 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_f32m4_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_f32m8 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_f32m8_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_f32mf2 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_f32mf2_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_f64m1 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_f64m1_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_f64m2 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_f64m2_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_f64m4 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_f64m4_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_f64m8 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_f64m8_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i16m1 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i16m1_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i16m2 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i16m2_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i16m4 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i16m4_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i16m8 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i16m8_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i16mf2 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i16mf2_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i16mf4 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i16mf4_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i32m1 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i32m1_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i32m2 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i32m2_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i32m4 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i32m4_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i32m8 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i32m8_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i32mf2 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i32mf2_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i64m1 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i64m1_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i64m2 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i64m2_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i64m4 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i64m4_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i64m8 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i64m8_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i8m1 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i8m1_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i8m2 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i8m2_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i8m4 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i8m4_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i8m8 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i8m8_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i8mf2 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i8mf2_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i8mf4 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i8mf4_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_i8mf8 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_i8mf8_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u16m1 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u16m1_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u16m2 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u16m2_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u16m4 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u16m4_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u16m8 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u16m8_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u16mf2 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u16mf2_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u16mf4 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u16mf4_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u32m1 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u32m1_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u32m2 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u32m2_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u32m4 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u32m4_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u32m8 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u32m8_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u32mf2 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u32mf2_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u64m1 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u64m1_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u64m2 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u64m2_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u64m4 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u64m4_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u64m8 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u64m8_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u8m1 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u8m1_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u8m2 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u8m2_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u8m4 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u8m4_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u8m8 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u8m8_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u8mf2 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u8mf2_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u8mf4 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u8mf4_tu((dest), (src), (mask), (vl)) |
| #define vcompress_vm_u8mf8 | ( | mask, | |
| dest, | |||
| src, | |||
| vl | |||
| ) | __riscv_vcompress_vm_u8mf8_tu((dest), (src), (mask), (vl)) |
| #define vcpop_m_b1 | ( | ... | ) | __riscv_vcpop_m_b1(__VA_ARGS__) |
| #define vcpop_m_b16 | ( | ... | ) | __riscv_vcpop_m_b16(__VA_ARGS__) |
| #define vcpop_m_b16_m | ( | ... | ) | __riscv_vcpop_m_b16_m(__VA_ARGS__) |
| #define vcpop_m_b1_m | ( | ... | ) | __riscv_vcpop_m_b1_m(__VA_ARGS__) |
| #define vcpop_m_b2 | ( | ... | ) | __riscv_vcpop_m_b2(__VA_ARGS__) |
| #define vcpop_m_b2_m | ( | ... | ) | __riscv_vcpop_m_b2_m(__VA_ARGS__) |
| #define vcpop_m_b32 | ( | ... | ) | __riscv_vcpop_m_b32(__VA_ARGS__) |
| #define vcpop_m_b32_m | ( | ... | ) | __riscv_vcpop_m_b32_m(__VA_ARGS__) |
| #define vcpop_m_b4 | ( | ... | ) | __riscv_vcpop_m_b4(__VA_ARGS__) |
| #define vcpop_m_b4_m | ( | ... | ) | __riscv_vcpop_m_b4_m(__VA_ARGS__) |
| #define vcpop_m_b64 | ( | ... | ) | __riscv_vcpop_m_b64(__VA_ARGS__) |
| #define vcpop_m_b64_m | ( | ... | ) | __riscv_vcpop_m_b64_m(__VA_ARGS__) |
| #define vcpop_m_b8 | ( | ... | ) | __riscv_vcpop_m_b8(__VA_ARGS__) |
| #define vcpop_m_b8_m | ( | ... | ) | __riscv_vcpop_m_b8_m(__VA_ARGS__) |
| #define vdiv_vv_i16m1 | ( | ... | ) | __riscv_vdiv_vv_i16m1(__VA_ARGS__) |
| #define vdiv_vv_i16m1_m | ( | ... | ) | __riscv_vdiv_vv_i16m1_tumu(__VA_ARGS__) |
| #define vdiv_vv_i16m2 | ( | ... | ) | __riscv_vdiv_vv_i16m2(__VA_ARGS__) |
| #define vdiv_vv_i16m2_m | ( | ... | ) | __riscv_vdiv_vv_i16m2_tumu(__VA_ARGS__) |
| #define vdiv_vv_i16m4 | ( | ... | ) | __riscv_vdiv_vv_i16m4(__VA_ARGS__) |
| #define vdiv_vv_i16m4_m | ( | ... | ) | __riscv_vdiv_vv_i16m4_tumu(__VA_ARGS__) |
| #define vdiv_vv_i16m8 | ( | ... | ) | __riscv_vdiv_vv_i16m8(__VA_ARGS__) |
| #define vdiv_vv_i16m8_m | ( | ... | ) | __riscv_vdiv_vv_i16m8_tumu(__VA_ARGS__) |
| #define vdiv_vv_i16mf2 | ( | ... | ) | __riscv_vdiv_vv_i16mf2(__VA_ARGS__) |
| #define vdiv_vv_i16mf2_m | ( | ... | ) | __riscv_vdiv_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vdiv_vv_i16mf4 | ( | ... | ) | __riscv_vdiv_vv_i16mf4(__VA_ARGS__) |
| #define vdiv_vv_i16mf4_m | ( | ... | ) | __riscv_vdiv_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vdiv_vv_i32m1 | ( | ... | ) | __riscv_vdiv_vv_i32m1(__VA_ARGS__) |
| #define vdiv_vv_i32m1_m | ( | ... | ) | __riscv_vdiv_vv_i32m1_tumu(__VA_ARGS__) |
| #define vdiv_vv_i32m2 | ( | ... | ) | __riscv_vdiv_vv_i32m2(__VA_ARGS__) |
| #define vdiv_vv_i32m2_m | ( | ... | ) | __riscv_vdiv_vv_i32m2_tumu(__VA_ARGS__) |
| #define vdiv_vv_i32m4 | ( | ... | ) | __riscv_vdiv_vv_i32m4(__VA_ARGS__) |
| #define vdiv_vv_i32m4_m | ( | ... | ) | __riscv_vdiv_vv_i32m4_tumu(__VA_ARGS__) |
| #define vdiv_vv_i32m8 | ( | ... | ) | __riscv_vdiv_vv_i32m8(__VA_ARGS__) |
| #define vdiv_vv_i32m8_m | ( | ... | ) | __riscv_vdiv_vv_i32m8_tumu(__VA_ARGS__) |
| #define vdiv_vv_i32mf2 | ( | ... | ) | __riscv_vdiv_vv_i32mf2(__VA_ARGS__) |
| #define vdiv_vv_i32mf2_m | ( | ... | ) | __riscv_vdiv_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vdiv_vv_i64m1 | ( | ... | ) | __riscv_vdiv_vv_i64m1(__VA_ARGS__) |
| #define vdiv_vv_i64m1_m | ( | ... | ) | __riscv_vdiv_vv_i64m1_tumu(__VA_ARGS__) |
| #define vdiv_vv_i64m2 | ( | ... | ) | __riscv_vdiv_vv_i64m2(__VA_ARGS__) |
| #define vdiv_vv_i64m2_m | ( | ... | ) | __riscv_vdiv_vv_i64m2_tumu(__VA_ARGS__) |
| #define vdiv_vv_i64m4 | ( | ... | ) | __riscv_vdiv_vv_i64m4(__VA_ARGS__) |
| #define vdiv_vv_i64m4_m | ( | ... | ) | __riscv_vdiv_vv_i64m4_tumu(__VA_ARGS__) |
| #define vdiv_vv_i64m8 | ( | ... | ) | __riscv_vdiv_vv_i64m8(__VA_ARGS__) |
| #define vdiv_vv_i64m8_m | ( | ... | ) | __riscv_vdiv_vv_i64m8_tumu(__VA_ARGS__) |
| #define vdiv_vv_i8m1 | ( | ... | ) | __riscv_vdiv_vv_i8m1(__VA_ARGS__) |
| #define vdiv_vv_i8m1_m | ( | ... | ) | __riscv_vdiv_vv_i8m1_tumu(__VA_ARGS__) |
| #define vdiv_vv_i8m2 | ( | ... | ) | __riscv_vdiv_vv_i8m2(__VA_ARGS__) |
| #define vdiv_vv_i8m2_m | ( | ... | ) | __riscv_vdiv_vv_i8m2_tumu(__VA_ARGS__) |
| #define vdiv_vv_i8m4 | ( | ... | ) | __riscv_vdiv_vv_i8m4(__VA_ARGS__) |
| #define vdiv_vv_i8m4_m | ( | ... | ) | __riscv_vdiv_vv_i8m4_tumu(__VA_ARGS__) |
| #define vdiv_vv_i8m8 | ( | ... | ) | __riscv_vdiv_vv_i8m8(__VA_ARGS__) |
| #define vdiv_vv_i8m8_m | ( | ... | ) | __riscv_vdiv_vv_i8m8_tumu(__VA_ARGS__) |
| #define vdiv_vv_i8mf2 | ( | ... | ) | __riscv_vdiv_vv_i8mf2(__VA_ARGS__) |
| #define vdiv_vv_i8mf2_m | ( | ... | ) | __riscv_vdiv_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vdiv_vv_i8mf4 | ( | ... | ) | __riscv_vdiv_vv_i8mf4(__VA_ARGS__) |
| #define vdiv_vv_i8mf4_m | ( | ... | ) | __riscv_vdiv_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vdiv_vv_i8mf8 | ( | ... | ) | __riscv_vdiv_vv_i8mf8(__VA_ARGS__) |
| #define vdiv_vv_i8mf8_m | ( | ... | ) | __riscv_vdiv_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vdiv_vx_i16m1 | ( | ... | ) | __riscv_vdiv_vx_i16m1(__VA_ARGS__) |
| #define vdiv_vx_i16m1_m | ( | ... | ) | __riscv_vdiv_vx_i16m1_tumu(__VA_ARGS__) |
| #define vdiv_vx_i16m2 | ( | ... | ) | __riscv_vdiv_vx_i16m2(__VA_ARGS__) |
| #define vdiv_vx_i16m2_m | ( | ... | ) | __riscv_vdiv_vx_i16m2_tumu(__VA_ARGS__) |
| #define vdiv_vx_i16m4 | ( | ... | ) | __riscv_vdiv_vx_i16m4(__VA_ARGS__) |
| #define vdiv_vx_i16m4_m | ( | ... | ) | __riscv_vdiv_vx_i16m4_tumu(__VA_ARGS__) |
| #define vdiv_vx_i16m8 | ( | ... | ) | __riscv_vdiv_vx_i16m8(__VA_ARGS__) |
| #define vdiv_vx_i16m8_m | ( | ... | ) | __riscv_vdiv_vx_i16m8_tumu(__VA_ARGS__) |
| #define vdiv_vx_i16mf2 | ( | ... | ) | __riscv_vdiv_vx_i16mf2(__VA_ARGS__) |
| #define vdiv_vx_i16mf2_m | ( | ... | ) | __riscv_vdiv_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vdiv_vx_i16mf4 | ( | ... | ) | __riscv_vdiv_vx_i16mf4(__VA_ARGS__) |
| #define vdiv_vx_i16mf4_m | ( | ... | ) | __riscv_vdiv_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vdiv_vx_i32m1 | ( | ... | ) | __riscv_vdiv_vx_i32m1(__VA_ARGS__) |
| #define vdiv_vx_i32m1_m | ( | ... | ) | __riscv_vdiv_vx_i32m1_tumu(__VA_ARGS__) |
| #define vdiv_vx_i32m2 | ( | ... | ) | __riscv_vdiv_vx_i32m2(__VA_ARGS__) |
| #define vdiv_vx_i32m2_m | ( | ... | ) | __riscv_vdiv_vx_i32m2_tumu(__VA_ARGS__) |
| #define vdiv_vx_i32m4 | ( | ... | ) | __riscv_vdiv_vx_i32m4(__VA_ARGS__) |
| #define vdiv_vx_i32m4_m | ( | ... | ) | __riscv_vdiv_vx_i32m4_tumu(__VA_ARGS__) |
| #define vdiv_vx_i32m8 | ( | ... | ) | __riscv_vdiv_vx_i32m8(__VA_ARGS__) |
| #define vdiv_vx_i32m8_m | ( | ... | ) | __riscv_vdiv_vx_i32m8_tumu(__VA_ARGS__) |
| #define vdiv_vx_i32mf2 | ( | ... | ) | __riscv_vdiv_vx_i32mf2(__VA_ARGS__) |
| #define vdiv_vx_i32mf2_m | ( | ... | ) | __riscv_vdiv_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vdiv_vx_i64m1 | ( | ... | ) | __riscv_vdiv_vx_i64m1(__VA_ARGS__) |
| #define vdiv_vx_i64m1_m | ( | ... | ) | __riscv_vdiv_vx_i64m1_tumu(__VA_ARGS__) |
| #define vdiv_vx_i64m2 | ( | ... | ) | __riscv_vdiv_vx_i64m2(__VA_ARGS__) |
| #define vdiv_vx_i64m2_m | ( | ... | ) | __riscv_vdiv_vx_i64m2_tumu(__VA_ARGS__) |
| #define vdiv_vx_i64m4 | ( | ... | ) | __riscv_vdiv_vx_i64m4(__VA_ARGS__) |
| #define vdiv_vx_i64m4_m | ( | ... | ) | __riscv_vdiv_vx_i64m4_tumu(__VA_ARGS__) |
| #define vdiv_vx_i64m8 | ( | ... | ) | __riscv_vdiv_vx_i64m8(__VA_ARGS__) |
| #define vdiv_vx_i64m8_m | ( | ... | ) | __riscv_vdiv_vx_i64m8_tumu(__VA_ARGS__) |
| #define vdiv_vx_i8m1 | ( | ... | ) | __riscv_vdiv_vx_i8m1(__VA_ARGS__) |
| #define vdiv_vx_i8m1_m | ( | ... | ) | __riscv_vdiv_vx_i8m1_tumu(__VA_ARGS__) |
| #define vdiv_vx_i8m2 | ( | ... | ) | __riscv_vdiv_vx_i8m2(__VA_ARGS__) |
| #define vdiv_vx_i8m2_m | ( | ... | ) | __riscv_vdiv_vx_i8m2_tumu(__VA_ARGS__) |
| #define vdiv_vx_i8m4 | ( | ... | ) | __riscv_vdiv_vx_i8m4(__VA_ARGS__) |
| #define vdiv_vx_i8m4_m | ( | ... | ) | __riscv_vdiv_vx_i8m4_tumu(__VA_ARGS__) |
| #define vdiv_vx_i8m8 | ( | ... | ) | __riscv_vdiv_vx_i8m8(__VA_ARGS__) |
| #define vdiv_vx_i8m8_m | ( | ... | ) | __riscv_vdiv_vx_i8m8_tumu(__VA_ARGS__) |
| #define vdiv_vx_i8mf2 | ( | ... | ) | __riscv_vdiv_vx_i8mf2(__VA_ARGS__) |
| #define vdiv_vx_i8mf2_m | ( | ... | ) | __riscv_vdiv_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vdiv_vx_i8mf4 | ( | ... | ) | __riscv_vdiv_vx_i8mf4(__VA_ARGS__) |
| #define vdiv_vx_i8mf4_m | ( | ... | ) | __riscv_vdiv_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vdiv_vx_i8mf8 | ( | ... | ) | __riscv_vdiv_vx_i8mf8(__VA_ARGS__) |
| #define vdiv_vx_i8mf8_m | ( | ... | ) | __riscv_vdiv_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vdivu_vv_u16m1 | ( | ... | ) | __riscv_vdivu_vv_u16m1(__VA_ARGS__) |
| #define vdivu_vv_u16m1_m | ( | ... | ) | __riscv_vdivu_vv_u16m1_tumu(__VA_ARGS__) |
| #define vdivu_vv_u16m2 | ( | ... | ) | __riscv_vdivu_vv_u16m2(__VA_ARGS__) |
| #define vdivu_vv_u16m2_m | ( | ... | ) | __riscv_vdivu_vv_u16m2_tumu(__VA_ARGS__) |
| #define vdivu_vv_u16m4 | ( | ... | ) | __riscv_vdivu_vv_u16m4(__VA_ARGS__) |
| #define vdivu_vv_u16m4_m | ( | ... | ) | __riscv_vdivu_vv_u16m4_tumu(__VA_ARGS__) |
| #define vdivu_vv_u16m8 | ( | ... | ) | __riscv_vdivu_vv_u16m8(__VA_ARGS__) |
| #define vdivu_vv_u16m8_m | ( | ... | ) | __riscv_vdivu_vv_u16m8_tumu(__VA_ARGS__) |
| #define vdivu_vv_u16mf2 | ( | ... | ) | __riscv_vdivu_vv_u16mf2(__VA_ARGS__) |
| #define vdivu_vv_u16mf2_m | ( | ... | ) | __riscv_vdivu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vdivu_vv_u16mf4 | ( | ... | ) | __riscv_vdivu_vv_u16mf4(__VA_ARGS__) |
| #define vdivu_vv_u16mf4_m | ( | ... | ) | __riscv_vdivu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vdivu_vv_u32m1 | ( | ... | ) | __riscv_vdivu_vv_u32m1(__VA_ARGS__) |
| #define vdivu_vv_u32m1_m | ( | ... | ) | __riscv_vdivu_vv_u32m1_tumu(__VA_ARGS__) |
| #define vdivu_vv_u32m2 | ( | ... | ) | __riscv_vdivu_vv_u32m2(__VA_ARGS__) |
| #define vdivu_vv_u32m2_m | ( | ... | ) | __riscv_vdivu_vv_u32m2_tumu(__VA_ARGS__) |
| #define vdivu_vv_u32m4 | ( | ... | ) | __riscv_vdivu_vv_u32m4(__VA_ARGS__) |
| #define vdivu_vv_u32m4_m | ( | ... | ) | __riscv_vdivu_vv_u32m4_tumu(__VA_ARGS__) |
| #define vdivu_vv_u32m8 | ( | ... | ) | __riscv_vdivu_vv_u32m8(__VA_ARGS__) |
| #define vdivu_vv_u32m8_m | ( | ... | ) | __riscv_vdivu_vv_u32m8_tumu(__VA_ARGS__) |
| #define vdivu_vv_u32mf2 | ( | ... | ) | __riscv_vdivu_vv_u32mf2(__VA_ARGS__) |
| #define vdivu_vv_u32mf2_m | ( | ... | ) | __riscv_vdivu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vdivu_vv_u64m1 | ( | ... | ) | __riscv_vdivu_vv_u64m1(__VA_ARGS__) |
| #define vdivu_vv_u64m1_m | ( | ... | ) | __riscv_vdivu_vv_u64m1_tumu(__VA_ARGS__) |
| #define vdivu_vv_u64m2 | ( | ... | ) | __riscv_vdivu_vv_u64m2(__VA_ARGS__) |
| #define vdivu_vv_u64m2_m | ( | ... | ) | __riscv_vdivu_vv_u64m2_tumu(__VA_ARGS__) |
| #define vdivu_vv_u64m4 | ( | ... | ) | __riscv_vdivu_vv_u64m4(__VA_ARGS__) |
| #define vdivu_vv_u64m4_m | ( | ... | ) | __riscv_vdivu_vv_u64m4_tumu(__VA_ARGS__) |
| #define vdivu_vv_u64m8 | ( | ... | ) | __riscv_vdivu_vv_u64m8(__VA_ARGS__) |
| #define vdivu_vv_u64m8_m | ( | ... | ) | __riscv_vdivu_vv_u64m8_tumu(__VA_ARGS__) |
| #define vdivu_vv_u8m1 | ( | ... | ) | __riscv_vdivu_vv_u8m1(__VA_ARGS__) |
| #define vdivu_vv_u8m1_m | ( | ... | ) | __riscv_vdivu_vv_u8m1_tumu(__VA_ARGS__) |
| #define vdivu_vv_u8m2 | ( | ... | ) | __riscv_vdivu_vv_u8m2(__VA_ARGS__) |
| #define vdivu_vv_u8m2_m | ( | ... | ) | __riscv_vdivu_vv_u8m2_tumu(__VA_ARGS__) |
| #define vdivu_vv_u8m4 | ( | ... | ) | __riscv_vdivu_vv_u8m4(__VA_ARGS__) |
| #define vdivu_vv_u8m4_m | ( | ... | ) | __riscv_vdivu_vv_u8m4_tumu(__VA_ARGS__) |
| #define vdivu_vv_u8m8 | ( | ... | ) | __riscv_vdivu_vv_u8m8(__VA_ARGS__) |
| #define vdivu_vv_u8m8_m | ( | ... | ) | __riscv_vdivu_vv_u8m8_tumu(__VA_ARGS__) |
| #define vdivu_vv_u8mf2 | ( | ... | ) | __riscv_vdivu_vv_u8mf2(__VA_ARGS__) |
| #define vdivu_vv_u8mf2_m | ( | ... | ) | __riscv_vdivu_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vdivu_vv_u8mf4 | ( | ... | ) | __riscv_vdivu_vv_u8mf4(__VA_ARGS__) |
| #define vdivu_vv_u8mf4_m | ( | ... | ) | __riscv_vdivu_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vdivu_vv_u8mf8 | ( | ... | ) | __riscv_vdivu_vv_u8mf8(__VA_ARGS__) |
| #define vdivu_vv_u8mf8_m | ( | ... | ) | __riscv_vdivu_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vdivu_vx_u16m1 | ( | ... | ) | __riscv_vdivu_vx_u16m1(__VA_ARGS__) |
| #define vdivu_vx_u16m1_m | ( | ... | ) | __riscv_vdivu_vx_u16m1_tumu(__VA_ARGS__) |
| #define vdivu_vx_u16m2 | ( | ... | ) | __riscv_vdivu_vx_u16m2(__VA_ARGS__) |
| #define vdivu_vx_u16m2_m | ( | ... | ) | __riscv_vdivu_vx_u16m2_tumu(__VA_ARGS__) |
| #define vdivu_vx_u16m4 | ( | ... | ) | __riscv_vdivu_vx_u16m4(__VA_ARGS__) |
| #define vdivu_vx_u16m4_m | ( | ... | ) | __riscv_vdivu_vx_u16m4_tumu(__VA_ARGS__) |
| #define vdivu_vx_u16m8 | ( | ... | ) | __riscv_vdivu_vx_u16m8(__VA_ARGS__) |
| #define vdivu_vx_u16m8_m | ( | ... | ) | __riscv_vdivu_vx_u16m8_tumu(__VA_ARGS__) |
| #define vdivu_vx_u16mf2 | ( | ... | ) | __riscv_vdivu_vx_u16mf2(__VA_ARGS__) |
| #define vdivu_vx_u16mf2_m | ( | ... | ) | __riscv_vdivu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vdivu_vx_u16mf4 | ( | ... | ) | __riscv_vdivu_vx_u16mf4(__VA_ARGS__) |
| #define vdivu_vx_u16mf4_m | ( | ... | ) | __riscv_vdivu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vdivu_vx_u32m1 | ( | ... | ) | __riscv_vdivu_vx_u32m1(__VA_ARGS__) |
| #define vdivu_vx_u32m1_m | ( | ... | ) | __riscv_vdivu_vx_u32m1_tumu(__VA_ARGS__) |
| #define vdivu_vx_u32m2 | ( | ... | ) | __riscv_vdivu_vx_u32m2(__VA_ARGS__) |
| #define vdivu_vx_u32m2_m | ( | ... | ) | __riscv_vdivu_vx_u32m2_tumu(__VA_ARGS__) |
| #define vdivu_vx_u32m4 | ( | ... | ) | __riscv_vdivu_vx_u32m4(__VA_ARGS__) |
| #define vdivu_vx_u32m4_m | ( | ... | ) | __riscv_vdivu_vx_u32m4_tumu(__VA_ARGS__) |
| #define vdivu_vx_u32m8 | ( | ... | ) | __riscv_vdivu_vx_u32m8(__VA_ARGS__) |
| #define vdivu_vx_u32m8_m | ( | ... | ) | __riscv_vdivu_vx_u32m8_tumu(__VA_ARGS__) |
| #define vdivu_vx_u32mf2 | ( | ... | ) | __riscv_vdivu_vx_u32mf2(__VA_ARGS__) |
| #define vdivu_vx_u32mf2_m | ( | ... | ) | __riscv_vdivu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vdivu_vx_u64m1 | ( | ... | ) | __riscv_vdivu_vx_u64m1(__VA_ARGS__) |
| #define vdivu_vx_u64m1_m | ( | ... | ) | __riscv_vdivu_vx_u64m1_tumu(__VA_ARGS__) |
| #define vdivu_vx_u64m2 | ( | ... | ) | __riscv_vdivu_vx_u64m2(__VA_ARGS__) |
| #define vdivu_vx_u64m2_m | ( | ... | ) | __riscv_vdivu_vx_u64m2_tumu(__VA_ARGS__) |
| #define vdivu_vx_u64m4 | ( | ... | ) | __riscv_vdivu_vx_u64m4(__VA_ARGS__) |
| #define vdivu_vx_u64m4_m | ( | ... | ) | __riscv_vdivu_vx_u64m4_tumu(__VA_ARGS__) |
| #define vdivu_vx_u64m8 | ( | ... | ) | __riscv_vdivu_vx_u64m8(__VA_ARGS__) |
| #define vdivu_vx_u64m8_m | ( | ... | ) | __riscv_vdivu_vx_u64m8_tumu(__VA_ARGS__) |
| #define vdivu_vx_u8m1 | ( | ... | ) | __riscv_vdivu_vx_u8m1(__VA_ARGS__) |
| #define vdivu_vx_u8m1_m | ( | ... | ) | __riscv_vdivu_vx_u8m1_tumu(__VA_ARGS__) |
| #define vdivu_vx_u8m2 | ( | ... | ) | __riscv_vdivu_vx_u8m2(__VA_ARGS__) |
| #define vdivu_vx_u8m2_m | ( | ... | ) | __riscv_vdivu_vx_u8m2_tumu(__VA_ARGS__) |
| #define vdivu_vx_u8m4 | ( | ... | ) | __riscv_vdivu_vx_u8m4(__VA_ARGS__) |
| #define vdivu_vx_u8m4_m | ( | ... | ) | __riscv_vdivu_vx_u8m4_tumu(__VA_ARGS__) |
| #define vdivu_vx_u8m8 | ( | ... | ) | __riscv_vdivu_vx_u8m8(__VA_ARGS__) |
| #define vdivu_vx_u8m8_m | ( | ... | ) | __riscv_vdivu_vx_u8m8_tumu(__VA_ARGS__) |
| #define vdivu_vx_u8mf2 | ( | ... | ) | __riscv_vdivu_vx_u8mf2(__VA_ARGS__) |
| #define vdivu_vx_u8mf2_m | ( | ... | ) | __riscv_vdivu_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vdivu_vx_u8mf4 | ( | ... | ) | __riscv_vdivu_vx_u8mf4(__VA_ARGS__) |
| #define vdivu_vx_u8mf4_m | ( | ... | ) | __riscv_vdivu_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vdivu_vx_u8mf8 | ( | ... | ) | __riscv_vdivu_vx_u8mf8(__VA_ARGS__) |
| #define vdivu_vx_u8mf8_m | ( | ... | ) | __riscv_vdivu_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vfabs_v_f16m1 | ( | ... | ) | __riscv_vfabs_v_f16m1(__VA_ARGS__) |
| #define vfabs_v_f16m1_m | ( | ... | ) | __riscv_vfabs_v_f16m1_tumu(__VA_ARGS__) |
| #define vfabs_v_f16m2 | ( | ... | ) | __riscv_vfabs_v_f16m2(__VA_ARGS__) |
| #define vfabs_v_f16m2_m | ( | ... | ) | __riscv_vfabs_v_f16m2_tumu(__VA_ARGS__) |
| #define vfabs_v_f16m4 | ( | ... | ) | __riscv_vfabs_v_f16m4(__VA_ARGS__) |
| #define vfabs_v_f16m4_m | ( | ... | ) | __riscv_vfabs_v_f16m4_tumu(__VA_ARGS__) |
| #define vfabs_v_f16m8 | ( | ... | ) | __riscv_vfabs_v_f16m8(__VA_ARGS__) |
| #define vfabs_v_f16m8_m | ( | ... | ) | __riscv_vfabs_v_f16m8_tumu(__VA_ARGS__) |
| #define vfabs_v_f16mf2 | ( | ... | ) | __riscv_vfabs_v_f16mf2(__VA_ARGS__) |
| #define vfabs_v_f16mf2_m | ( | ... | ) | __riscv_vfabs_v_f16mf2_tumu(__VA_ARGS__) |
| #define vfabs_v_f16mf4 | ( | ... | ) | __riscv_vfabs_v_f16mf4(__VA_ARGS__) |
| #define vfabs_v_f16mf4_m | ( | ... | ) | __riscv_vfabs_v_f16mf4_tumu(__VA_ARGS__) |
| #define vfabs_v_f32m1 | ( | ... | ) | __riscv_vfabs_v_f32m1(__VA_ARGS__) |
| #define vfabs_v_f32m1_m | ( | ... | ) | __riscv_vfabs_v_f32m1_tumu(__VA_ARGS__) |
| #define vfabs_v_f32m2 | ( | ... | ) | __riscv_vfabs_v_f32m2(__VA_ARGS__) |
| #define vfabs_v_f32m2_m | ( | ... | ) | __riscv_vfabs_v_f32m2_tumu(__VA_ARGS__) |
| #define vfabs_v_f32m4 | ( | ... | ) | __riscv_vfabs_v_f32m4(__VA_ARGS__) |
| #define vfabs_v_f32m4_m | ( | ... | ) | __riscv_vfabs_v_f32m4_tumu(__VA_ARGS__) |
| #define vfabs_v_f32m8 | ( | ... | ) | __riscv_vfabs_v_f32m8(__VA_ARGS__) |
| #define vfabs_v_f32m8_m | ( | ... | ) | __riscv_vfabs_v_f32m8_tumu(__VA_ARGS__) |
| #define vfabs_v_f32mf2 | ( | ... | ) | __riscv_vfabs_v_f32mf2(__VA_ARGS__) |
| #define vfabs_v_f32mf2_m | ( | ... | ) | __riscv_vfabs_v_f32mf2_tumu(__VA_ARGS__) |
| #define vfabs_v_f64m1 | ( | ... | ) | __riscv_vfabs_v_f64m1(__VA_ARGS__) |
| #define vfabs_v_f64m1_m | ( | ... | ) | __riscv_vfabs_v_f64m1_tumu(__VA_ARGS__) |
| #define vfabs_v_f64m2 | ( | ... | ) | __riscv_vfabs_v_f64m2(__VA_ARGS__) |
| #define vfabs_v_f64m2_m | ( | ... | ) | __riscv_vfabs_v_f64m2_tumu(__VA_ARGS__) |
| #define vfabs_v_f64m4 | ( | ... | ) | __riscv_vfabs_v_f64m4(__VA_ARGS__) |
| #define vfabs_v_f64m4_m | ( | ... | ) | __riscv_vfabs_v_f64m4_tumu(__VA_ARGS__) |
| #define vfabs_v_f64m8 | ( | ... | ) | __riscv_vfabs_v_f64m8(__VA_ARGS__) |
| #define vfabs_v_f64m8_m | ( | ... | ) | __riscv_vfabs_v_f64m8_tumu(__VA_ARGS__) |
| #define vfadd_vf_f16m1 | ( | ... | ) | __riscv_vfadd_vf_f16m1(__VA_ARGS__) |
| #define vfadd_vf_f16m1_m | ( | ... | ) | __riscv_vfadd_vf_f16m1_tumu(__VA_ARGS__) |
| #define vfadd_vf_f16m2 | ( | ... | ) | __riscv_vfadd_vf_f16m2(__VA_ARGS__) |
| #define vfadd_vf_f16m2_m | ( | ... | ) | __riscv_vfadd_vf_f16m2_tumu(__VA_ARGS__) |
| #define vfadd_vf_f16m4 | ( | ... | ) | __riscv_vfadd_vf_f16m4(__VA_ARGS__) |
| #define vfadd_vf_f16m4_m | ( | ... | ) | __riscv_vfadd_vf_f16m4_tumu(__VA_ARGS__) |
| #define vfadd_vf_f16m8 | ( | ... | ) | __riscv_vfadd_vf_f16m8(__VA_ARGS__) |
| #define vfadd_vf_f16m8_m | ( | ... | ) | __riscv_vfadd_vf_f16m8_tumu(__VA_ARGS__) |
| #define vfadd_vf_f16mf2 | ( | ... | ) | __riscv_vfadd_vf_f16mf2(__VA_ARGS__) |
| #define vfadd_vf_f16mf2_m | ( | ... | ) | __riscv_vfadd_vf_f16mf2_tumu(__VA_ARGS__) |
| #define vfadd_vf_f16mf4 | ( | ... | ) | __riscv_vfadd_vf_f16mf4(__VA_ARGS__) |
| #define vfadd_vf_f16mf4_m | ( | ... | ) | __riscv_vfadd_vf_f16mf4_tumu(__VA_ARGS__) |
| #define vfadd_vf_f32m1 | ( | ... | ) | __riscv_vfadd_vf_f32m1(__VA_ARGS__) |
| #define vfadd_vf_f32m1_m | ( | ... | ) | __riscv_vfadd_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfadd_vf_f32m2 | ( | ... | ) | __riscv_vfadd_vf_f32m2(__VA_ARGS__) |
| #define vfadd_vf_f32m2_m | ( | ... | ) | __riscv_vfadd_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfadd_vf_f32m4 | ( | ... | ) | __riscv_vfadd_vf_f32m4(__VA_ARGS__) |
| #define vfadd_vf_f32m4_m | ( | ... | ) | __riscv_vfadd_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfadd_vf_f32m8 | ( | ... | ) | __riscv_vfadd_vf_f32m8(__VA_ARGS__) |
| #define vfadd_vf_f32m8_m | ( | ... | ) | __riscv_vfadd_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfadd_vf_f32mf2 | ( | ... | ) | __riscv_vfadd_vf_f32mf2(__VA_ARGS__) |
| #define vfadd_vf_f32mf2_m | ( | ... | ) | __riscv_vfadd_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfadd_vf_f64m1 | ( | ... | ) | __riscv_vfadd_vf_f64m1(__VA_ARGS__) |
| #define vfadd_vf_f64m1_m | ( | ... | ) | __riscv_vfadd_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfadd_vf_f64m2 | ( | ... | ) | __riscv_vfadd_vf_f64m2(__VA_ARGS__) |
| #define vfadd_vf_f64m2_m | ( | ... | ) | __riscv_vfadd_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfadd_vf_f64m4 | ( | ... | ) | __riscv_vfadd_vf_f64m4(__VA_ARGS__) |
| #define vfadd_vf_f64m4_m | ( | ... | ) | __riscv_vfadd_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfadd_vf_f64m8 | ( | ... | ) | __riscv_vfadd_vf_f64m8(__VA_ARGS__) |
| #define vfadd_vf_f64m8_m | ( | ... | ) | __riscv_vfadd_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfadd_vv_f16m1 | ( | ... | ) | __riscv_vfadd_vv_f16m1(__VA_ARGS__) |
| #define vfadd_vv_f16m1_m | ( | ... | ) | __riscv_vfadd_vv_f16m1_tumu(__VA_ARGS__) |
| #define vfadd_vv_f16m2 | ( | ... | ) | __riscv_vfadd_vv_f16m2(__VA_ARGS__) |
| #define vfadd_vv_f16m2_m | ( | ... | ) | __riscv_vfadd_vv_f16m2_tumu(__VA_ARGS__) |
| #define vfadd_vv_f16m4 | ( | ... | ) | __riscv_vfadd_vv_f16m4(__VA_ARGS__) |
| #define vfadd_vv_f16m4_m | ( | ... | ) | __riscv_vfadd_vv_f16m4_tumu(__VA_ARGS__) |
| #define vfadd_vv_f16m8 | ( | ... | ) | __riscv_vfadd_vv_f16m8(__VA_ARGS__) |
| #define vfadd_vv_f16m8_m | ( | ... | ) | __riscv_vfadd_vv_f16m8_tumu(__VA_ARGS__) |
| #define vfadd_vv_f16mf2 | ( | ... | ) | __riscv_vfadd_vv_f16mf2(__VA_ARGS__) |
| #define vfadd_vv_f16mf2_m | ( | ... | ) | __riscv_vfadd_vv_f16mf2_tumu(__VA_ARGS__) |
| #define vfadd_vv_f16mf4 | ( | ... | ) | __riscv_vfadd_vv_f16mf4(__VA_ARGS__) |
| #define vfadd_vv_f16mf4_m | ( | ... | ) | __riscv_vfadd_vv_f16mf4_tumu(__VA_ARGS__) |
| #define vfadd_vv_f32m1 | ( | ... | ) | __riscv_vfadd_vv_f32m1(__VA_ARGS__) |
| #define vfadd_vv_f32m1_m | ( | ... | ) | __riscv_vfadd_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfadd_vv_f32m2 | ( | ... | ) | __riscv_vfadd_vv_f32m2(__VA_ARGS__) |
| #define vfadd_vv_f32m2_m | ( | ... | ) | __riscv_vfadd_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfadd_vv_f32m4 | ( | ... | ) | __riscv_vfadd_vv_f32m4(__VA_ARGS__) |
| #define vfadd_vv_f32m4_m | ( | ... | ) | __riscv_vfadd_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfadd_vv_f32m8 | ( | ... | ) | __riscv_vfadd_vv_f32m8(__VA_ARGS__) |
| #define vfadd_vv_f32m8_m | ( | ... | ) | __riscv_vfadd_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfadd_vv_f32mf2 | ( | ... | ) | __riscv_vfadd_vv_f32mf2(__VA_ARGS__) |
| #define vfadd_vv_f32mf2_m | ( | ... | ) | __riscv_vfadd_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfadd_vv_f64m1 | ( | ... | ) | __riscv_vfadd_vv_f64m1(__VA_ARGS__) |
| #define vfadd_vv_f64m1_m | ( | ... | ) | __riscv_vfadd_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfadd_vv_f64m2 | ( | ... | ) | __riscv_vfadd_vv_f64m2(__VA_ARGS__) |
| #define vfadd_vv_f64m2_m | ( | ... | ) | __riscv_vfadd_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfadd_vv_f64m4 | ( | ... | ) | __riscv_vfadd_vv_f64m4(__VA_ARGS__) |
| #define vfadd_vv_f64m4_m | ( | ... | ) | __riscv_vfadd_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfadd_vv_f64m8 | ( | ... | ) | __riscv_vfadd_vv_f64m8(__VA_ARGS__) |
| #define vfadd_vv_f64m8_m | ( | ... | ) | __riscv_vfadd_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfclass_v_u16m1 | ( | ... | ) | __riscv_vfclass_v_u16m1(__VA_ARGS__) |
| #define vfclass_v_u16m1_m | ( | ... | ) | __riscv_vfclass_v_u16m1_tumu(__VA_ARGS__) |
| #define vfclass_v_u16m2 | ( | ... | ) | __riscv_vfclass_v_u16m2(__VA_ARGS__) |
| #define vfclass_v_u16m2_m | ( | ... | ) | __riscv_vfclass_v_u16m2_tumu(__VA_ARGS__) |
| #define vfclass_v_u16m4 | ( | ... | ) | __riscv_vfclass_v_u16m4(__VA_ARGS__) |
| #define vfclass_v_u16m4_m | ( | ... | ) | __riscv_vfclass_v_u16m4_tumu(__VA_ARGS__) |
| #define vfclass_v_u16m8 | ( | ... | ) | __riscv_vfclass_v_u16m8(__VA_ARGS__) |
| #define vfclass_v_u16m8_m | ( | ... | ) | __riscv_vfclass_v_u16m8_tumu(__VA_ARGS__) |
| #define vfclass_v_u16mf2 | ( | ... | ) | __riscv_vfclass_v_u16mf2(__VA_ARGS__) |
| #define vfclass_v_u16mf2_m | ( | ... | ) | __riscv_vfclass_v_u16mf2_tumu(__VA_ARGS__) |
| #define vfclass_v_u16mf4 | ( | ... | ) | __riscv_vfclass_v_u16mf4(__VA_ARGS__) |
| #define vfclass_v_u16mf4_m | ( | ... | ) | __riscv_vfclass_v_u16mf4_tumu(__VA_ARGS__) |
| #define vfclass_v_u32m1 | ( | ... | ) | __riscv_vfclass_v_u32m1(__VA_ARGS__) |
| #define vfclass_v_u32m1_m | ( | ... | ) | __riscv_vfclass_v_u32m1_tumu(__VA_ARGS__) |
| #define vfclass_v_u32m2 | ( | ... | ) | __riscv_vfclass_v_u32m2(__VA_ARGS__) |
| #define vfclass_v_u32m2_m | ( | ... | ) | __riscv_vfclass_v_u32m2_tumu(__VA_ARGS__) |
| #define vfclass_v_u32m4 | ( | ... | ) | __riscv_vfclass_v_u32m4(__VA_ARGS__) |
| #define vfclass_v_u32m4_m | ( | ... | ) | __riscv_vfclass_v_u32m4_tumu(__VA_ARGS__) |
| #define vfclass_v_u32m8 | ( | ... | ) | __riscv_vfclass_v_u32m8(__VA_ARGS__) |
| #define vfclass_v_u32m8_m | ( | ... | ) | __riscv_vfclass_v_u32m8_tumu(__VA_ARGS__) |
| #define vfclass_v_u32mf2 | ( | ... | ) | __riscv_vfclass_v_u32mf2(__VA_ARGS__) |
| #define vfclass_v_u32mf2_m | ( | ... | ) | __riscv_vfclass_v_u32mf2_tumu(__VA_ARGS__) |
| #define vfclass_v_u64m1 | ( | ... | ) | __riscv_vfclass_v_u64m1(__VA_ARGS__) |
| #define vfclass_v_u64m1_m | ( | ... | ) | __riscv_vfclass_v_u64m1_tumu(__VA_ARGS__) |
| #define vfclass_v_u64m2 | ( | ... | ) | __riscv_vfclass_v_u64m2(__VA_ARGS__) |
| #define vfclass_v_u64m2_m | ( | ... | ) | __riscv_vfclass_v_u64m2_tumu(__VA_ARGS__) |
| #define vfclass_v_u64m4 | ( | ... | ) | __riscv_vfclass_v_u64m4(__VA_ARGS__) |
| #define vfclass_v_u64m4_m | ( | ... | ) | __riscv_vfclass_v_u64m4_tumu(__VA_ARGS__) |
| #define vfclass_v_u64m8 | ( | ... | ) | __riscv_vfclass_v_u64m8(__VA_ARGS__) |
| #define vfclass_v_u64m8_m | ( | ... | ) | __riscv_vfclass_v_u64m8_tumu(__VA_ARGS__) |
| #define vfcvt_f_x_v_f16m1 | ( | ... | ) | __riscv_vfcvt_f_x_v_f16m1(__VA_ARGS__) |
| #define vfcvt_f_x_v_f16m1_m | ( | ... | ) | __riscv_vfcvt_f_x_v_f16m1_tumu(__VA_ARGS__) |
| #define vfcvt_f_x_v_f16m2 | ( | ... | ) | __riscv_vfcvt_f_x_v_f16m2(__VA_ARGS__) |
| #define vfcvt_f_x_v_f16m2_m | ( | ... | ) | __riscv_vfcvt_f_x_v_f16m2_tumu(__VA_ARGS__) |
| #define vfcvt_f_x_v_f16m4 | ( | ... | ) | __riscv_vfcvt_f_x_v_f16m4(__VA_ARGS__) |
| #define vfcvt_f_x_v_f16m4_m | ( | ... | ) | __riscv_vfcvt_f_x_v_f16m4_tumu(__VA_ARGS__) |
| #define vfcvt_f_x_v_f16m8 | ( | ... | ) | __riscv_vfcvt_f_x_v_f16m8(__VA_ARGS__) |
| #define vfcvt_f_x_v_f16m8_m | ( | ... | ) | __riscv_vfcvt_f_x_v_f16m8_tumu(__VA_ARGS__) |
| #define vfcvt_f_x_v_f16mf2 | ( | ... | ) | __riscv_vfcvt_f_x_v_f16mf2(__VA_ARGS__) |
| #define vfcvt_f_x_v_f16mf2_m | ( | ... | ) | __riscv_vfcvt_f_x_v_f16mf2_tumu(__VA_ARGS__) |
| #define vfcvt_f_x_v_f16mf4 | ( | ... | ) | __riscv_vfcvt_f_x_v_f16mf4(__VA_ARGS__) |
| #define vfcvt_f_x_v_f16mf4_m | ( | ... | ) | __riscv_vfcvt_f_x_v_f16mf4_tumu(__VA_ARGS__) |
| #define vfcvt_f_x_v_f32m1 | ( | ... | ) | __riscv_vfcvt_f_x_v_f32m1(__VA_ARGS__) |
| #define vfcvt_f_x_v_f32m1_m | ( | ... | ) | __riscv_vfcvt_f_x_v_f32m1_tumu(__VA_ARGS__) |
| #define vfcvt_f_x_v_f32m2 | ( | ... | ) | __riscv_vfcvt_f_x_v_f32m2(__VA_ARGS__) |
| #define vfcvt_f_x_v_f32m2_m | ( | ... | ) | __riscv_vfcvt_f_x_v_f32m2_tumu(__VA_ARGS__) |
| #define vfcvt_f_x_v_f32m4 | ( | ... | ) | __riscv_vfcvt_f_x_v_f32m4(__VA_ARGS__) |
| #define vfcvt_f_x_v_f32m4_m | ( | ... | ) | __riscv_vfcvt_f_x_v_f32m4_tumu(__VA_ARGS__) |
| #define vfcvt_f_x_v_f32m8 | ( | ... | ) | __riscv_vfcvt_f_x_v_f32m8(__VA_ARGS__) |
| #define vfcvt_f_x_v_f32m8_m | ( | ... | ) | __riscv_vfcvt_f_x_v_f32m8_tumu(__VA_ARGS__) |
| #define vfcvt_f_x_v_f32mf2 | ( | ... | ) | __riscv_vfcvt_f_x_v_f32mf2(__VA_ARGS__) |
| #define vfcvt_f_x_v_f32mf2_m | ( | ... | ) | __riscv_vfcvt_f_x_v_f32mf2_tumu(__VA_ARGS__) |
| #define vfcvt_f_x_v_f64m1 | ( | ... | ) | __riscv_vfcvt_f_x_v_f64m1(__VA_ARGS__) |
| #define vfcvt_f_x_v_f64m1_m | ( | ... | ) | __riscv_vfcvt_f_x_v_f64m1_tumu(__VA_ARGS__) |
| #define vfcvt_f_x_v_f64m2 | ( | ... | ) | __riscv_vfcvt_f_x_v_f64m2(__VA_ARGS__) |
| #define vfcvt_f_x_v_f64m2_m | ( | ... | ) | __riscv_vfcvt_f_x_v_f64m2_tumu(__VA_ARGS__) |
| #define vfcvt_f_x_v_f64m4 | ( | ... | ) | __riscv_vfcvt_f_x_v_f64m4(__VA_ARGS__) |
| #define vfcvt_f_x_v_f64m4_m | ( | ... | ) | __riscv_vfcvt_f_x_v_f64m4_tumu(__VA_ARGS__) |
| #define vfcvt_f_x_v_f64m8 | ( | ... | ) | __riscv_vfcvt_f_x_v_f64m8(__VA_ARGS__) |
| #define vfcvt_f_x_v_f64m8_m | ( | ... | ) | __riscv_vfcvt_f_x_v_f64m8_tumu(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f16m1 | ( | ... | ) | __riscv_vfcvt_f_xu_v_f16m1(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f16m1_m | ( | ... | ) | __riscv_vfcvt_f_xu_v_f16m1_tumu(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f16m2 | ( | ... | ) | __riscv_vfcvt_f_xu_v_f16m2(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f16m2_m | ( | ... | ) | __riscv_vfcvt_f_xu_v_f16m2_tumu(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f16m4 | ( | ... | ) | __riscv_vfcvt_f_xu_v_f16m4(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f16m4_m | ( | ... | ) | __riscv_vfcvt_f_xu_v_f16m4_tumu(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f16m8 | ( | ... | ) | __riscv_vfcvt_f_xu_v_f16m8(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f16m8_m | ( | ... | ) | __riscv_vfcvt_f_xu_v_f16m8_tumu(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f16mf2 | ( | ... | ) | __riscv_vfcvt_f_xu_v_f16mf2(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f16mf2_m | ( | ... | ) | __riscv_vfcvt_f_xu_v_f16mf2_tumu(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f16mf4 | ( | ... | ) | __riscv_vfcvt_f_xu_v_f16mf4(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f16mf4_m | ( | ... | ) | __riscv_vfcvt_f_xu_v_f16mf4_tumu(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f32m1 | ( | ... | ) | __riscv_vfcvt_f_xu_v_f32m1(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f32m1_m | ( | ... | ) | __riscv_vfcvt_f_xu_v_f32m1_tumu(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f32m2 | ( | ... | ) | __riscv_vfcvt_f_xu_v_f32m2(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f32m2_m | ( | ... | ) | __riscv_vfcvt_f_xu_v_f32m2_tumu(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f32m4 | ( | ... | ) | __riscv_vfcvt_f_xu_v_f32m4(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f32m4_m | ( | ... | ) | __riscv_vfcvt_f_xu_v_f32m4_tumu(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f32m8 | ( | ... | ) | __riscv_vfcvt_f_xu_v_f32m8(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f32m8_m | ( | ... | ) | __riscv_vfcvt_f_xu_v_f32m8_tumu(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f32mf2 | ( | ... | ) | __riscv_vfcvt_f_xu_v_f32mf2(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f32mf2_m | ( | ... | ) | __riscv_vfcvt_f_xu_v_f32mf2_tumu(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f64m1 | ( | ... | ) | __riscv_vfcvt_f_xu_v_f64m1(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f64m1_m | ( | ... | ) | __riscv_vfcvt_f_xu_v_f64m1_tumu(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f64m2 | ( | ... | ) | __riscv_vfcvt_f_xu_v_f64m2(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f64m2_m | ( | ... | ) | __riscv_vfcvt_f_xu_v_f64m2_tumu(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f64m4 | ( | ... | ) | __riscv_vfcvt_f_xu_v_f64m4(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f64m4_m | ( | ... | ) | __riscv_vfcvt_f_xu_v_f64m4_tumu(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f64m8 | ( | ... | ) | __riscv_vfcvt_f_xu_v_f64m8(__VA_ARGS__) |
| #define vfcvt_f_xu_v_f64m8_m | ( | ... | ) | __riscv_vfcvt_f_xu_v_f64m8_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i16m1 | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i16m1(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i16m1_m | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i16m1_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i16m2 | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i16m2(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i16m2_m | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i16m2_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i16m4 | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i16m4(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i16m4_m | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i16m4_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i16m8 | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i16m8(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i16m8_m | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i16m8_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i16mf2 | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i16mf2(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i16mf2_m | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i16mf2_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i16mf4 | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i16mf4(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i16mf4_m | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i16mf4_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i32m1 | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i32m1(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i32m1_m | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i32m1_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i32m2 | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i32m2(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i32m2_m | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i32m2_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i32m4 | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i32m4(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i32m4_m | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i32m4_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i32m8 | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i32m8(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i32m8_m | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i32m8_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i32mf2 | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i32mf2(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i32mf2_m | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i32mf2_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i64m1 | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i64m1(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i64m1_m | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i64m1_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i64m2 | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i64m2(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i64m2_m | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i64m2_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i64m4 | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i64m4(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i64m4_m | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i64m4_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i64m8 | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i64m8(__VA_ARGS__) |
| #define vfcvt_rtz_x_f_v_i64m8_m | ( | ... | ) | __riscv_vfcvt_rtz_x_f_v_i64m8_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u16m1 | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u16m1(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u16m1_m | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u16m1_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u16m2 | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u16m2(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u16m2_m | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u16m2_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u16m4 | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u16m4(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u16m4_m | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u16m4_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u16m8 | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u16m8(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u16m8_m | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u16m8_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u16mf2 | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u16mf2(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u16mf2_m | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u16mf2_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u16mf4 | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u16mf4(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u16mf4_m | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u16mf4_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u32m1 | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u32m1(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u32m1_m | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u32m1_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u32m2 | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u32m2(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u32m2_m | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u32m2_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u32m4 | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u32m4(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u32m4_m | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u32m4_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u32m8 | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u32m8(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u32m8_m | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u32m8_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u32mf2 | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u32mf2(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u32mf2_m | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u32mf2_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u64m1 | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u64m1(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u64m1_m | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u64m1_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u64m2 | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u64m2(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u64m2_m | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u64m2_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u64m4 | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u64m4(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u64m4_m | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u64m4_tumu(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u64m8 | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u64m8(__VA_ARGS__) |
| #define vfcvt_rtz_xu_f_v_u64m8_m | ( | ... | ) | __riscv_vfcvt_rtz_xu_f_v_u64m8_tumu(__VA_ARGS__) |
| #define vfcvt_x_f_v_i16m1 | ( | ... | ) | __riscv_vfcvt_x_f_v_i16m1(__VA_ARGS__) |
| #define vfcvt_x_f_v_i16m1_m | ( | ... | ) | __riscv_vfcvt_x_f_v_i16m1_tumu(__VA_ARGS__) |
| #define vfcvt_x_f_v_i16m2 | ( | ... | ) | __riscv_vfcvt_x_f_v_i16m2(__VA_ARGS__) |
| #define vfcvt_x_f_v_i16m2_m | ( | ... | ) | __riscv_vfcvt_x_f_v_i16m2_tumu(__VA_ARGS__) |
| #define vfcvt_x_f_v_i16m4 | ( | ... | ) | __riscv_vfcvt_x_f_v_i16m4(__VA_ARGS__) |
| #define vfcvt_x_f_v_i16m4_m | ( | ... | ) | __riscv_vfcvt_x_f_v_i16m4_tumu(__VA_ARGS__) |
| #define vfcvt_x_f_v_i16m8 | ( | ... | ) | __riscv_vfcvt_x_f_v_i16m8(__VA_ARGS__) |
| #define vfcvt_x_f_v_i16m8_m | ( | ... | ) | __riscv_vfcvt_x_f_v_i16m8_tumu(__VA_ARGS__) |
| #define vfcvt_x_f_v_i16mf2 | ( | ... | ) | __riscv_vfcvt_x_f_v_i16mf2(__VA_ARGS__) |
| #define vfcvt_x_f_v_i16mf2_m | ( | ... | ) | __riscv_vfcvt_x_f_v_i16mf2_tumu(__VA_ARGS__) |
| #define vfcvt_x_f_v_i16mf4 | ( | ... | ) | __riscv_vfcvt_x_f_v_i16mf4(__VA_ARGS__) |
| #define vfcvt_x_f_v_i16mf4_m | ( | ... | ) | __riscv_vfcvt_x_f_v_i16mf4_tumu(__VA_ARGS__) |
| #define vfcvt_x_f_v_i32m1 | ( | ... | ) | __riscv_vfcvt_x_f_v_i32m1(__VA_ARGS__) |
| #define vfcvt_x_f_v_i32m1_m | ( | ... | ) | __riscv_vfcvt_x_f_v_i32m1_tumu(__VA_ARGS__) |
| #define vfcvt_x_f_v_i32m2 | ( | ... | ) | __riscv_vfcvt_x_f_v_i32m2(__VA_ARGS__) |
| #define vfcvt_x_f_v_i32m2_m | ( | ... | ) | __riscv_vfcvt_x_f_v_i32m2_tumu(__VA_ARGS__) |
| #define vfcvt_x_f_v_i32m4 | ( | ... | ) | __riscv_vfcvt_x_f_v_i32m4(__VA_ARGS__) |
| #define vfcvt_x_f_v_i32m4_m | ( | ... | ) | __riscv_vfcvt_x_f_v_i32m4_tumu(__VA_ARGS__) |
| #define vfcvt_x_f_v_i32m8 | ( | ... | ) | __riscv_vfcvt_x_f_v_i32m8(__VA_ARGS__) |
| #define vfcvt_x_f_v_i32m8_m | ( | ... | ) | __riscv_vfcvt_x_f_v_i32m8_tumu(__VA_ARGS__) |
| #define vfcvt_x_f_v_i32mf2 | ( | ... | ) | __riscv_vfcvt_x_f_v_i32mf2(__VA_ARGS__) |
| #define vfcvt_x_f_v_i32mf2_m | ( | ... | ) | __riscv_vfcvt_x_f_v_i32mf2_tumu(__VA_ARGS__) |
| #define vfcvt_x_f_v_i64m1 | ( | ... | ) | __riscv_vfcvt_x_f_v_i64m1(__VA_ARGS__) |
| #define vfcvt_x_f_v_i64m1_m | ( | ... | ) | __riscv_vfcvt_x_f_v_i64m1_tumu(__VA_ARGS__) |
| #define vfcvt_x_f_v_i64m2 | ( | ... | ) | __riscv_vfcvt_x_f_v_i64m2(__VA_ARGS__) |
| #define vfcvt_x_f_v_i64m2_m | ( | ... | ) | __riscv_vfcvt_x_f_v_i64m2_tumu(__VA_ARGS__) |
| #define vfcvt_x_f_v_i64m4 | ( | ... | ) | __riscv_vfcvt_x_f_v_i64m4(__VA_ARGS__) |
| #define vfcvt_x_f_v_i64m4_m | ( | ... | ) | __riscv_vfcvt_x_f_v_i64m4_tumu(__VA_ARGS__) |
| #define vfcvt_x_f_v_i64m8 | ( | ... | ) | __riscv_vfcvt_x_f_v_i64m8(__VA_ARGS__) |
| #define vfcvt_x_f_v_i64m8_m | ( | ... | ) | __riscv_vfcvt_x_f_v_i64m8_tumu(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u16m1 | ( | ... | ) | __riscv_vfcvt_xu_f_v_u16m1(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u16m1_m | ( | ... | ) | __riscv_vfcvt_xu_f_v_u16m1_tumu(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u16m2 | ( | ... | ) | __riscv_vfcvt_xu_f_v_u16m2(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u16m2_m | ( | ... | ) | __riscv_vfcvt_xu_f_v_u16m2_tumu(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u16m4 | ( | ... | ) | __riscv_vfcvt_xu_f_v_u16m4(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u16m4_m | ( | ... | ) | __riscv_vfcvt_xu_f_v_u16m4_tumu(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u16m8 | ( | ... | ) | __riscv_vfcvt_xu_f_v_u16m8(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u16m8_m | ( | ... | ) | __riscv_vfcvt_xu_f_v_u16m8_tumu(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u16mf2 | ( | ... | ) | __riscv_vfcvt_xu_f_v_u16mf2(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u16mf2_m | ( | ... | ) | __riscv_vfcvt_xu_f_v_u16mf2_tumu(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u16mf4 | ( | ... | ) | __riscv_vfcvt_xu_f_v_u16mf4(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u16mf4_m | ( | ... | ) | __riscv_vfcvt_xu_f_v_u16mf4_tumu(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u32m1 | ( | ... | ) | __riscv_vfcvt_xu_f_v_u32m1(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u32m1_m | ( | ... | ) | __riscv_vfcvt_xu_f_v_u32m1_tumu(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u32m2 | ( | ... | ) | __riscv_vfcvt_xu_f_v_u32m2(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u32m2_m | ( | ... | ) | __riscv_vfcvt_xu_f_v_u32m2_tumu(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u32m4 | ( | ... | ) | __riscv_vfcvt_xu_f_v_u32m4(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u32m4_m | ( | ... | ) | __riscv_vfcvt_xu_f_v_u32m4_tumu(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u32m8 | ( | ... | ) | __riscv_vfcvt_xu_f_v_u32m8(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u32m8_m | ( | ... | ) | __riscv_vfcvt_xu_f_v_u32m8_tumu(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u32mf2 | ( | ... | ) | __riscv_vfcvt_xu_f_v_u32mf2(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u32mf2_m | ( | ... | ) | __riscv_vfcvt_xu_f_v_u32mf2_tumu(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u64m1 | ( | ... | ) | __riscv_vfcvt_xu_f_v_u64m1(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u64m1_m | ( | ... | ) | __riscv_vfcvt_xu_f_v_u64m1_tumu(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u64m2 | ( | ... | ) | __riscv_vfcvt_xu_f_v_u64m2(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u64m2_m | ( | ... | ) | __riscv_vfcvt_xu_f_v_u64m2_tumu(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u64m4 | ( | ... | ) | __riscv_vfcvt_xu_f_v_u64m4(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u64m4_m | ( | ... | ) | __riscv_vfcvt_xu_f_v_u64m4_tumu(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u64m8 | ( | ... | ) | __riscv_vfcvt_xu_f_v_u64m8(__VA_ARGS__) |
| #define vfcvt_xu_f_v_u64m8_m | ( | ... | ) | __riscv_vfcvt_xu_f_v_u64m8_tumu(__VA_ARGS__) |
| #define vfdiv_vf_f16m1 | ( | ... | ) | __riscv_vfdiv_vf_f16m1(__VA_ARGS__) |
| #define vfdiv_vf_f16m1_m | ( | ... | ) | __riscv_vfdiv_vf_f16m1_tumu(__VA_ARGS__) |
| #define vfdiv_vf_f16m2 | ( | ... | ) | __riscv_vfdiv_vf_f16m2(__VA_ARGS__) |
| #define vfdiv_vf_f16m2_m | ( | ... | ) | __riscv_vfdiv_vf_f16m2_tumu(__VA_ARGS__) |
| #define vfdiv_vf_f16m4 | ( | ... | ) | __riscv_vfdiv_vf_f16m4(__VA_ARGS__) |
| #define vfdiv_vf_f16m4_m | ( | ... | ) | __riscv_vfdiv_vf_f16m4_tumu(__VA_ARGS__) |
| #define vfdiv_vf_f16m8 | ( | ... | ) | __riscv_vfdiv_vf_f16m8(__VA_ARGS__) |
| #define vfdiv_vf_f16m8_m | ( | ... | ) | __riscv_vfdiv_vf_f16m8_tumu(__VA_ARGS__) |
| #define vfdiv_vf_f16mf2 | ( | ... | ) | __riscv_vfdiv_vf_f16mf2(__VA_ARGS__) |
| #define vfdiv_vf_f16mf2_m | ( | ... | ) | __riscv_vfdiv_vf_f16mf2_tumu(__VA_ARGS__) |
| #define vfdiv_vf_f16mf4 | ( | ... | ) | __riscv_vfdiv_vf_f16mf4(__VA_ARGS__) |
| #define vfdiv_vf_f16mf4_m | ( | ... | ) | __riscv_vfdiv_vf_f16mf4_tumu(__VA_ARGS__) |
| #define vfdiv_vf_f32m1 | ( | ... | ) | __riscv_vfdiv_vf_f32m1(__VA_ARGS__) |
| #define vfdiv_vf_f32m1_m | ( | ... | ) | __riscv_vfdiv_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfdiv_vf_f32m2 | ( | ... | ) | __riscv_vfdiv_vf_f32m2(__VA_ARGS__) |
| #define vfdiv_vf_f32m2_m | ( | ... | ) | __riscv_vfdiv_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfdiv_vf_f32m4 | ( | ... | ) | __riscv_vfdiv_vf_f32m4(__VA_ARGS__) |
| #define vfdiv_vf_f32m4_m | ( | ... | ) | __riscv_vfdiv_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfdiv_vf_f32m8 | ( | ... | ) | __riscv_vfdiv_vf_f32m8(__VA_ARGS__) |
| #define vfdiv_vf_f32m8_m | ( | ... | ) | __riscv_vfdiv_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfdiv_vf_f32mf2 | ( | ... | ) | __riscv_vfdiv_vf_f32mf2(__VA_ARGS__) |
| #define vfdiv_vf_f32mf2_m | ( | ... | ) | __riscv_vfdiv_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfdiv_vf_f64m1 | ( | ... | ) | __riscv_vfdiv_vf_f64m1(__VA_ARGS__) |
| #define vfdiv_vf_f64m1_m | ( | ... | ) | __riscv_vfdiv_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfdiv_vf_f64m2 | ( | ... | ) | __riscv_vfdiv_vf_f64m2(__VA_ARGS__) |
| #define vfdiv_vf_f64m2_m | ( | ... | ) | __riscv_vfdiv_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfdiv_vf_f64m4 | ( | ... | ) | __riscv_vfdiv_vf_f64m4(__VA_ARGS__) |
| #define vfdiv_vf_f64m4_m | ( | ... | ) | __riscv_vfdiv_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfdiv_vf_f64m8 | ( | ... | ) | __riscv_vfdiv_vf_f64m8(__VA_ARGS__) |
| #define vfdiv_vf_f64m8_m | ( | ... | ) | __riscv_vfdiv_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfdiv_vv_f16m1 | ( | ... | ) | __riscv_vfdiv_vv_f16m1(__VA_ARGS__) |
| #define vfdiv_vv_f16m1_m | ( | ... | ) | __riscv_vfdiv_vv_f16m1_tumu(__VA_ARGS__) |
| #define vfdiv_vv_f16m2 | ( | ... | ) | __riscv_vfdiv_vv_f16m2(__VA_ARGS__) |
| #define vfdiv_vv_f16m2_m | ( | ... | ) | __riscv_vfdiv_vv_f16m2_tumu(__VA_ARGS__) |
| #define vfdiv_vv_f16m4 | ( | ... | ) | __riscv_vfdiv_vv_f16m4(__VA_ARGS__) |
| #define vfdiv_vv_f16m4_m | ( | ... | ) | __riscv_vfdiv_vv_f16m4_tumu(__VA_ARGS__) |
| #define vfdiv_vv_f16m8 | ( | ... | ) | __riscv_vfdiv_vv_f16m8(__VA_ARGS__) |
| #define vfdiv_vv_f16m8_m | ( | ... | ) | __riscv_vfdiv_vv_f16m8_tumu(__VA_ARGS__) |
| #define vfdiv_vv_f16mf2 | ( | ... | ) | __riscv_vfdiv_vv_f16mf2(__VA_ARGS__) |
| #define vfdiv_vv_f16mf2_m | ( | ... | ) | __riscv_vfdiv_vv_f16mf2_tumu(__VA_ARGS__) |
| #define vfdiv_vv_f16mf4 | ( | ... | ) | __riscv_vfdiv_vv_f16mf4(__VA_ARGS__) |
| #define vfdiv_vv_f16mf4_m | ( | ... | ) | __riscv_vfdiv_vv_f16mf4_tumu(__VA_ARGS__) |
| #define vfdiv_vv_f32m1 | ( | ... | ) | __riscv_vfdiv_vv_f32m1(__VA_ARGS__) |
| #define vfdiv_vv_f32m1_m | ( | ... | ) | __riscv_vfdiv_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfdiv_vv_f32m2 | ( | ... | ) | __riscv_vfdiv_vv_f32m2(__VA_ARGS__) |
| #define vfdiv_vv_f32m2_m | ( | ... | ) | __riscv_vfdiv_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfdiv_vv_f32m4 | ( | ... | ) | __riscv_vfdiv_vv_f32m4(__VA_ARGS__) |
| #define vfdiv_vv_f32m4_m | ( | ... | ) | __riscv_vfdiv_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfdiv_vv_f32m8 | ( | ... | ) | __riscv_vfdiv_vv_f32m8(__VA_ARGS__) |
| #define vfdiv_vv_f32m8_m | ( | ... | ) | __riscv_vfdiv_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfdiv_vv_f32mf2 | ( | ... | ) | __riscv_vfdiv_vv_f32mf2(__VA_ARGS__) |
| #define vfdiv_vv_f32mf2_m | ( | ... | ) | __riscv_vfdiv_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfdiv_vv_f64m1 | ( | ... | ) | __riscv_vfdiv_vv_f64m1(__VA_ARGS__) |
| #define vfdiv_vv_f64m1_m | ( | ... | ) | __riscv_vfdiv_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfdiv_vv_f64m2 | ( | ... | ) | __riscv_vfdiv_vv_f64m2(__VA_ARGS__) |
| #define vfdiv_vv_f64m2_m | ( | ... | ) | __riscv_vfdiv_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfdiv_vv_f64m4 | ( | ... | ) | __riscv_vfdiv_vv_f64m4(__VA_ARGS__) |
| #define vfdiv_vv_f64m4_m | ( | ... | ) | __riscv_vfdiv_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfdiv_vv_f64m8 | ( | ... | ) | __riscv_vfdiv_vv_f64m8(__VA_ARGS__) |
| #define vfdiv_vv_f64m8_m | ( | ... | ) | __riscv_vfdiv_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfirst_m_b1 | ( | ... | ) | __riscv_vfirst_m_b1(__VA_ARGS__) |
| #define vfirst_m_b16 | ( | ... | ) | __riscv_vfirst_m_b16(__VA_ARGS__) |
| #define vfirst_m_b16_m | ( | ... | ) | __riscv_vfirst_m_b16_m(__VA_ARGS__) |
| #define vfirst_m_b1_m | ( | ... | ) | __riscv_vfirst_m_b1_m(__VA_ARGS__) |
| #define vfirst_m_b2 | ( | ... | ) | __riscv_vfirst_m_b2(__VA_ARGS__) |
| #define vfirst_m_b2_m | ( | ... | ) | __riscv_vfirst_m_b2_m(__VA_ARGS__) |
| #define vfirst_m_b32 | ( | ... | ) | __riscv_vfirst_m_b32(__VA_ARGS__) |
| #define vfirst_m_b32_m | ( | ... | ) | __riscv_vfirst_m_b32_m(__VA_ARGS__) |
| #define vfirst_m_b4 | ( | ... | ) | __riscv_vfirst_m_b4(__VA_ARGS__) |
| #define vfirst_m_b4_m | ( | ... | ) | __riscv_vfirst_m_b4_m(__VA_ARGS__) |
| #define vfirst_m_b64 | ( | ... | ) | __riscv_vfirst_m_b64(__VA_ARGS__) |
| #define vfirst_m_b64_m | ( | ... | ) | __riscv_vfirst_m_b64_m(__VA_ARGS__) |
| #define vfirst_m_b8 | ( | ... | ) | __riscv_vfirst_m_b8(__VA_ARGS__) |
| #define vfirst_m_b8_m | ( | ... | ) | __riscv_vfirst_m_b8_m(__VA_ARGS__) |
| #define vfmacc_vf_f16m1 | ( | ... | ) | __riscv_vfmacc_vf_f16m1_tu(__VA_ARGS__) |
| #define vfmacc_vf_f16m1_m | ( | ... | ) | __riscv_vfmacc_vf_f16m1_tumu(__VA_ARGS__) |
| #define vfmacc_vf_f16m2 | ( | ... | ) | __riscv_vfmacc_vf_f16m2_tu(__VA_ARGS__) |
| #define vfmacc_vf_f16m2_m | ( | ... | ) | __riscv_vfmacc_vf_f16m2_tumu(__VA_ARGS__) |
| #define vfmacc_vf_f16m4 | ( | ... | ) | __riscv_vfmacc_vf_f16m4_tu(__VA_ARGS__) |
| #define vfmacc_vf_f16m4_m | ( | ... | ) | __riscv_vfmacc_vf_f16m4_tumu(__VA_ARGS__) |
| #define vfmacc_vf_f16m8 | ( | ... | ) | __riscv_vfmacc_vf_f16m8_tu(__VA_ARGS__) |
| #define vfmacc_vf_f16m8_m | ( | ... | ) | __riscv_vfmacc_vf_f16m8_tumu(__VA_ARGS__) |
| #define vfmacc_vf_f16mf2 | ( | ... | ) | __riscv_vfmacc_vf_f16mf2_tu(__VA_ARGS__) |
| #define vfmacc_vf_f16mf2_m | ( | ... | ) | __riscv_vfmacc_vf_f16mf2_tumu(__VA_ARGS__) |
| #define vfmacc_vf_f16mf4 | ( | ... | ) | __riscv_vfmacc_vf_f16mf4_tu(__VA_ARGS__) |
| #define vfmacc_vf_f16mf4_m | ( | ... | ) | __riscv_vfmacc_vf_f16mf4_tumu(__VA_ARGS__) |
| #define vfmacc_vf_f32m1 | ( | ... | ) | __riscv_vfmacc_vf_f32m1_tu(__VA_ARGS__) |
| #define vfmacc_vf_f32m1_m | ( | ... | ) | __riscv_vfmacc_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfmacc_vf_f32m2 | ( | ... | ) | __riscv_vfmacc_vf_f32m2_tu(__VA_ARGS__) |
| #define vfmacc_vf_f32m2_m | ( | ... | ) | __riscv_vfmacc_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfmacc_vf_f32m4 | ( | ... | ) | __riscv_vfmacc_vf_f32m4_tu(__VA_ARGS__) |
| #define vfmacc_vf_f32m4_m | ( | ... | ) | __riscv_vfmacc_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfmacc_vf_f32m8 | ( | ... | ) | __riscv_vfmacc_vf_f32m8_tu(__VA_ARGS__) |
| #define vfmacc_vf_f32m8_m | ( | ... | ) | __riscv_vfmacc_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfmacc_vf_f32mf2 | ( | ... | ) | __riscv_vfmacc_vf_f32mf2_tu(__VA_ARGS__) |
| #define vfmacc_vf_f32mf2_m | ( | ... | ) | __riscv_vfmacc_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfmacc_vf_f64m1 | ( | ... | ) | __riscv_vfmacc_vf_f64m1_tu(__VA_ARGS__) |
| #define vfmacc_vf_f64m1_m | ( | ... | ) | __riscv_vfmacc_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfmacc_vf_f64m2 | ( | ... | ) | __riscv_vfmacc_vf_f64m2_tu(__VA_ARGS__) |
| #define vfmacc_vf_f64m2_m | ( | ... | ) | __riscv_vfmacc_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfmacc_vf_f64m4 | ( | ... | ) | __riscv_vfmacc_vf_f64m4_tu(__VA_ARGS__) |
| #define vfmacc_vf_f64m4_m | ( | ... | ) | __riscv_vfmacc_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfmacc_vf_f64m8 | ( | ... | ) | __riscv_vfmacc_vf_f64m8_tu(__VA_ARGS__) |
| #define vfmacc_vf_f64m8_m | ( | ... | ) | __riscv_vfmacc_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfmacc_vv_f16m1 | ( | ... | ) | __riscv_vfmacc_vv_f16m1_tu(__VA_ARGS__) |
| #define vfmacc_vv_f16m1_m | ( | ... | ) | __riscv_vfmacc_vv_f16m1_tumu(__VA_ARGS__) |
| #define vfmacc_vv_f16m2 | ( | ... | ) | __riscv_vfmacc_vv_f16m2_tu(__VA_ARGS__) |
| #define vfmacc_vv_f16m2_m | ( | ... | ) | __riscv_vfmacc_vv_f16m2_tumu(__VA_ARGS__) |
| #define vfmacc_vv_f16m4 | ( | ... | ) | __riscv_vfmacc_vv_f16m4_tu(__VA_ARGS__) |
| #define vfmacc_vv_f16m4_m | ( | ... | ) | __riscv_vfmacc_vv_f16m4_tumu(__VA_ARGS__) |
| #define vfmacc_vv_f16m8 | ( | ... | ) | __riscv_vfmacc_vv_f16m8_tu(__VA_ARGS__) |
| #define vfmacc_vv_f16m8_m | ( | ... | ) | __riscv_vfmacc_vv_f16m8_tumu(__VA_ARGS__) |
| #define vfmacc_vv_f16mf2 | ( | ... | ) | __riscv_vfmacc_vv_f16mf2_tu(__VA_ARGS__) |
| #define vfmacc_vv_f16mf2_m | ( | ... | ) | __riscv_vfmacc_vv_f16mf2_tumu(__VA_ARGS__) |
| #define vfmacc_vv_f16mf4 | ( | ... | ) | __riscv_vfmacc_vv_f16mf4_tu(__VA_ARGS__) |
| #define vfmacc_vv_f16mf4_m | ( | ... | ) | __riscv_vfmacc_vv_f16mf4_tumu(__VA_ARGS__) |
| #define vfmacc_vv_f32m1 | ( | ... | ) | __riscv_vfmacc_vv_f32m1_tu(__VA_ARGS__) |
| #define vfmacc_vv_f32m1_m | ( | ... | ) | __riscv_vfmacc_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfmacc_vv_f32m2 | ( | ... | ) | __riscv_vfmacc_vv_f32m2_tu(__VA_ARGS__) |
| #define vfmacc_vv_f32m2_m | ( | ... | ) | __riscv_vfmacc_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfmacc_vv_f32m4 | ( | ... | ) | __riscv_vfmacc_vv_f32m4_tu(__VA_ARGS__) |
| #define vfmacc_vv_f32m4_m | ( | ... | ) | __riscv_vfmacc_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfmacc_vv_f32m8 | ( | ... | ) | __riscv_vfmacc_vv_f32m8_tu(__VA_ARGS__) |
| #define vfmacc_vv_f32m8_m | ( | ... | ) | __riscv_vfmacc_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfmacc_vv_f32mf2 | ( | ... | ) | __riscv_vfmacc_vv_f32mf2_tu(__VA_ARGS__) |
| #define vfmacc_vv_f32mf2_m | ( | ... | ) | __riscv_vfmacc_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfmacc_vv_f64m1 | ( | ... | ) | __riscv_vfmacc_vv_f64m1_tu(__VA_ARGS__) |
| #define vfmacc_vv_f64m1_m | ( | ... | ) | __riscv_vfmacc_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfmacc_vv_f64m2 | ( | ... | ) | __riscv_vfmacc_vv_f64m2_tu(__VA_ARGS__) |
| #define vfmacc_vv_f64m2_m | ( | ... | ) | __riscv_vfmacc_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfmacc_vv_f64m4 | ( | ... | ) | __riscv_vfmacc_vv_f64m4_tu(__VA_ARGS__) |
| #define vfmacc_vv_f64m4_m | ( | ... | ) | __riscv_vfmacc_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfmacc_vv_f64m8 | ( | ... | ) | __riscv_vfmacc_vv_f64m8_tu(__VA_ARGS__) |
| #define vfmacc_vv_f64m8_m | ( | ... | ) | __riscv_vfmacc_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfmadd_vf_f16m1 | ( | ... | ) | __riscv_vfmadd_vf_f16m1_tu(__VA_ARGS__) |
| #define vfmadd_vf_f16m1_m | ( | ... | ) | __riscv_vfmadd_vf_f16m1_tumu(__VA_ARGS__) |
| #define vfmadd_vf_f16m2 | ( | ... | ) | __riscv_vfmadd_vf_f16m2_tu(__VA_ARGS__) |
| #define vfmadd_vf_f16m2_m | ( | ... | ) | __riscv_vfmadd_vf_f16m2_tumu(__VA_ARGS__) |
| #define vfmadd_vf_f16m4 | ( | ... | ) | __riscv_vfmadd_vf_f16m4_tu(__VA_ARGS__) |
| #define vfmadd_vf_f16m4_m | ( | ... | ) | __riscv_vfmadd_vf_f16m4_tumu(__VA_ARGS__) |
| #define vfmadd_vf_f16m8 | ( | ... | ) | __riscv_vfmadd_vf_f16m8_tu(__VA_ARGS__) |
| #define vfmadd_vf_f16m8_m | ( | ... | ) | __riscv_vfmadd_vf_f16m8_tumu(__VA_ARGS__) |
| #define vfmadd_vf_f16mf2 | ( | ... | ) | __riscv_vfmadd_vf_f16mf2_tu(__VA_ARGS__) |
| #define vfmadd_vf_f16mf2_m | ( | ... | ) | __riscv_vfmadd_vf_f16mf2_tumu(__VA_ARGS__) |
| #define vfmadd_vf_f16mf4 | ( | ... | ) | __riscv_vfmadd_vf_f16mf4_tu(__VA_ARGS__) |
| #define vfmadd_vf_f16mf4_m | ( | ... | ) | __riscv_vfmadd_vf_f16mf4_tumu(__VA_ARGS__) |
| #define vfmadd_vf_f32m1 | ( | ... | ) | __riscv_vfmadd_vf_f32m1_tu(__VA_ARGS__) |
| #define vfmadd_vf_f32m1_m | ( | ... | ) | __riscv_vfmadd_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfmadd_vf_f32m2 | ( | ... | ) | __riscv_vfmadd_vf_f32m2_tu(__VA_ARGS__) |
| #define vfmadd_vf_f32m2_m | ( | ... | ) | __riscv_vfmadd_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfmadd_vf_f32m4 | ( | ... | ) | __riscv_vfmadd_vf_f32m4_tu(__VA_ARGS__) |
| #define vfmadd_vf_f32m4_m | ( | ... | ) | __riscv_vfmadd_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfmadd_vf_f32m8 | ( | ... | ) | __riscv_vfmadd_vf_f32m8_tu(__VA_ARGS__) |
| #define vfmadd_vf_f32m8_m | ( | ... | ) | __riscv_vfmadd_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfmadd_vf_f32mf2 | ( | ... | ) | __riscv_vfmadd_vf_f32mf2_tu(__VA_ARGS__) |
| #define vfmadd_vf_f32mf2_m | ( | ... | ) | __riscv_vfmadd_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfmadd_vf_f64m1 | ( | ... | ) | __riscv_vfmadd_vf_f64m1_tu(__VA_ARGS__) |
| #define vfmadd_vf_f64m1_m | ( | ... | ) | __riscv_vfmadd_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfmadd_vf_f64m2 | ( | ... | ) | __riscv_vfmadd_vf_f64m2_tu(__VA_ARGS__) |
| #define vfmadd_vf_f64m2_m | ( | ... | ) | __riscv_vfmadd_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfmadd_vf_f64m4 | ( | ... | ) | __riscv_vfmadd_vf_f64m4_tu(__VA_ARGS__) |
| #define vfmadd_vf_f64m4_m | ( | ... | ) | __riscv_vfmadd_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfmadd_vf_f64m8 | ( | ... | ) | __riscv_vfmadd_vf_f64m8_tu(__VA_ARGS__) |
| #define vfmadd_vf_f64m8_m | ( | ... | ) | __riscv_vfmadd_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfmadd_vv_f16m1 | ( | ... | ) | __riscv_vfmadd_vv_f16m1_tu(__VA_ARGS__) |
| #define vfmadd_vv_f16m1_m | ( | ... | ) | __riscv_vfmadd_vv_f16m1_tumu(__VA_ARGS__) |
| #define vfmadd_vv_f16m2 | ( | ... | ) | __riscv_vfmadd_vv_f16m2_tu(__VA_ARGS__) |
| #define vfmadd_vv_f16m2_m | ( | ... | ) | __riscv_vfmadd_vv_f16m2_tumu(__VA_ARGS__) |
| #define vfmadd_vv_f16m4 | ( | ... | ) | __riscv_vfmadd_vv_f16m4_tu(__VA_ARGS__) |
| #define vfmadd_vv_f16m4_m | ( | ... | ) | __riscv_vfmadd_vv_f16m4_tumu(__VA_ARGS__) |
| #define vfmadd_vv_f16m8 | ( | ... | ) | __riscv_vfmadd_vv_f16m8_tu(__VA_ARGS__) |
| #define vfmadd_vv_f16m8_m | ( | ... | ) | __riscv_vfmadd_vv_f16m8_tumu(__VA_ARGS__) |
| #define vfmadd_vv_f16mf2 | ( | ... | ) | __riscv_vfmadd_vv_f16mf2_tu(__VA_ARGS__) |
| #define vfmadd_vv_f16mf2_m | ( | ... | ) | __riscv_vfmadd_vv_f16mf2_tumu(__VA_ARGS__) |
| #define vfmadd_vv_f16mf4 | ( | ... | ) | __riscv_vfmadd_vv_f16mf4_tu(__VA_ARGS__) |
| #define vfmadd_vv_f16mf4_m | ( | ... | ) | __riscv_vfmadd_vv_f16mf4_tumu(__VA_ARGS__) |
| #define vfmadd_vv_f32m1 | ( | ... | ) | __riscv_vfmadd_vv_f32m1_tu(__VA_ARGS__) |
| #define vfmadd_vv_f32m1_m | ( | ... | ) | __riscv_vfmadd_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfmadd_vv_f32m2 | ( | ... | ) | __riscv_vfmadd_vv_f32m2_tu(__VA_ARGS__) |
| #define vfmadd_vv_f32m2_m | ( | ... | ) | __riscv_vfmadd_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfmadd_vv_f32m4 | ( | ... | ) | __riscv_vfmadd_vv_f32m4_tu(__VA_ARGS__) |
| #define vfmadd_vv_f32m4_m | ( | ... | ) | __riscv_vfmadd_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfmadd_vv_f32m8 | ( | ... | ) | __riscv_vfmadd_vv_f32m8_tu(__VA_ARGS__) |
| #define vfmadd_vv_f32m8_m | ( | ... | ) | __riscv_vfmadd_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfmadd_vv_f32mf2 | ( | ... | ) | __riscv_vfmadd_vv_f32mf2_tu(__VA_ARGS__) |
| #define vfmadd_vv_f32mf2_m | ( | ... | ) | __riscv_vfmadd_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfmadd_vv_f64m1 | ( | ... | ) | __riscv_vfmadd_vv_f64m1_tu(__VA_ARGS__) |
| #define vfmadd_vv_f64m1_m | ( | ... | ) | __riscv_vfmadd_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfmadd_vv_f64m2 | ( | ... | ) | __riscv_vfmadd_vv_f64m2_tu(__VA_ARGS__) |
| #define vfmadd_vv_f64m2_m | ( | ... | ) | __riscv_vfmadd_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfmadd_vv_f64m4 | ( | ... | ) | __riscv_vfmadd_vv_f64m4_tu(__VA_ARGS__) |
| #define vfmadd_vv_f64m4_m | ( | ... | ) | __riscv_vfmadd_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfmadd_vv_f64m8 | ( | ... | ) | __riscv_vfmadd_vv_f64m8_tu(__VA_ARGS__) |
| #define vfmadd_vv_f64m8_m | ( | ... | ) | __riscv_vfmadd_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfmax_vf_f16m1 | ( | ... | ) | __riscv_vfmax_vf_f16m1(__VA_ARGS__) |
| #define vfmax_vf_f16m1_m | ( | ... | ) | __riscv_vfmax_vf_f16m1_tumu(__VA_ARGS__) |
| #define vfmax_vf_f16m2 | ( | ... | ) | __riscv_vfmax_vf_f16m2(__VA_ARGS__) |
| #define vfmax_vf_f16m2_m | ( | ... | ) | __riscv_vfmax_vf_f16m2_tumu(__VA_ARGS__) |
| #define vfmax_vf_f16m4 | ( | ... | ) | __riscv_vfmax_vf_f16m4(__VA_ARGS__) |
| #define vfmax_vf_f16m4_m | ( | ... | ) | __riscv_vfmax_vf_f16m4_tumu(__VA_ARGS__) |
| #define vfmax_vf_f16m8 | ( | ... | ) | __riscv_vfmax_vf_f16m8(__VA_ARGS__) |
| #define vfmax_vf_f16m8_m | ( | ... | ) | __riscv_vfmax_vf_f16m8_tumu(__VA_ARGS__) |
| #define vfmax_vf_f16mf2 | ( | ... | ) | __riscv_vfmax_vf_f16mf2(__VA_ARGS__) |
| #define vfmax_vf_f16mf2_m | ( | ... | ) | __riscv_vfmax_vf_f16mf2_tumu(__VA_ARGS__) |
| #define vfmax_vf_f16mf4 | ( | ... | ) | __riscv_vfmax_vf_f16mf4(__VA_ARGS__) |
| #define vfmax_vf_f16mf4_m | ( | ... | ) | __riscv_vfmax_vf_f16mf4_tumu(__VA_ARGS__) |
| #define vfmax_vf_f32m1 | ( | ... | ) | __riscv_vfmax_vf_f32m1(__VA_ARGS__) |
| #define vfmax_vf_f32m1_m | ( | ... | ) | __riscv_vfmax_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfmax_vf_f32m2 | ( | ... | ) | __riscv_vfmax_vf_f32m2(__VA_ARGS__) |
| #define vfmax_vf_f32m2_m | ( | ... | ) | __riscv_vfmax_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfmax_vf_f32m4 | ( | ... | ) | __riscv_vfmax_vf_f32m4(__VA_ARGS__) |
| #define vfmax_vf_f32m4_m | ( | ... | ) | __riscv_vfmax_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfmax_vf_f32m8 | ( | ... | ) | __riscv_vfmax_vf_f32m8(__VA_ARGS__) |
| #define vfmax_vf_f32m8_m | ( | ... | ) | __riscv_vfmax_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfmax_vf_f32mf2 | ( | ... | ) | __riscv_vfmax_vf_f32mf2(__VA_ARGS__) |
| #define vfmax_vf_f32mf2_m | ( | ... | ) | __riscv_vfmax_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfmax_vf_f64m1 | ( | ... | ) | __riscv_vfmax_vf_f64m1(__VA_ARGS__) |
| #define vfmax_vf_f64m1_m | ( | ... | ) | __riscv_vfmax_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfmax_vf_f64m2 | ( | ... | ) | __riscv_vfmax_vf_f64m2(__VA_ARGS__) |
| #define vfmax_vf_f64m2_m | ( | ... | ) | __riscv_vfmax_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfmax_vf_f64m4 | ( | ... | ) | __riscv_vfmax_vf_f64m4(__VA_ARGS__) |
| #define vfmax_vf_f64m4_m | ( | ... | ) | __riscv_vfmax_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfmax_vf_f64m8 | ( | ... | ) | __riscv_vfmax_vf_f64m8(__VA_ARGS__) |
| #define vfmax_vf_f64m8_m | ( | ... | ) | __riscv_vfmax_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfmax_vv_f16m1 | ( | ... | ) | __riscv_vfmax_vv_f16m1(__VA_ARGS__) |
| #define vfmax_vv_f16m1_m | ( | ... | ) | __riscv_vfmax_vv_f16m1_tumu(__VA_ARGS__) |
| #define vfmax_vv_f16m2 | ( | ... | ) | __riscv_vfmax_vv_f16m2(__VA_ARGS__) |
| #define vfmax_vv_f16m2_m | ( | ... | ) | __riscv_vfmax_vv_f16m2_tumu(__VA_ARGS__) |
| #define vfmax_vv_f16m4 | ( | ... | ) | __riscv_vfmax_vv_f16m4(__VA_ARGS__) |
| #define vfmax_vv_f16m4_m | ( | ... | ) | __riscv_vfmax_vv_f16m4_tumu(__VA_ARGS__) |
| #define vfmax_vv_f16m8 | ( | ... | ) | __riscv_vfmax_vv_f16m8(__VA_ARGS__) |
| #define vfmax_vv_f16m8_m | ( | ... | ) | __riscv_vfmax_vv_f16m8_tumu(__VA_ARGS__) |
| #define vfmax_vv_f16mf2 | ( | ... | ) | __riscv_vfmax_vv_f16mf2(__VA_ARGS__) |
| #define vfmax_vv_f16mf2_m | ( | ... | ) | __riscv_vfmax_vv_f16mf2_tumu(__VA_ARGS__) |
| #define vfmax_vv_f16mf4 | ( | ... | ) | __riscv_vfmax_vv_f16mf4(__VA_ARGS__) |
| #define vfmax_vv_f16mf4_m | ( | ... | ) | __riscv_vfmax_vv_f16mf4_tumu(__VA_ARGS__) |
| #define vfmax_vv_f32m1 | ( | ... | ) | __riscv_vfmax_vv_f32m1(__VA_ARGS__) |
| #define vfmax_vv_f32m1_m | ( | ... | ) | __riscv_vfmax_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfmax_vv_f32m2 | ( | ... | ) | __riscv_vfmax_vv_f32m2(__VA_ARGS__) |
| #define vfmax_vv_f32m2_m | ( | ... | ) | __riscv_vfmax_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfmax_vv_f32m4 | ( | ... | ) | __riscv_vfmax_vv_f32m4(__VA_ARGS__) |
| #define vfmax_vv_f32m4_m | ( | ... | ) | __riscv_vfmax_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfmax_vv_f32m8 | ( | ... | ) | __riscv_vfmax_vv_f32m8(__VA_ARGS__) |
| #define vfmax_vv_f32m8_m | ( | ... | ) | __riscv_vfmax_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfmax_vv_f32mf2 | ( | ... | ) | __riscv_vfmax_vv_f32mf2(__VA_ARGS__) |
| #define vfmax_vv_f32mf2_m | ( | ... | ) | __riscv_vfmax_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfmax_vv_f64m1 | ( | ... | ) | __riscv_vfmax_vv_f64m1(__VA_ARGS__) |
| #define vfmax_vv_f64m1_m | ( | ... | ) | __riscv_vfmax_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfmax_vv_f64m2 | ( | ... | ) | __riscv_vfmax_vv_f64m2(__VA_ARGS__) |
| #define vfmax_vv_f64m2_m | ( | ... | ) | __riscv_vfmax_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfmax_vv_f64m4 | ( | ... | ) | __riscv_vfmax_vv_f64m4(__VA_ARGS__) |
| #define vfmax_vv_f64m4_m | ( | ... | ) | __riscv_vfmax_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfmax_vv_f64m8 | ( | ... | ) | __riscv_vfmax_vv_f64m8(__VA_ARGS__) |
| #define vfmax_vv_f64m8_m | ( | ... | ) | __riscv_vfmax_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfmerge_vfm_f16m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vfmerge_vfm_f16m1((op1), (op2), (mask), (vl)) |
| #define vfmerge_vfm_f16m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vfmerge_vfm_f16m2((op1), (op2), (mask), (vl)) |
| #define vfmerge_vfm_f16m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vfmerge_vfm_f16m4((op1), (op2), (mask), (vl)) |
| #define vfmerge_vfm_f16m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vfmerge_vfm_f16m8((op1), (op2), (mask), (vl)) |
| #define vfmerge_vfm_f16mf2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vfmerge_vfm_f16mf2((op1), (op2), (mask), (vl)) |
| #define vfmerge_vfm_f16mf4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vfmerge_vfm_f16mf4((op1), (op2), (mask), (vl)) |
| #define vfmerge_vfm_f32m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vfmerge_vfm_f32m1((op1), (op2), (mask), (vl)) |
| #define vfmerge_vfm_f32m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vfmerge_vfm_f32m2((op1), (op2), (mask), (vl)) |
| #define vfmerge_vfm_f32m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vfmerge_vfm_f32m4((op1), (op2), (mask), (vl)) |
| #define vfmerge_vfm_f32m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vfmerge_vfm_f32m8((op1), (op2), (mask), (vl)) |
| #define vfmerge_vfm_f32mf2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vfmerge_vfm_f32mf2((op1), (op2), (mask), (vl)) |
| #define vfmerge_vfm_f64m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vfmerge_vfm_f64m1((op1), (op2), (mask), (vl)) |
| #define vfmerge_vfm_f64m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vfmerge_vfm_f64m2((op1), (op2), (mask), (vl)) |
| #define vfmerge_vfm_f64m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vfmerge_vfm_f64m4((op1), (op2), (mask), (vl)) |
| #define vfmerge_vfm_f64m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vfmerge_vfm_f64m8((op1), (op2), (mask), (vl)) |
| #define vfmin_vf_f16m1 | ( | ... | ) | __riscv_vfmin_vf_f16m1(__VA_ARGS__) |
| #define vfmin_vf_f16m1_m | ( | ... | ) | __riscv_vfmin_vf_f16m1_tumu(__VA_ARGS__) |
| #define vfmin_vf_f16m2 | ( | ... | ) | __riscv_vfmin_vf_f16m2(__VA_ARGS__) |
| #define vfmin_vf_f16m2_m | ( | ... | ) | __riscv_vfmin_vf_f16m2_tumu(__VA_ARGS__) |
| #define vfmin_vf_f16m4 | ( | ... | ) | __riscv_vfmin_vf_f16m4(__VA_ARGS__) |
| #define vfmin_vf_f16m4_m | ( | ... | ) | __riscv_vfmin_vf_f16m4_tumu(__VA_ARGS__) |
| #define vfmin_vf_f16m8 | ( | ... | ) | __riscv_vfmin_vf_f16m8(__VA_ARGS__) |
| #define vfmin_vf_f16m8_m | ( | ... | ) | __riscv_vfmin_vf_f16m8_tumu(__VA_ARGS__) |
| #define vfmin_vf_f16mf2 | ( | ... | ) | __riscv_vfmin_vf_f16mf2(__VA_ARGS__) |
| #define vfmin_vf_f16mf2_m | ( | ... | ) | __riscv_vfmin_vf_f16mf2_tumu(__VA_ARGS__) |
| #define vfmin_vf_f16mf4 | ( | ... | ) | __riscv_vfmin_vf_f16mf4(__VA_ARGS__) |
| #define vfmin_vf_f16mf4_m | ( | ... | ) | __riscv_vfmin_vf_f16mf4_tumu(__VA_ARGS__) |
| #define vfmin_vf_f32m1 | ( | ... | ) | __riscv_vfmin_vf_f32m1(__VA_ARGS__) |
| #define vfmin_vf_f32m1_m | ( | ... | ) | __riscv_vfmin_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfmin_vf_f32m2 | ( | ... | ) | __riscv_vfmin_vf_f32m2(__VA_ARGS__) |
| #define vfmin_vf_f32m2_m | ( | ... | ) | __riscv_vfmin_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfmin_vf_f32m4 | ( | ... | ) | __riscv_vfmin_vf_f32m4(__VA_ARGS__) |
| #define vfmin_vf_f32m4_m | ( | ... | ) | __riscv_vfmin_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfmin_vf_f32m8 | ( | ... | ) | __riscv_vfmin_vf_f32m8(__VA_ARGS__) |
| #define vfmin_vf_f32m8_m | ( | ... | ) | __riscv_vfmin_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfmin_vf_f32mf2 | ( | ... | ) | __riscv_vfmin_vf_f32mf2(__VA_ARGS__) |
| #define vfmin_vf_f32mf2_m | ( | ... | ) | __riscv_vfmin_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfmin_vf_f64m1 | ( | ... | ) | __riscv_vfmin_vf_f64m1(__VA_ARGS__) |
| #define vfmin_vf_f64m1_m | ( | ... | ) | __riscv_vfmin_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfmin_vf_f64m2 | ( | ... | ) | __riscv_vfmin_vf_f64m2(__VA_ARGS__) |
| #define vfmin_vf_f64m2_m | ( | ... | ) | __riscv_vfmin_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfmin_vf_f64m4 | ( | ... | ) | __riscv_vfmin_vf_f64m4(__VA_ARGS__) |
| #define vfmin_vf_f64m4_m | ( | ... | ) | __riscv_vfmin_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfmin_vf_f64m8 | ( | ... | ) | __riscv_vfmin_vf_f64m8(__VA_ARGS__) |
| #define vfmin_vf_f64m8_m | ( | ... | ) | __riscv_vfmin_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfmin_vv_f16m1 | ( | ... | ) | __riscv_vfmin_vv_f16m1(__VA_ARGS__) |
| #define vfmin_vv_f16m1_m | ( | ... | ) | __riscv_vfmin_vv_f16m1_tumu(__VA_ARGS__) |
| #define vfmin_vv_f16m2 | ( | ... | ) | __riscv_vfmin_vv_f16m2(__VA_ARGS__) |
| #define vfmin_vv_f16m2_m | ( | ... | ) | __riscv_vfmin_vv_f16m2_tumu(__VA_ARGS__) |
| #define vfmin_vv_f16m4 | ( | ... | ) | __riscv_vfmin_vv_f16m4(__VA_ARGS__) |
| #define vfmin_vv_f16m4_m | ( | ... | ) | __riscv_vfmin_vv_f16m4_tumu(__VA_ARGS__) |
| #define vfmin_vv_f16m8 | ( | ... | ) | __riscv_vfmin_vv_f16m8(__VA_ARGS__) |
| #define vfmin_vv_f16m8_m | ( | ... | ) | __riscv_vfmin_vv_f16m8_tumu(__VA_ARGS__) |
| #define vfmin_vv_f16mf2 | ( | ... | ) | __riscv_vfmin_vv_f16mf2(__VA_ARGS__) |
| #define vfmin_vv_f16mf2_m | ( | ... | ) | __riscv_vfmin_vv_f16mf2_tumu(__VA_ARGS__) |
| #define vfmin_vv_f16mf4 | ( | ... | ) | __riscv_vfmin_vv_f16mf4(__VA_ARGS__) |
| #define vfmin_vv_f16mf4_m | ( | ... | ) | __riscv_vfmin_vv_f16mf4_tumu(__VA_ARGS__) |
| #define vfmin_vv_f32m1 | ( | ... | ) | __riscv_vfmin_vv_f32m1(__VA_ARGS__) |
| #define vfmin_vv_f32m1_m | ( | ... | ) | __riscv_vfmin_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfmin_vv_f32m2 | ( | ... | ) | __riscv_vfmin_vv_f32m2(__VA_ARGS__) |
| #define vfmin_vv_f32m2_m | ( | ... | ) | __riscv_vfmin_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfmin_vv_f32m4 | ( | ... | ) | __riscv_vfmin_vv_f32m4(__VA_ARGS__) |
| #define vfmin_vv_f32m4_m | ( | ... | ) | __riscv_vfmin_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfmin_vv_f32m8 | ( | ... | ) | __riscv_vfmin_vv_f32m8(__VA_ARGS__) |
| #define vfmin_vv_f32m8_m | ( | ... | ) | __riscv_vfmin_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfmin_vv_f32mf2 | ( | ... | ) | __riscv_vfmin_vv_f32mf2(__VA_ARGS__) |
| #define vfmin_vv_f32mf2_m | ( | ... | ) | __riscv_vfmin_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfmin_vv_f64m1 | ( | ... | ) | __riscv_vfmin_vv_f64m1(__VA_ARGS__) |
| #define vfmin_vv_f64m1_m | ( | ... | ) | __riscv_vfmin_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfmin_vv_f64m2 | ( | ... | ) | __riscv_vfmin_vv_f64m2(__VA_ARGS__) |
| #define vfmin_vv_f64m2_m | ( | ... | ) | __riscv_vfmin_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfmin_vv_f64m4 | ( | ... | ) | __riscv_vfmin_vv_f64m4(__VA_ARGS__) |
| #define vfmin_vv_f64m4_m | ( | ... | ) | __riscv_vfmin_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfmin_vv_f64m8 | ( | ... | ) | __riscv_vfmin_vv_f64m8(__VA_ARGS__) |
| #define vfmin_vv_f64m8_m | ( | ... | ) | __riscv_vfmin_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfmsac_vf_f16m1 | ( | ... | ) | __riscv_vfmsac_vf_f16m1_tu(__VA_ARGS__) |
| #define vfmsac_vf_f16m1_m | ( | ... | ) | __riscv_vfmsac_vf_f16m1_tumu(__VA_ARGS__) |
| #define vfmsac_vf_f16m2 | ( | ... | ) | __riscv_vfmsac_vf_f16m2_tu(__VA_ARGS__) |
| #define vfmsac_vf_f16m2_m | ( | ... | ) | __riscv_vfmsac_vf_f16m2_tumu(__VA_ARGS__) |
| #define vfmsac_vf_f16m4 | ( | ... | ) | __riscv_vfmsac_vf_f16m4_tu(__VA_ARGS__) |
| #define vfmsac_vf_f16m4_m | ( | ... | ) | __riscv_vfmsac_vf_f16m4_tumu(__VA_ARGS__) |
| #define vfmsac_vf_f16m8 | ( | ... | ) | __riscv_vfmsac_vf_f16m8_tu(__VA_ARGS__) |
| #define vfmsac_vf_f16m8_m | ( | ... | ) | __riscv_vfmsac_vf_f16m8_tumu(__VA_ARGS__) |
| #define vfmsac_vf_f16mf2 | ( | ... | ) | __riscv_vfmsac_vf_f16mf2_tu(__VA_ARGS__) |
| #define vfmsac_vf_f16mf2_m | ( | ... | ) | __riscv_vfmsac_vf_f16mf2_tumu(__VA_ARGS__) |
| #define vfmsac_vf_f16mf4 | ( | ... | ) | __riscv_vfmsac_vf_f16mf4_tu(__VA_ARGS__) |
| #define vfmsac_vf_f16mf4_m | ( | ... | ) | __riscv_vfmsac_vf_f16mf4_tumu(__VA_ARGS__) |
| #define vfmsac_vf_f32m1 | ( | ... | ) | __riscv_vfmsac_vf_f32m1_tu(__VA_ARGS__) |
| #define vfmsac_vf_f32m1_m | ( | ... | ) | __riscv_vfmsac_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfmsac_vf_f32m2 | ( | ... | ) | __riscv_vfmsac_vf_f32m2_tu(__VA_ARGS__) |
| #define vfmsac_vf_f32m2_m | ( | ... | ) | __riscv_vfmsac_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfmsac_vf_f32m4 | ( | ... | ) | __riscv_vfmsac_vf_f32m4_tu(__VA_ARGS__) |
| #define vfmsac_vf_f32m4_m | ( | ... | ) | __riscv_vfmsac_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfmsac_vf_f32m8 | ( | ... | ) | __riscv_vfmsac_vf_f32m8_tu(__VA_ARGS__) |
| #define vfmsac_vf_f32m8_m | ( | ... | ) | __riscv_vfmsac_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfmsac_vf_f32mf2 | ( | ... | ) | __riscv_vfmsac_vf_f32mf2_tu(__VA_ARGS__) |
| #define vfmsac_vf_f32mf2_m | ( | ... | ) | __riscv_vfmsac_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfmsac_vf_f64m1 | ( | ... | ) | __riscv_vfmsac_vf_f64m1_tu(__VA_ARGS__) |
| #define vfmsac_vf_f64m1_m | ( | ... | ) | __riscv_vfmsac_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfmsac_vf_f64m2 | ( | ... | ) | __riscv_vfmsac_vf_f64m2_tu(__VA_ARGS__) |
| #define vfmsac_vf_f64m2_m | ( | ... | ) | __riscv_vfmsac_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfmsac_vf_f64m4 | ( | ... | ) | __riscv_vfmsac_vf_f64m4_tu(__VA_ARGS__) |
| #define vfmsac_vf_f64m4_m | ( | ... | ) | __riscv_vfmsac_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfmsac_vf_f64m8 | ( | ... | ) | __riscv_vfmsac_vf_f64m8_tu(__VA_ARGS__) |
| #define vfmsac_vf_f64m8_m | ( | ... | ) | __riscv_vfmsac_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfmsac_vv_f16m1 | ( | ... | ) | __riscv_vfmsac_vv_f16m1_tu(__VA_ARGS__) |
| #define vfmsac_vv_f16m1_m | ( | ... | ) | __riscv_vfmsac_vv_f16m1_tumu(__VA_ARGS__) |
| #define vfmsac_vv_f16m2 | ( | ... | ) | __riscv_vfmsac_vv_f16m2_tu(__VA_ARGS__) |
| #define vfmsac_vv_f16m2_m | ( | ... | ) | __riscv_vfmsac_vv_f16m2_tumu(__VA_ARGS__) |
| #define vfmsac_vv_f16m4 | ( | ... | ) | __riscv_vfmsac_vv_f16m4_tu(__VA_ARGS__) |
| #define vfmsac_vv_f16m4_m | ( | ... | ) | __riscv_vfmsac_vv_f16m4_tumu(__VA_ARGS__) |
| #define vfmsac_vv_f16m8 | ( | ... | ) | __riscv_vfmsac_vv_f16m8_tu(__VA_ARGS__) |
| #define vfmsac_vv_f16m8_m | ( | ... | ) | __riscv_vfmsac_vv_f16m8_tumu(__VA_ARGS__) |
| #define vfmsac_vv_f16mf2 | ( | ... | ) | __riscv_vfmsac_vv_f16mf2_tu(__VA_ARGS__) |
| #define vfmsac_vv_f16mf2_m | ( | ... | ) | __riscv_vfmsac_vv_f16mf2_tumu(__VA_ARGS__) |
| #define vfmsac_vv_f16mf4 | ( | ... | ) | __riscv_vfmsac_vv_f16mf4_tu(__VA_ARGS__) |
| #define vfmsac_vv_f16mf4_m | ( | ... | ) | __riscv_vfmsac_vv_f16mf4_tumu(__VA_ARGS__) |
| #define vfmsac_vv_f32m1 | ( | ... | ) | __riscv_vfmsac_vv_f32m1_tu(__VA_ARGS__) |
| #define vfmsac_vv_f32m1_m | ( | ... | ) | __riscv_vfmsac_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfmsac_vv_f32m2 | ( | ... | ) | __riscv_vfmsac_vv_f32m2_tu(__VA_ARGS__) |
| #define vfmsac_vv_f32m2_m | ( | ... | ) | __riscv_vfmsac_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfmsac_vv_f32m4 | ( | ... | ) | __riscv_vfmsac_vv_f32m4_tu(__VA_ARGS__) |
| #define vfmsac_vv_f32m4_m | ( | ... | ) | __riscv_vfmsac_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfmsac_vv_f32m8 | ( | ... | ) | __riscv_vfmsac_vv_f32m8_tu(__VA_ARGS__) |
| #define vfmsac_vv_f32m8_m | ( | ... | ) | __riscv_vfmsac_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfmsac_vv_f32mf2 | ( | ... | ) | __riscv_vfmsac_vv_f32mf2_tu(__VA_ARGS__) |
| #define vfmsac_vv_f32mf2_m | ( | ... | ) | __riscv_vfmsac_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfmsac_vv_f64m1 | ( | ... | ) | __riscv_vfmsac_vv_f64m1_tu(__VA_ARGS__) |
| #define vfmsac_vv_f64m1_m | ( | ... | ) | __riscv_vfmsac_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfmsac_vv_f64m2 | ( | ... | ) | __riscv_vfmsac_vv_f64m2_tu(__VA_ARGS__) |
| #define vfmsac_vv_f64m2_m | ( | ... | ) | __riscv_vfmsac_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfmsac_vv_f64m4 | ( | ... | ) | __riscv_vfmsac_vv_f64m4_tu(__VA_ARGS__) |
| #define vfmsac_vv_f64m4_m | ( | ... | ) | __riscv_vfmsac_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfmsac_vv_f64m8 | ( | ... | ) | __riscv_vfmsac_vv_f64m8_tu(__VA_ARGS__) |
| #define vfmsac_vv_f64m8_m | ( | ... | ) | __riscv_vfmsac_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfmsub_vf_f16m1 | ( | ... | ) | __riscv_vfmsub_vf_f16m1_tu(__VA_ARGS__) |
| #define vfmsub_vf_f16m1_m | ( | ... | ) | __riscv_vfmsub_vf_f16m1_tumu(__VA_ARGS__) |
| #define vfmsub_vf_f16m2 | ( | ... | ) | __riscv_vfmsub_vf_f16m2_tu(__VA_ARGS__) |
| #define vfmsub_vf_f16m2_m | ( | ... | ) | __riscv_vfmsub_vf_f16m2_tumu(__VA_ARGS__) |
| #define vfmsub_vf_f16m4 | ( | ... | ) | __riscv_vfmsub_vf_f16m4_tu(__VA_ARGS__) |
| #define vfmsub_vf_f16m4_m | ( | ... | ) | __riscv_vfmsub_vf_f16m4_tumu(__VA_ARGS__) |
| #define vfmsub_vf_f16m8 | ( | ... | ) | __riscv_vfmsub_vf_f16m8_tu(__VA_ARGS__) |
| #define vfmsub_vf_f16m8_m | ( | ... | ) | __riscv_vfmsub_vf_f16m8_tumu(__VA_ARGS__) |
| #define vfmsub_vf_f16mf2 | ( | ... | ) | __riscv_vfmsub_vf_f16mf2_tu(__VA_ARGS__) |
| #define vfmsub_vf_f16mf2_m | ( | ... | ) | __riscv_vfmsub_vf_f16mf2_tumu(__VA_ARGS__) |
| #define vfmsub_vf_f16mf4 | ( | ... | ) | __riscv_vfmsub_vf_f16mf4_tu(__VA_ARGS__) |
| #define vfmsub_vf_f16mf4_m | ( | ... | ) | __riscv_vfmsub_vf_f16mf4_tumu(__VA_ARGS__) |
| #define vfmsub_vf_f32m1 | ( | ... | ) | __riscv_vfmsub_vf_f32m1_tu(__VA_ARGS__) |
| #define vfmsub_vf_f32m1_m | ( | ... | ) | __riscv_vfmsub_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfmsub_vf_f32m2 | ( | ... | ) | __riscv_vfmsub_vf_f32m2_tu(__VA_ARGS__) |
| #define vfmsub_vf_f32m2_m | ( | ... | ) | __riscv_vfmsub_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfmsub_vf_f32m4 | ( | ... | ) | __riscv_vfmsub_vf_f32m4_tu(__VA_ARGS__) |
| #define vfmsub_vf_f32m4_m | ( | ... | ) | __riscv_vfmsub_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfmsub_vf_f32m8 | ( | ... | ) | __riscv_vfmsub_vf_f32m8_tu(__VA_ARGS__) |
| #define vfmsub_vf_f32m8_m | ( | ... | ) | __riscv_vfmsub_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfmsub_vf_f32mf2 | ( | ... | ) | __riscv_vfmsub_vf_f32mf2_tu(__VA_ARGS__) |
| #define vfmsub_vf_f32mf2_m | ( | ... | ) | __riscv_vfmsub_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfmsub_vf_f64m1 | ( | ... | ) | __riscv_vfmsub_vf_f64m1_tu(__VA_ARGS__) |
| #define vfmsub_vf_f64m1_m | ( | ... | ) | __riscv_vfmsub_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfmsub_vf_f64m2 | ( | ... | ) | __riscv_vfmsub_vf_f64m2_tu(__VA_ARGS__) |
| #define vfmsub_vf_f64m2_m | ( | ... | ) | __riscv_vfmsub_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfmsub_vf_f64m4 | ( | ... | ) | __riscv_vfmsub_vf_f64m4_tu(__VA_ARGS__) |
| #define vfmsub_vf_f64m4_m | ( | ... | ) | __riscv_vfmsub_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfmsub_vf_f64m8 | ( | ... | ) | __riscv_vfmsub_vf_f64m8_tu(__VA_ARGS__) |
| #define vfmsub_vf_f64m8_m | ( | ... | ) | __riscv_vfmsub_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfmsub_vv_f16m1 | ( | ... | ) | __riscv_vfmsub_vv_f16m1_tu(__VA_ARGS__) |
| #define vfmsub_vv_f16m1_m | ( | ... | ) | __riscv_vfmsub_vv_f16m1_tumu(__VA_ARGS__) |
| #define vfmsub_vv_f16m2 | ( | ... | ) | __riscv_vfmsub_vv_f16m2_tu(__VA_ARGS__) |
| #define vfmsub_vv_f16m2_m | ( | ... | ) | __riscv_vfmsub_vv_f16m2_tumu(__VA_ARGS__) |
| #define vfmsub_vv_f16m4 | ( | ... | ) | __riscv_vfmsub_vv_f16m4_tu(__VA_ARGS__) |
| #define vfmsub_vv_f16m4_m | ( | ... | ) | __riscv_vfmsub_vv_f16m4_tumu(__VA_ARGS__) |
| #define vfmsub_vv_f16m8 | ( | ... | ) | __riscv_vfmsub_vv_f16m8_tu(__VA_ARGS__) |
| #define vfmsub_vv_f16m8_m | ( | ... | ) | __riscv_vfmsub_vv_f16m8_tumu(__VA_ARGS__) |
| #define vfmsub_vv_f16mf2 | ( | ... | ) | __riscv_vfmsub_vv_f16mf2_tu(__VA_ARGS__) |
| #define vfmsub_vv_f16mf2_m | ( | ... | ) | __riscv_vfmsub_vv_f16mf2_tumu(__VA_ARGS__) |
| #define vfmsub_vv_f16mf4 | ( | ... | ) | __riscv_vfmsub_vv_f16mf4_tu(__VA_ARGS__) |
| #define vfmsub_vv_f16mf4_m | ( | ... | ) | __riscv_vfmsub_vv_f16mf4_tumu(__VA_ARGS__) |
| #define vfmsub_vv_f32m1 | ( | ... | ) | __riscv_vfmsub_vv_f32m1_tu(__VA_ARGS__) |
| #define vfmsub_vv_f32m1_m | ( | ... | ) | __riscv_vfmsub_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfmsub_vv_f32m2 | ( | ... | ) | __riscv_vfmsub_vv_f32m2_tu(__VA_ARGS__) |
| #define vfmsub_vv_f32m2_m | ( | ... | ) | __riscv_vfmsub_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfmsub_vv_f32m4 | ( | ... | ) | __riscv_vfmsub_vv_f32m4_tu(__VA_ARGS__) |
| #define vfmsub_vv_f32m4_m | ( | ... | ) | __riscv_vfmsub_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfmsub_vv_f32m8 | ( | ... | ) | __riscv_vfmsub_vv_f32m8_tu(__VA_ARGS__) |
| #define vfmsub_vv_f32m8_m | ( | ... | ) | __riscv_vfmsub_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfmsub_vv_f32mf2 | ( | ... | ) | __riscv_vfmsub_vv_f32mf2_tu(__VA_ARGS__) |
| #define vfmsub_vv_f32mf2_m | ( | ... | ) | __riscv_vfmsub_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfmsub_vv_f64m1 | ( | ... | ) | __riscv_vfmsub_vv_f64m1_tu(__VA_ARGS__) |
| #define vfmsub_vv_f64m1_m | ( | ... | ) | __riscv_vfmsub_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfmsub_vv_f64m2 | ( | ... | ) | __riscv_vfmsub_vv_f64m2_tu(__VA_ARGS__) |
| #define vfmsub_vv_f64m2_m | ( | ... | ) | __riscv_vfmsub_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfmsub_vv_f64m4 | ( | ... | ) | __riscv_vfmsub_vv_f64m4_tu(__VA_ARGS__) |
| #define vfmsub_vv_f64m4_m | ( | ... | ) | __riscv_vfmsub_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfmsub_vv_f64m8 | ( | ... | ) | __riscv_vfmsub_vv_f64m8_tu(__VA_ARGS__) |
| #define vfmsub_vv_f64m8_m | ( | ... | ) | __riscv_vfmsub_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfmul_vf_f16m1 | ( | ... | ) | __riscv_vfmul_vf_f16m1(__VA_ARGS__) |
| #define vfmul_vf_f16m1_m | ( | ... | ) | __riscv_vfmul_vf_f16m1_tumu(__VA_ARGS__) |
| #define vfmul_vf_f16m2 | ( | ... | ) | __riscv_vfmul_vf_f16m2(__VA_ARGS__) |
| #define vfmul_vf_f16m2_m | ( | ... | ) | __riscv_vfmul_vf_f16m2_tumu(__VA_ARGS__) |
| #define vfmul_vf_f16m4 | ( | ... | ) | __riscv_vfmul_vf_f16m4(__VA_ARGS__) |
| #define vfmul_vf_f16m4_m | ( | ... | ) | __riscv_vfmul_vf_f16m4_tumu(__VA_ARGS__) |
| #define vfmul_vf_f16m8 | ( | ... | ) | __riscv_vfmul_vf_f16m8(__VA_ARGS__) |
| #define vfmul_vf_f16m8_m | ( | ... | ) | __riscv_vfmul_vf_f16m8_tumu(__VA_ARGS__) |
| #define vfmul_vf_f16mf2 | ( | ... | ) | __riscv_vfmul_vf_f16mf2(__VA_ARGS__) |
| #define vfmul_vf_f16mf2_m | ( | ... | ) | __riscv_vfmul_vf_f16mf2_tumu(__VA_ARGS__) |
| #define vfmul_vf_f16mf4 | ( | ... | ) | __riscv_vfmul_vf_f16mf4(__VA_ARGS__) |
| #define vfmul_vf_f16mf4_m | ( | ... | ) | __riscv_vfmul_vf_f16mf4_tumu(__VA_ARGS__) |
| #define vfmul_vf_f32m1 | ( | ... | ) | __riscv_vfmul_vf_f32m1(__VA_ARGS__) |
| #define vfmul_vf_f32m1_m | ( | ... | ) | __riscv_vfmul_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfmul_vf_f32m2 | ( | ... | ) | __riscv_vfmul_vf_f32m2(__VA_ARGS__) |
| #define vfmul_vf_f32m2_m | ( | ... | ) | __riscv_vfmul_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfmul_vf_f32m4 | ( | ... | ) | __riscv_vfmul_vf_f32m4(__VA_ARGS__) |
| #define vfmul_vf_f32m4_m | ( | ... | ) | __riscv_vfmul_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfmul_vf_f32m8 | ( | ... | ) | __riscv_vfmul_vf_f32m8(__VA_ARGS__) |
| #define vfmul_vf_f32m8_m | ( | ... | ) | __riscv_vfmul_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfmul_vf_f32mf2 | ( | ... | ) | __riscv_vfmul_vf_f32mf2(__VA_ARGS__) |
| #define vfmul_vf_f32mf2_m | ( | ... | ) | __riscv_vfmul_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfmul_vf_f64m1 | ( | ... | ) | __riscv_vfmul_vf_f64m1(__VA_ARGS__) |
| #define vfmul_vf_f64m1_m | ( | ... | ) | __riscv_vfmul_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfmul_vf_f64m2 | ( | ... | ) | __riscv_vfmul_vf_f64m2(__VA_ARGS__) |
| #define vfmul_vf_f64m2_m | ( | ... | ) | __riscv_vfmul_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfmul_vf_f64m4 | ( | ... | ) | __riscv_vfmul_vf_f64m4(__VA_ARGS__) |
| #define vfmul_vf_f64m4_m | ( | ... | ) | __riscv_vfmul_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfmul_vf_f64m8 | ( | ... | ) | __riscv_vfmul_vf_f64m8(__VA_ARGS__) |
| #define vfmul_vf_f64m8_m | ( | ... | ) | __riscv_vfmul_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfmul_vv_f16m1 | ( | ... | ) | __riscv_vfmul_vv_f16m1(__VA_ARGS__) |
| #define vfmul_vv_f16m1_m | ( | ... | ) | __riscv_vfmul_vv_f16m1_tumu(__VA_ARGS__) |
| #define vfmul_vv_f16m2 | ( | ... | ) | __riscv_vfmul_vv_f16m2(__VA_ARGS__) |
| #define vfmul_vv_f16m2_m | ( | ... | ) | __riscv_vfmul_vv_f16m2_tumu(__VA_ARGS__) |
| #define vfmul_vv_f16m4 | ( | ... | ) | __riscv_vfmul_vv_f16m4(__VA_ARGS__) |
| #define vfmul_vv_f16m4_m | ( | ... | ) | __riscv_vfmul_vv_f16m4_tumu(__VA_ARGS__) |
| #define vfmul_vv_f16m8 | ( | ... | ) | __riscv_vfmul_vv_f16m8(__VA_ARGS__) |
| #define vfmul_vv_f16m8_m | ( | ... | ) | __riscv_vfmul_vv_f16m8_tumu(__VA_ARGS__) |
| #define vfmul_vv_f16mf2 | ( | ... | ) | __riscv_vfmul_vv_f16mf2(__VA_ARGS__) |
| #define vfmul_vv_f16mf2_m | ( | ... | ) | __riscv_vfmul_vv_f16mf2_tumu(__VA_ARGS__) |
| #define vfmul_vv_f16mf4 | ( | ... | ) | __riscv_vfmul_vv_f16mf4(__VA_ARGS__) |
| #define vfmul_vv_f16mf4_m | ( | ... | ) | __riscv_vfmul_vv_f16mf4_tumu(__VA_ARGS__) |
| #define vfmul_vv_f32m1 | ( | ... | ) | __riscv_vfmul_vv_f32m1(__VA_ARGS__) |
| #define vfmul_vv_f32m1_m | ( | ... | ) | __riscv_vfmul_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfmul_vv_f32m2 | ( | ... | ) | __riscv_vfmul_vv_f32m2(__VA_ARGS__) |
| #define vfmul_vv_f32m2_m | ( | ... | ) | __riscv_vfmul_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfmul_vv_f32m4 | ( | ... | ) | __riscv_vfmul_vv_f32m4(__VA_ARGS__) |
| #define vfmul_vv_f32m4_m | ( | ... | ) | __riscv_vfmul_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfmul_vv_f32m8 | ( | ... | ) | __riscv_vfmul_vv_f32m8(__VA_ARGS__) |
| #define vfmul_vv_f32m8_m | ( | ... | ) | __riscv_vfmul_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfmul_vv_f32mf2 | ( | ... | ) | __riscv_vfmul_vv_f32mf2(__VA_ARGS__) |
| #define vfmul_vv_f32mf2_m | ( | ... | ) | __riscv_vfmul_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfmul_vv_f64m1 | ( | ... | ) | __riscv_vfmul_vv_f64m1(__VA_ARGS__) |
| #define vfmul_vv_f64m1_m | ( | ... | ) | __riscv_vfmul_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfmul_vv_f64m2 | ( | ... | ) | __riscv_vfmul_vv_f64m2(__VA_ARGS__) |
| #define vfmul_vv_f64m2_m | ( | ... | ) | __riscv_vfmul_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfmul_vv_f64m4 | ( | ... | ) | __riscv_vfmul_vv_f64m4(__VA_ARGS__) |
| #define vfmul_vv_f64m4_m | ( | ... | ) | __riscv_vfmul_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfmul_vv_f64m8 | ( | ... | ) | __riscv_vfmul_vv_f64m8(__VA_ARGS__) |
| #define vfmul_vv_f64m8_m | ( | ... | ) | __riscv_vfmul_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfmv_f_s_f16m1_f16 | ( | ... | ) | __riscv_vfmv_f_s_f16m1_f16(__VA_ARGS__) |
| #define vfmv_f_s_f16m2_f16 | ( | ... | ) | __riscv_vfmv_f_s_f16m2_f16(__VA_ARGS__) |
| #define vfmv_f_s_f16m4_f16 | ( | ... | ) | __riscv_vfmv_f_s_f16m4_f16(__VA_ARGS__) |
| #define vfmv_f_s_f16m8_f16 | ( | ... | ) | __riscv_vfmv_f_s_f16m8_f16(__VA_ARGS__) |
| #define vfmv_f_s_f16mf2_f16 | ( | ... | ) | __riscv_vfmv_f_s_f16mf2_f16(__VA_ARGS__) |
| #define vfmv_f_s_f16mf4_f16 | ( | ... | ) | __riscv_vfmv_f_s_f16mf4_f16(__VA_ARGS__) |
| #define vfmv_f_s_f32m1_f32 | ( | ... | ) | __riscv_vfmv_f_s_f32m1_f32(__VA_ARGS__) |
| #define vfmv_f_s_f32m2_f32 | ( | ... | ) | __riscv_vfmv_f_s_f32m2_f32(__VA_ARGS__) |
| #define vfmv_f_s_f32m4_f32 | ( | ... | ) | __riscv_vfmv_f_s_f32m4_f32(__VA_ARGS__) |
| #define vfmv_f_s_f32m8_f32 | ( | ... | ) | __riscv_vfmv_f_s_f32m8_f32(__VA_ARGS__) |
| #define vfmv_f_s_f32mf2_f32 | ( | ... | ) | __riscv_vfmv_f_s_f32mf2_f32(__VA_ARGS__) |
| #define vfmv_f_s_f64m1_f64 | ( | ... | ) | __riscv_vfmv_f_s_f64m1_f64(__VA_ARGS__) |
| #define vfmv_f_s_f64m2_f64 | ( | ... | ) | __riscv_vfmv_f_s_f64m2_f64(__VA_ARGS__) |
| #define vfmv_f_s_f64m4_f64 | ( | ... | ) | __riscv_vfmv_f_s_f64m4_f64(__VA_ARGS__) |
| #define vfmv_f_s_f64m8_f64 | ( | ... | ) | __riscv_vfmv_f_s_f64m8_f64(__VA_ARGS__) |
| #define vfmv_s_f_f16m1 | ( | ... | ) | __riscv_vfmv_s_f_f16m1_tu(__VA_ARGS__) |
| #define vfmv_s_f_f16m2 | ( | ... | ) | __riscv_vfmv_s_f_f16m2_tu(__VA_ARGS__) |
| #define vfmv_s_f_f16m4 | ( | ... | ) | __riscv_vfmv_s_f_f16m4_tu(__VA_ARGS__) |
| #define vfmv_s_f_f16m8 | ( | ... | ) | __riscv_vfmv_s_f_f16m8_tu(__VA_ARGS__) |
| #define vfmv_s_f_f16mf2 | ( | ... | ) | __riscv_vfmv_s_f_f16mf2_tu(__VA_ARGS__) |
| #define vfmv_s_f_f16mf4 | ( | ... | ) | __riscv_vfmv_s_f_f16mf4_tu(__VA_ARGS__) |
| #define vfmv_s_f_f32m1 | ( | ... | ) | __riscv_vfmv_s_f_f32m1_tu(__VA_ARGS__) |
| #define vfmv_s_f_f32m2 | ( | ... | ) | __riscv_vfmv_s_f_f32m2_tu(__VA_ARGS__) |
| #define vfmv_s_f_f32m4 | ( | ... | ) | __riscv_vfmv_s_f_f32m4_tu(__VA_ARGS__) |
| #define vfmv_s_f_f32m8 | ( | ... | ) | __riscv_vfmv_s_f_f32m8_tu(__VA_ARGS__) |
| #define vfmv_s_f_f32mf2 | ( | ... | ) | __riscv_vfmv_s_f_f32mf2_tu(__VA_ARGS__) |
| #define vfmv_s_f_f64m1 | ( | ... | ) | __riscv_vfmv_s_f_f64m1_tu(__VA_ARGS__) |
| #define vfmv_s_f_f64m2 | ( | ... | ) | __riscv_vfmv_s_f_f64m2_tu(__VA_ARGS__) |
| #define vfmv_s_f_f64m4 | ( | ... | ) | __riscv_vfmv_s_f_f64m4_tu(__VA_ARGS__) |
| #define vfmv_s_f_f64m8 | ( | ... | ) | __riscv_vfmv_s_f_f64m8_tu(__VA_ARGS__) |
| #define vfmv_v_f_f16m1 | ( | ... | ) | __riscv_vfmv_v_f_f16m1(__VA_ARGS__) |
| #define vfmv_v_f_f16m2 | ( | ... | ) | __riscv_vfmv_v_f_f16m2(__VA_ARGS__) |
| #define vfmv_v_f_f16m4 | ( | ... | ) | __riscv_vfmv_v_f_f16m4(__VA_ARGS__) |
| #define vfmv_v_f_f16m8 | ( | ... | ) | __riscv_vfmv_v_f_f16m8(__VA_ARGS__) |
| #define vfmv_v_f_f16mf2 | ( | ... | ) | __riscv_vfmv_v_f_f16mf2(__VA_ARGS__) |
| #define vfmv_v_f_f16mf4 | ( | ... | ) | __riscv_vfmv_v_f_f16mf4(__VA_ARGS__) |
| #define vfmv_v_f_f32m1 | ( | ... | ) | __riscv_vfmv_v_f_f32m1(__VA_ARGS__) |
| #define vfmv_v_f_f32m2 | ( | ... | ) | __riscv_vfmv_v_f_f32m2(__VA_ARGS__) |
| #define vfmv_v_f_f32m4 | ( | ... | ) | __riscv_vfmv_v_f_f32m4(__VA_ARGS__) |
| #define vfmv_v_f_f32m8 | ( | ... | ) | __riscv_vfmv_v_f_f32m8(__VA_ARGS__) |
| #define vfmv_v_f_f32mf2 | ( | ... | ) | __riscv_vfmv_v_f_f32mf2(__VA_ARGS__) |
| #define vfmv_v_f_f64m1 | ( | ... | ) | __riscv_vfmv_v_f_f64m1(__VA_ARGS__) |
| #define vfmv_v_f_f64m2 | ( | ... | ) | __riscv_vfmv_v_f_f64m2(__VA_ARGS__) |
| #define vfmv_v_f_f64m4 | ( | ... | ) | __riscv_vfmv_v_f_f64m4(__VA_ARGS__) |
| #define vfmv_v_f_f64m8 | ( | ... | ) | __riscv_vfmv_v_f_f64m8(__VA_ARGS__) |
| #define vfncvt_f_f_w_f16m1 | ( | ... | ) | __riscv_vfncvt_f_f_w_f16m1(__VA_ARGS__) |
| #define vfncvt_f_f_w_f16m1_m | ( | ... | ) | __riscv_vfncvt_f_f_w_f16m1_tumu(__VA_ARGS__) |
| #define vfncvt_f_f_w_f16m2 | ( | ... | ) | __riscv_vfncvt_f_f_w_f16m2(__VA_ARGS__) |
| #define vfncvt_f_f_w_f16m2_m | ( | ... | ) | __riscv_vfncvt_f_f_w_f16m2_tumu(__VA_ARGS__) |
| #define vfncvt_f_f_w_f16m4 | ( | ... | ) | __riscv_vfncvt_f_f_w_f16m4(__VA_ARGS__) |
| #define vfncvt_f_f_w_f16m4_m | ( | ... | ) | __riscv_vfncvt_f_f_w_f16m4_tumu(__VA_ARGS__) |
| #define vfncvt_f_f_w_f16mf2 | ( | ... | ) | __riscv_vfncvt_f_f_w_f16mf2(__VA_ARGS__) |
| #define vfncvt_f_f_w_f16mf2_m | ( | ... | ) | __riscv_vfncvt_f_f_w_f16mf2_tumu(__VA_ARGS__) |
| #define vfncvt_f_f_w_f16mf4 | ( | ... | ) | __riscv_vfncvt_f_f_w_f16mf4(__VA_ARGS__) |
| #define vfncvt_f_f_w_f16mf4_m | ( | ... | ) | __riscv_vfncvt_f_f_w_f16mf4_tumu(__VA_ARGS__) |
| #define vfncvt_f_f_w_f32m1 | ( | ... | ) | __riscv_vfncvt_f_f_w_f32m1(__VA_ARGS__) |
| #define vfncvt_f_f_w_f32m1_m | ( | ... | ) | __riscv_vfncvt_f_f_w_f32m1_tumu(__VA_ARGS__) |
| #define vfncvt_f_f_w_f32m2 | ( | ... | ) | __riscv_vfncvt_f_f_w_f32m2(__VA_ARGS__) |
| #define vfncvt_f_f_w_f32m2_m | ( | ... | ) | __riscv_vfncvt_f_f_w_f32m2_tumu(__VA_ARGS__) |
| #define vfncvt_f_f_w_f32m4 | ( | ... | ) | __riscv_vfncvt_f_f_w_f32m4(__VA_ARGS__) |
| #define vfncvt_f_f_w_f32m4_m | ( | ... | ) | __riscv_vfncvt_f_f_w_f32m4_tumu(__VA_ARGS__) |
| #define vfncvt_f_f_w_f32mf2 | ( | ... | ) | __riscv_vfncvt_f_f_w_f32mf2(__VA_ARGS__) |
| #define vfncvt_f_f_w_f32mf2_m | ( | ... | ) | __riscv_vfncvt_f_f_w_f32mf2_tumu(__VA_ARGS__) |
| #define vfncvt_f_x_w_f16m1 | ( | ... | ) | __riscv_vfncvt_f_x_w_f16m1(__VA_ARGS__) |
| #define vfncvt_f_x_w_f16m1_m | ( | ... | ) | __riscv_vfncvt_f_x_w_f16m1_tumu(__VA_ARGS__) |
| #define vfncvt_f_x_w_f16m2 | ( | ... | ) | __riscv_vfncvt_f_x_w_f16m2(__VA_ARGS__) |
| #define vfncvt_f_x_w_f16m2_m | ( | ... | ) | __riscv_vfncvt_f_x_w_f16m2_tumu(__VA_ARGS__) |
| #define vfncvt_f_x_w_f16m4 | ( | ... | ) | __riscv_vfncvt_f_x_w_f16m4(__VA_ARGS__) |
| #define vfncvt_f_x_w_f16m4_m | ( | ... | ) | __riscv_vfncvt_f_x_w_f16m4_tumu(__VA_ARGS__) |
| #define vfncvt_f_x_w_f16mf2 | ( | ... | ) | __riscv_vfncvt_f_x_w_f16mf2(__VA_ARGS__) |
| #define vfncvt_f_x_w_f16mf2_m | ( | ... | ) | __riscv_vfncvt_f_x_w_f16mf2_tumu(__VA_ARGS__) |
| #define vfncvt_f_x_w_f16mf4 | ( | ... | ) | __riscv_vfncvt_f_x_w_f16mf4(__VA_ARGS__) |
| #define vfncvt_f_x_w_f16mf4_m | ( | ... | ) | __riscv_vfncvt_f_x_w_f16mf4_tumu(__VA_ARGS__) |
| #define vfncvt_f_x_w_f32m1 | ( | ... | ) | __riscv_vfncvt_f_x_w_f32m1(__VA_ARGS__) |
| #define vfncvt_f_x_w_f32m1_m | ( | ... | ) | __riscv_vfncvt_f_x_w_f32m1_tumu(__VA_ARGS__) |
| #define vfncvt_f_x_w_f32m2 | ( | ... | ) | __riscv_vfncvt_f_x_w_f32m2(__VA_ARGS__) |
| #define vfncvt_f_x_w_f32m2_m | ( | ... | ) | __riscv_vfncvt_f_x_w_f32m2_tumu(__VA_ARGS__) |
| #define vfncvt_f_x_w_f32m4 | ( | ... | ) | __riscv_vfncvt_f_x_w_f32m4(__VA_ARGS__) |
| #define vfncvt_f_x_w_f32m4_m | ( | ... | ) | __riscv_vfncvt_f_x_w_f32m4_tumu(__VA_ARGS__) |
| #define vfncvt_f_x_w_f32mf2 | ( | ... | ) | __riscv_vfncvt_f_x_w_f32mf2(__VA_ARGS__) |
| #define vfncvt_f_x_w_f32mf2_m | ( | ... | ) | __riscv_vfncvt_f_x_w_f32mf2_tumu(__VA_ARGS__) |
| #define vfncvt_f_xu_w_f16m1 | ( | ... | ) | __riscv_vfncvt_f_xu_w_f16m1(__VA_ARGS__) |
| #define vfncvt_f_xu_w_f16m1_m | ( | ... | ) | __riscv_vfncvt_f_xu_w_f16m1_tumu(__VA_ARGS__) |
| #define vfncvt_f_xu_w_f16m2 | ( | ... | ) | __riscv_vfncvt_f_xu_w_f16m2(__VA_ARGS__) |
| #define vfncvt_f_xu_w_f16m2_m | ( | ... | ) | __riscv_vfncvt_f_xu_w_f16m2_tumu(__VA_ARGS__) |
| #define vfncvt_f_xu_w_f16m4 | ( | ... | ) | __riscv_vfncvt_f_xu_w_f16m4(__VA_ARGS__) |
| #define vfncvt_f_xu_w_f16m4_m | ( | ... | ) | __riscv_vfncvt_f_xu_w_f16m4_tumu(__VA_ARGS__) |
| #define vfncvt_f_xu_w_f16mf2 | ( | ... | ) | __riscv_vfncvt_f_xu_w_f16mf2(__VA_ARGS__) |
| #define vfncvt_f_xu_w_f16mf2_m | ( | ... | ) | __riscv_vfncvt_f_xu_w_f16mf2_tumu(__VA_ARGS__) |
| #define vfncvt_f_xu_w_f16mf4 | ( | ... | ) | __riscv_vfncvt_f_xu_w_f16mf4(__VA_ARGS__) |
| #define vfncvt_f_xu_w_f16mf4_m | ( | ... | ) | __riscv_vfncvt_f_xu_w_f16mf4_tumu(__VA_ARGS__) |
| #define vfncvt_f_xu_w_f32m1 | ( | ... | ) | __riscv_vfncvt_f_xu_w_f32m1(__VA_ARGS__) |
| #define vfncvt_f_xu_w_f32m1_m | ( | ... | ) | __riscv_vfncvt_f_xu_w_f32m1_tumu(__VA_ARGS__) |
| #define vfncvt_f_xu_w_f32m2 | ( | ... | ) | __riscv_vfncvt_f_xu_w_f32m2(__VA_ARGS__) |
| #define vfncvt_f_xu_w_f32m2_m | ( | ... | ) | __riscv_vfncvt_f_xu_w_f32m2_tumu(__VA_ARGS__) |
| #define vfncvt_f_xu_w_f32m4 | ( | ... | ) | __riscv_vfncvt_f_xu_w_f32m4(__VA_ARGS__) |
| #define vfncvt_f_xu_w_f32m4_m | ( | ... | ) | __riscv_vfncvt_f_xu_w_f32m4_tumu(__VA_ARGS__) |
| #define vfncvt_f_xu_w_f32mf2 | ( | ... | ) | __riscv_vfncvt_f_xu_w_f32mf2(__VA_ARGS__) |
| #define vfncvt_f_xu_w_f32mf2_m | ( | ... | ) | __riscv_vfncvt_f_xu_w_f32mf2_tumu(__VA_ARGS__) |
| #define vfncvt_rod_f_f_w_f16m1 | ( | ... | ) | __riscv_vfncvt_rod_f_f_w_f16m1(__VA_ARGS__) |
| #define vfncvt_rod_f_f_w_f16m1_m | ( | ... | ) | __riscv_vfncvt_rod_f_f_w_f16m1_tumu(__VA_ARGS__) |
| #define vfncvt_rod_f_f_w_f16m2 | ( | ... | ) | __riscv_vfncvt_rod_f_f_w_f16m2(__VA_ARGS__) |
| #define vfncvt_rod_f_f_w_f16m2_m | ( | ... | ) | __riscv_vfncvt_rod_f_f_w_f16m2_tumu(__VA_ARGS__) |
| #define vfncvt_rod_f_f_w_f16m4 | ( | ... | ) | __riscv_vfncvt_rod_f_f_w_f16m4(__VA_ARGS__) |
| #define vfncvt_rod_f_f_w_f16m4_m | ( | ... | ) | __riscv_vfncvt_rod_f_f_w_f16m4_tumu(__VA_ARGS__) |
| #define vfncvt_rod_f_f_w_f16mf2 | ( | ... | ) | __riscv_vfncvt_rod_f_f_w_f16mf2(__VA_ARGS__) |
| #define vfncvt_rod_f_f_w_f16mf2_m | ( | ... | ) | __riscv_vfncvt_rod_f_f_w_f16mf2_tumu(__VA_ARGS__) |
| #define vfncvt_rod_f_f_w_f16mf4 | ( | ... | ) | __riscv_vfncvt_rod_f_f_w_f16mf4(__VA_ARGS__) |
| #define vfncvt_rod_f_f_w_f16mf4_m | ( | ... | ) | __riscv_vfncvt_rod_f_f_w_f16mf4_tumu(__VA_ARGS__) |
| #define vfncvt_rod_f_f_w_f32m1 | ( | ... | ) | __riscv_vfncvt_rod_f_f_w_f32m1(__VA_ARGS__) |
| #define vfncvt_rod_f_f_w_f32m1_m | ( | ... | ) | __riscv_vfncvt_rod_f_f_w_f32m1_tumu(__VA_ARGS__) |
| #define vfncvt_rod_f_f_w_f32m2 | ( | ... | ) | __riscv_vfncvt_rod_f_f_w_f32m2(__VA_ARGS__) |
| #define vfncvt_rod_f_f_w_f32m2_m | ( | ... | ) | __riscv_vfncvt_rod_f_f_w_f32m2_tumu(__VA_ARGS__) |
| #define vfncvt_rod_f_f_w_f32m4 | ( | ... | ) | __riscv_vfncvt_rod_f_f_w_f32m4(__VA_ARGS__) |
| #define vfncvt_rod_f_f_w_f32m4_m | ( | ... | ) | __riscv_vfncvt_rod_f_f_w_f32m4_tumu(__VA_ARGS__) |
| #define vfncvt_rod_f_f_w_f32mf2 | ( | ... | ) | __riscv_vfncvt_rod_f_f_w_f32mf2(__VA_ARGS__) |
| #define vfncvt_rod_f_f_w_f32mf2_m | ( | ... | ) | __riscv_vfncvt_rod_f_f_w_f32mf2_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i16m1 | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i16m1(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i16m1_m | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i16m1_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i16m2 | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i16m2(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i16m2_m | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i16m2_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i16m4 | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i16m4(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i16m4_m | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i16m4_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i16mf2 | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i16mf2(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i16mf2_m | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i16mf2_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i16mf4 | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i16mf4(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i16mf4_m | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i16mf4_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i32m1 | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i32m1(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i32m1_m | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i32m1_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i32m2 | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i32m2(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i32m2_m | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i32m2_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i32m4 | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i32m4(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i32m4_m | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i32m4_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i32mf2 | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i32mf2(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i32mf2_m | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i32mf2_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i8m1 | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i8m1(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i8m1_m | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i8m1_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i8m2 | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i8m2(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i8m2_m | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i8m2_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i8m4 | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i8m4(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i8m4_m | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i8m4_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i8mf2 | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i8mf2(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i8mf2_m | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i8mf2_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i8mf4 | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i8mf4(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i8mf4_m | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i8mf4_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i8mf8 | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i8mf8(__VA_ARGS__) |
| #define vfncvt_rtz_x_f_w_i8mf8_m | ( | ... | ) | __riscv_vfncvt_rtz_x_f_w_i8mf8_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u16m1 | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u16m1(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u16m1_m | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u16m1_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u16m2 | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u16m2(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u16m2_m | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u16m2_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u16m4 | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u16m4(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u16m4_m | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u16m4_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u16mf2 | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u16mf2(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u16mf2_m | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u16mf2_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u16mf4 | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u16mf4(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u16mf4_m | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u16mf4_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u32m1 | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u32m1(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u32m1_m | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u32m1_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u32m2 | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u32m2(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u32m2_m | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u32m2_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u32m4 | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u32m4(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u32m4_m | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u32m4_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u32mf2 | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u32mf2(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u32mf2_m | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u32mf2_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u8m1 | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u8m1(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u8m1_m | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u8m1_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u8m2 | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u8m2(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u8m2_m | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u8m2_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u8m4 | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u8m4(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u8m4_m | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u8m4_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u8mf2 | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u8mf2(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u8mf2_m | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u8mf2_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u8mf4 | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u8mf4(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u8mf4_m | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u8mf4_tumu(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u8mf8 | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u8mf8(__VA_ARGS__) |
| #define vfncvt_rtz_xu_f_w_u8mf8_m | ( | ... | ) | __riscv_vfncvt_rtz_xu_f_w_u8mf8_tumu(__VA_ARGS__) |
| #define vfncvt_x_f_w_i16m1 | ( | ... | ) | __riscv_vfncvt_x_f_w_i16m1(__VA_ARGS__) |
| #define vfncvt_x_f_w_i16m1_m | ( | ... | ) | __riscv_vfncvt_x_f_w_i16m1_tumu(__VA_ARGS__) |
| #define vfncvt_x_f_w_i16m2 | ( | ... | ) | __riscv_vfncvt_x_f_w_i16m2(__VA_ARGS__) |
| #define vfncvt_x_f_w_i16m2_m | ( | ... | ) | __riscv_vfncvt_x_f_w_i16m2_tumu(__VA_ARGS__) |
| #define vfncvt_x_f_w_i16m4 | ( | ... | ) | __riscv_vfncvt_x_f_w_i16m4(__VA_ARGS__) |
| #define vfncvt_x_f_w_i16m4_m | ( | ... | ) | __riscv_vfncvt_x_f_w_i16m4_tumu(__VA_ARGS__) |
| #define vfncvt_x_f_w_i16mf2 | ( | ... | ) | __riscv_vfncvt_x_f_w_i16mf2(__VA_ARGS__) |
| #define vfncvt_x_f_w_i16mf2_m | ( | ... | ) | __riscv_vfncvt_x_f_w_i16mf2_tumu(__VA_ARGS__) |
| #define vfncvt_x_f_w_i16mf4 | ( | ... | ) | __riscv_vfncvt_x_f_w_i16mf4(__VA_ARGS__) |
| #define vfncvt_x_f_w_i16mf4_m | ( | ... | ) | __riscv_vfncvt_x_f_w_i16mf4_tumu(__VA_ARGS__) |
| #define vfncvt_x_f_w_i32m1 | ( | ... | ) | __riscv_vfncvt_x_f_w_i32m1(__VA_ARGS__) |
| #define vfncvt_x_f_w_i32m1_m | ( | ... | ) | __riscv_vfncvt_x_f_w_i32m1_tumu(__VA_ARGS__) |
| #define vfncvt_x_f_w_i32m2 | ( | ... | ) | __riscv_vfncvt_x_f_w_i32m2(__VA_ARGS__) |
| #define vfncvt_x_f_w_i32m2_m | ( | ... | ) | __riscv_vfncvt_x_f_w_i32m2_tumu(__VA_ARGS__) |
| #define vfncvt_x_f_w_i32m4 | ( | ... | ) | __riscv_vfncvt_x_f_w_i32m4(__VA_ARGS__) |
| #define vfncvt_x_f_w_i32m4_m | ( | ... | ) | __riscv_vfncvt_x_f_w_i32m4_tumu(__VA_ARGS__) |
| #define vfncvt_x_f_w_i32mf2 | ( | ... | ) | __riscv_vfncvt_x_f_w_i32mf2(__VA_ARGS__) |
| #define vfncvt_x_f_w_i32mf2_m | ( | ... | ) | __riscv_vfncvt_x_f_w_i32mf2_tumu(__VA_ARGS__) |
| #define vfncvt_x_f_w_i8m1 | ( | ... | ) | __riscv_vfncvt_x_f_w_i8m1(__VA_ARGS__) |
| #define vfncvt_x_f_w_i8m1_m | ( | ... | ) | __riscv_vfncvt_x_f_w_i8m1_tumu(__VA_ARGS__) |
| #define vfncvt_x_f_w_i8m2 | ( | ... | ) | __riscv_vfncvt_x_f_w_i8m2(__VA_ARGS__) |
| #define vfncvt_x_f_w_i8m2_m | ( | ... | ) | __riscv_vfncvt_x_f_w_i8m2_tumu(__VA_ARGS__) |
| #define vfncvt_x_f_w_i8m4 | ( | ... | ) | __riscv_vfncvt_x_f_w_i8m4(__VA_ARGS__) |
| #define vfncvt_x_f_w_i8m4_m | ( | ... | ) | __riscv_vfncvt_x_f_w_i8m4_tumu(__VA_ARGS__) |
| #define vfncvt_x_f_w_i8mf2 | ( | ... | ) | __riscv_vfncvt_x_f_w_i8mf2(__VA_ARGS__) |
| #define vfncvt_x_f_w_i8mf2_m | ( | ... | ) | __riscv_vfncvt_x_f_w_i8mf2_tumu(__VA_ARGS__) |
| #define vfncvt_x_f_w_i8mf4 | ( | ... | ) | __riscv_vfncvt_x_f_w_i8mf4(__VA_ARGS__) |
| #define vfncvt_x_f_w_i8mf4_m | ( | ... | ) | __riscv_vfncvt_x_f_w_i8mf4_tumu(__VA_ARGS__) |
| #define vfncvt_x_f_w_i8mf8 | ( | ... | ) | __riscv_vfncvt_x_f_w_i8mf8(__VA_ARGS__) |
| #define vfncvt_x_f_w_i8mf8_m | ( | ... | ) | __riscv_vfncvt_x_f_w_i8mf8_tumu(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u16m1 | ( | ... | ) | __riscv_vfncvt_xu_f_w_u16m1(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u16m1_m | ( | ... | ) | __riscv_vfncvt_xu_f_w_u16m1_tumu(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u16m2 | ( | ... | ) | __riscv_vfncvt_xu_f_w_u16m2(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u16m2_m | ( | ... | ) | __riscv_vfncvt_xu_f_w_u16m2_tumu(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u16m4 | ( | ... | ) | __riscv_vfncvt_xu_f_w_u16m4(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u16m4_m | ( | ... | ) | __riscv_vfncvt_xu_f_w_u16m4_tumu(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u16mf2 | ( | ... | ) | __riscv_vfncvt_xu_f_w_u16mf2(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u16mf2_m | ( | ... | ) | __riscv_vfncvt_xu_f_w_u16mf2_tumu(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u16mf4 | ( | ... | ) | __riscv_vfncvt_xu_f_w_u16mf4(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u16mf4_m | ( | ... | ) | __riscv_vfncvt_xu_f_w_u16mf4_tumu(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u32m1 | ( | ... | ) | __riscv_vfncvt_xu_f_w_u32m1(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u32m1_m | ( | ... | ) | __riscv_vfncvt_xu_f_w_u32m1_tumu(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u32m2 | ( | ... | ) | __riscv_vfncvt_xu_f_w_u32m2(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u32m2_m | ( | ... | ) | __riscv_vfncvt_xu_f_w_u32m2_tumu(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u32m4 | ( | ... | ) | __riscv_vfncvt_xu_f_w_u32m4(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u32m4_m | ( | ... | ) | __riscv_vfncvt_xu_f_w_u32m4_tumu(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u32mf2 | ( | ... | ) | __riscv_vfncvt_xu_f_w_u32mf2(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u32mf2_m | ( | ... | ) | __riscv_vfncvt_xu_f_w_u32mf2_tumu(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u8m1 | ( | ... | ) | __riscv_vfncvt_xu_f_w_u8m1(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u8m1_m | ( | ... | ) | __riscv_vfncvt_xu_f_w_u8m1_tumu(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u8m2 | ( | ... | ) | __riscv_vfncvt_xu_f_w_u8m2(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u8m2_m | ( | ... | ) | __riscv_vfncvt_xu_f_w_u8m2_tumu(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u8m4 | ( | ... | ) | __riscv_vfncvt_xu_f_w_u8m4(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u8m4_m | ( | ... | ) | __riscv_vfncvt_xu_f_w_u8m4_tumu(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u8mf2 | ( | ... | ) | __riscv_vfncvt_xu_f_w_u8mf2(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u8mf2_m | ( | ... | ) | __riscv_vfncvt_xu_f_w_u8mf2_tumu(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u8mf4 | ( | ... | ) | __riscv_vfncvt_xu_f_w_u8mf4(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u8mf4_m | ( | ... | ) | __riscv_vfncvt_xu_f_w_u8mf4_tumu(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u8mf8 | ( | ... | ) | __riscv_vfncvt_xu_f_w_u8mf8(__VA_ARGS__) |
| #define vfncvt_xu_f_w_u8mf8_m | ( | ... | ) | __riscv_vfncvt_xu_f_w_u8mf8_tumu(__VA_ARGS__) |
| #define vfneg_v_f16m1 | ( | ... | ) | __riscv_vfneg_v_f16m1(__VA_ARGS__) |
| #define vfneg_v_f16m1_m | ( | ... | ) | __riscv_vfneg_v_f16m1_tumu(__VA_ARGS__) |
| #define vfneg_v_f16m2 | ( | ... | ) | __riscv_vfneg_v_f16m2(__VA_ARGS__) |
| #define vfneg_v_f16m2_m | ( | ... | ) | __riscv_vfneg_v_f16m2_tumu(__VA_ARGS__) |
| #define vfneg_v_f16m4 | ( | ... | ) | __riscv_vfneg_v_f16m4(__VA_ARGS__) |
| #define vfneg_v_f16m4_m | ( | ... | ) | __riscv_vfneg_v_f16m4_tumu(__VA_ARGS__) |
| #define vfneg_v_f16m8 | ( | ... | ) | __riscv_vfneg_v_f16m8(__VA_ARGS__) |
| #define vfneg_v_f16m8_m | ( | ... | ) | __riscv_vfneg_v_f16m8_tumu(__VA_ARGS__) |
| #define vfneg_v_f16mf2 | ( | ... | ) | __riscv_vfneg_v_f16mf2(__VA_ARGS__) |
| #define vfneg_v_f16mf2_m | ( | ... | ) | __riscv_vfneg_v_f16mf2_tumu(__VA_ARGS__) |
| #define vfneg_v_f16mf4 | ( | ... | ) | __riscv_vfneg_v_f16mf4(__VA_ARGS__) |
| #define vfneg_v_f16mf4_m | ( | ... | ) | __riscv_vfneg_v_f16mf4_tumu(__VA_ARGS__) |
| #define vfneg_v_f32m1 | ( | ... | ) | __riscv_vfneg_v_f32m1(__VA_ARGS__) |
| #define vfneg_v_f32m1_m | ( | ... | ) | __riscv_vfneg_v_f32m1_tumu(__VA_ARGS__) |
| #define vfneg_v_f32m2 | ( | ... | ) | __riscv_vfneg_v_f32m2(__VA_ARGS__) |
| #define vfneg_v_f32m2_m | ( | ... | ) | __riscv_vfneg_v_f32m2_tumu(__VA_ARGS__) |
| #define vfneg_v_f32m4 | ( | ... | ) | __riscv_vfneg_v_f32m4(__VA_ARGS__) |
| #define vfneg_v_f32m4_m | ( | ... | ) | __riscv_vfneg_v_f32m4_tumu(__VA_ARGS__) |
| #define vfneg_v_f32m8 | ( | ... | ) | __riscv_vfneg_v_f32m8(__VA_ARGS__) |
| #define vfneg_v_f32m8_m | ( | ... | ) | __riscv_vfneg_v_f32m8_tumu(__VA_ARGS__) |
| #define vfneg_v_f32mf2 | ( | ... | ) | __riscv_vfneg_v_f32mf2(__VA_ARGS__) |
| #define vfneg_v_f32mf2_m | ( | ... | ) | __riscv_vfneg_v_f32mf2_tumu(__VA_ARGS__) |
| #define vfneg_v_f64m1 | ( | ... | ) | __riscv_vfneg_v_f64m1(__VA_ARGS__) |
| #define vfneg_v_f64m1_m | ( | ... | ) | __riscv_vfneg_v_f64m1_tumu(__VA_ARGS__) |
| #define vfneg_v_f64m2 | ( | ... | ) | __riscv_vfneg_v_f64m2(__VA_ARGS__) |
| #define vfneg_v_f64m2_m | ( | ... | ) | __riscv_vfneg_v_f64m2_tumu(__VA_ARGS__) |
| #define vfneg_v_f64m4 | ( | ... | ) | __riscv_vfneg_v_f64m4(__VA_ARGS__) |
| #define vfneg_v_f64m4_m | ( | ... | ) | __riscv_vfneg_v_f64m4_tumu(__VA_ARGS__) |
| #define vfneg_v_f64m8 | ( | ... | ) | __riscv_vfneg_v_f64m8(__VA_ARGS__) |
| #define vfneg_v_f64m8_m | ( | ... | ) | __riscv_vfneg_v_f64m8_tumu(__VA_ARGS__) |
| #define vfnmacc_vf_f16m1 | ( | ... | ) | __riscv_vfnmacc_vf_f16m1_tu(__VA_ARGS__) |
| #define vfnmacc_vf_f16m1_m | ( | ... | ) | __riscv_vfnmacc_vf_f16m1_tumu(__VA_ARGS__) |
| #define vfnmacc_vf_f16m2 | ( | ... | ) | __riscv_vfnmacc_vf_f16m2_tu(__VA_ARGS__) |
| #define vfnmacc_vf_f16m2_m | ( | ... | ) | __riscv_vfnmacc_vf_f16m2_tumu(__VA_ARGS__) |
| #define vfnmacc_vf_f16m4 | ( | ... | ) | __riscv_vfnmacc_vf_f16m4_tu(__VA_ARGS__) |
| #define vfnmacc_vf_f16m4_m | ( | ... | ) | __riscv_vfnmacc_vf_f16m4_tumu(__VA_ARGS__) |
| #define vfnmacc_vf_f16m8 | ( | ... | ) | __riscv_vfnmacc_vf_f16m8_tu(__VA_ARGS__) |
| #define vfnmacc_vf_f16m8_m | ( | ... | ) | __riscv_vfnmacc_vf_f16m8_tumu(__VA_ARGS__) |
| #define vfnmacc_vf_f16mf2 | ( | ... | ) | __riscv_vfnmacc_vf_f16mf2_tu(__VA_ARGS__) |
| #define vfnmacc_vf_f16mf2_m | ( | ... | ) | __riscv_vfnmacc_vf_f16mf2_tumu(__VA_ARGS__) |
| #define vfnmacc_vf_f16mf4 | ( | ... | ) | __riscv_vfnmacc_vf_f16mf4_tu(__VA_ARGS__) |
| #define vfnmacc_vf_f16mf4_m | ( | ... | ) | __riscv_vfnmacc_vf_f16mf4_tumu(__VA_ARGS__) |
| #define vfnmacc_vf_f32m1 | ( | ... | ) | __riscv_vfnmacc_vf_f32m1_tu(__VA_ARGS__) |
| #define vfnmacc_vf_f32m1_m | ( | ... | ) | __riscv_vfnmacc_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfnmacc_vf_f32m2 | ( | ... | ) | __riscv_vfnmacc_vf_f32m2_tu(__VA_ARGS__) |
| #define vfnmacc_vf_f32m2_m | ( | ... | ) | __riscv_vfnmacc_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfnmacc_vf_f32m4 | ( | ... | ) | __riscv_vfnmacc_vf_f32m4_tu(__VA_ARGS__) |
| #define vfnmacc_vf_f32m4_m | ( | ... | ) | __riscv_vfnmacc_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfnmacc_vf_f32m8 | ( | ... | ) | __riscv_vfnmacc_vf_f32m8_tu(__VA_ARGS__) |
| #define vfnmacc_vf_f32m8_m | ( | ... | ) | __riscv_vfnmacc_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfnmacc_vf_f32mf2 | ( | ... | ) | __riscv_vfnmacc_vf_f32mf2_tu(__VA_ARGS__) |
| #define vfnmacc_vf_f32mf2_m | ( | ... | ) | __riscv_vfnmacc_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfnmacc_vf_f64m1 | ( | ... | ) | __riscv_vfnmacc_vf_f64m1_tu(__VA_ARGS__) |
| #define vfnmacc_vf_f64m1_m | ( | ... | ) | __riscv_vfnmacc_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfnmacc_vf_f64m2 | ( | ... | ) | __riscv_vfnmacc_vf_f64m2_tu(__VA_ARGS__) |
| #define vfnmacc_vf_f64m2_m | ( | ... | ) | __riscv_vfnmacc_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfnmacc_vf_f64m4 | ( | ... | ) | __riscv_vfnmacc_vf_f64m4_tu(__VA_ARGS__) |
| #define vfnmacc_vf_f64m4_m | ( | ... | ) | __riscv_vfnmacc_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfnmacc_vf_f64m8 | ( | ... | ) | __riscv_vfnmacc_vf_f64m8_tu(__VA_ARGS__) |
| #define vfnmacc_vf_f64m8_m | ( | ... | ) | __riscv_vfnmacc_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfnmacc_vv_f16m1 | ( | ... | ) | __riscv_vfnmacc_vv_f16m1_tu(__VA_ARGS__) |
| #define vfnmacc_vv_f16m1_m | ( | ... | ) | __riscv_vfnmacc_vv_f16m1_tumu(__VA_ARGS__) |
| #define vfnmacc_vv_f16m2 | ( | ... | ) | __riscv_vfnmacc_vv_f16m2_tu(__VA_ARGS__) |
| #define vfnmacc_vv_f16m2_m | ( | ... | ) | __riscv_vfnmacc_vv_f16m2_tumu(__VA_ARGS__) |
| #define vfnmacc_vv_f16m4 | ( | ... | ) | __riscv_vfnmacc_vv_f16m4_tu(__VA_ARGS__) |
| #define vfnmacc_vv_f16m4_m | ( | ... | ) | __riscv_vfnmacc_vv_f16m4_tumu(__VA_ARGS__) |
| #define vfnmacc_vv_f16m8 | ( | ... | ) | __riscv_vfnmacc_vv_f16m8_tu(__VA_ARGS__) |
| #define vfnmacc_vv_f16m8_m | ( | ... | ) | __riscv_vfnmacc_vv_f16m8_tumu(__VA_ARGS__) |
| #define vfnmacc_vv_f16mf2 | ( | ... | ) | __riscv_vfnmacc_vv_f16mf2_tu(__VA_ARGS__) |
| #define vfnmacc_vv_f16mf2_m | ( | ... | ) | __riscv_vfnmacc_vv_f16mf2_tumu(__VA_ARGS__) |
| #define vfnmacc_vv_f16mf4 | ( | ... | ) | __riscv_vfnmacc_vv_f16mf4_tu(__VA_ARGS__) |
| #define vfnmacc_vv_f16mf4_m | ( | ... | ) | __riscv_vfnmacc_vv_f16mf4_tumu(__VA_ARGS__) |
| #define vfnmacc_vv_f32m1 | ( | ... | ) | __riscv_vfnmacc_vv_f32m1_tu(__VA_ARGS__) |
| #define vfnmacc_vv_f32m1_m | ( | ... | ) | __riscv_vfnmacc_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfnmacc_vv_f32m2 | ( | ... | ) | __riscv_vfnmacc_vv_f32m2_tu(__VA_ARGS__) |
| #define vfnmacc_vv_f32m2_m | ( | ... | ) | __riscv_vfnmacc_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfnmacc_vv_f32m4 | ( | ... | ) | __riscv_vfnmacc_vv_f32m4_tu(__VA_ARGS__) |
| #define vfnmacc_vv_f32m4_m | ( | ... | ) | __riscv_vfnmacc_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfnmacc_vv_f32m8 | ( | ... | ) | __riscv_vfnmacc_vv_f32m8_tu(__VA_ARGS__) |
| #define vfnmacc_vv_f32m8_m | ( | ... | ) | __riscv_vfnmacc_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfnmacc_vv_f32mf2 | ( | ... | ) | __riscv_vfnmacc_vv_f32mf2_tu(__VA_ARGS__) |
| #define vfnmacc_vv_f32mf2_m | ( | ... | ) | __riscv_vfnmacc_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfnmacc_vv_f64m1 | ( | ... | ) | __riscv_vfnmacc_vv_f64m1_tu(__VA_ARGS__) |
| #define vfnmacc_vv_f64m1_m | ( | ... | ) | __riscv_vfnmacc_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfnmacc_vv_f64m2 | ( | ... | ) | __riscv_vfnmacc_vv_f64m2_tu(__VA_ARGS__) |
| #define vfnmacc_vv_f64m2_m | ( | ... | ) | __riscv_vfnmacc_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfnmacc_vv_f64m4 | ( | ... | ) | __riscv_vfnmacc_vv_f64m4_tu(__VA_ARGS__) |
| #define vfnmacc_vv_f64m4_m | ( | ... | ) | __riscv_vfnmacc_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfnmacc_vv_f64m8 | ( | ... | ) | __riscv_vfnmacc_vv_f64m8_tu(__VA_ARGS__) |
| #define vfnmacc_vv_f64m8_m | ( | ... | ) | __riscv_vfnmacc_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfnmadd_vf_f16m1 | ( | ... | ) | __riscv_vfnmadd_vf_f16m1_tu(__VA_ARGS__) |
| #define vfnmadd_vf_f16m1_m | ( | ... | ) | __riscv_vfnmadd_vf_f16m1_tumu(__VA_ARGS__) |
| #define vfnmadd_vf_f16m2 | ( | ... | ) | __riscv_vfnmadd_vf_f16m2_tu(__VA_ARGS__) |
| #define vfnmadd_vf_f16m2_m | ( | ... | ) | __riscv_vfnmadd_vf_f16m2_tumu(__VA_ARGS__) |
| #define vfnmadd_vf_f16m4 | ( | ... | ) | __riscv_vfnmadd_vf_f16m4_tu(__VA_ARGS__) |
| #define vfnmadd_vf_f16m4_m | ( | ... | ) | __riscv_vfnmadd_vf_f16m4_tumu(__VA_ARGS__) |
| #define vfnmadd_vf_f16m8 | ( | ... | ) | __riscv_vfnmadd_vf_f16m8_tu(__VA_ARGS__) |
| #define vfnmadd_vf_f16m8_m | ( | ... | ) | __riscv_vfnmadd_vf_f16m8_tumu(__VA_ARGS__) |
| #define vfnmadd_vf_f16mf2 | ( | ... | ) | __riscv_vfnmadd_vf_f16mf2_tu(__VA_ARGS__) |
| #define vfnmadd_vf_f16mf2_m | ( | ... | ) | __riscv_vfnmadd_vf_f16mf2_tumu(__VA_ARGS__) |
| #define vfnmadd_vf_f16mf4 | ( | ... | ) | __riscv_vfnmadd_vf_f16mf4_tu(__VA_ARGS__) |
| #define vfnmadd_vf_f16mf4_m | ( | ... | ) | __riscv_vfnmadd_vf_f16mf4_tumu(__VA_ARGS__) |
| #define vfnmadd_vf_f32m1 | ( | ... | ) | __riscv_vfnmadd_vf_f32m1_tu(__VA_ARGS__) |
| #define vfnmadd_vf_f32m1_m | ( | ... | ) | __riscv_vfnmadd_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfnmadd_vf_f32m2 | ( | ... | ) | __riscv_vfnmadd_vf_f32m2_tu(__VA_ARGS__) |
| #define vfnmadd_vf_f32m2_m | ( | ... | ) | __riscv_vfnmadd_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfnmadd_vf_f32m4 | ( | ... | ) | __riscv_vfnmadd_vf_f32m4_tu(__VA_ARGS__) |
| #define vfnmadd_vf_f32m4_m | ( | ... | ) | __riscv_vfnmadd_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfnmadd_vf_f32m8 | ( | ... | ) | __riscv_vfnmadd_vf_f32m8_tu(__VA_ARGS__) |
| #define vfnmadd_vf_f32m8_m | ( | ... | ) | __riscv_vfnmadd_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfnmadd_vf_f32mf2 | ( | ... | ) | __riscv_vfnmadd_vf_f32mf2_tu(__VA_ARGS__) |
| #define vfnmadd_vf_f32mf2_m | ( | ... | ) | __riscv_vfnmadd_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfnmadd_vf_f64m1 | ( | ... | ) | __riscv_vfnmadd_vf_f64m1_tu(__VA_ARGS__) |
| #define vfnmadd_vf_f64m1_m | ( | ... | ) | __riscv_vfnmadd_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfnmadd_vf_f64m2 | ( | ... | ) | __riscv_vfnmadd_vf_f64m2_tu(__VA_ARGS__) |
| #define vfnmadd_vf_f64m2_m | ( | ... | ) | __riscv_vfnmadd_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfnmadd_vf_f64m4 | ( | ... | ) | __riscv_vfnmadd_vf_f64m4_tu(__VA_ARGS__) |
| #define vfnmadd_vf_f64m4_m | ( | ... | ) | __riscv_vfnmadd_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfnmadd_vf_f64m8 | ( | ... | ) | __riscv_vfnmadd_vf_f64m8_tu(__VA_ARGS__) |
| #define vfnmadd_vf_f64m8_m | ( | ... | ) | __riscv_vfnmadd_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfnmadd_vv_f16m1 | ( | ... | ) | __riscv_vfnmadd_vv_f16m1_tu(__VA_ARGS__) |
| #define vfnmadd_vv_f16m1_m | ( | ... | ) | __riscv_vfnmadd_vv_f16m1_tumu(__VA_ARGS__) |
| #define vfnmadd_vv_f16m2 | ( | ... | ) | __riscv_vfnmadd_vv_f16m2_tu(__VA_ARGS__) |
| #define vfnmadd_vv_f16m2_m | ( | ... | ) | __riscv_vfnmadd_vv_f16m2_tumu(__VA_ARGS__) |
| #define vfnmadd_vv_f16m4 | ( | ... | ) | __riscv_vfnmadd_vv_f16m4_tu(__VA_ARGS__) |
| #define vfnmadd_vv_f16m4_m | ( | ... | ) | __riscv_vfnmadd_vv_f16m4_tumu(__VA_ARGS__) |
| #define vfnmadd_vv_f16m8 | ( | ... | ) | __riscv_vfnmadd_vv_f16m8_tu(__VA_ARGS__) |
| #define vfnmadd_vv_f16m8_m | ( | ... | ) | __riscv_vfnmadd_vv_f16m8_tumu(__VA_ARGS__) |
| #define vfnmadd_vv_f16mf2 | ( | ... | ) | __riscv_vfnmadd_vv_f16mf2_tu(__VA_ARGS__) |
| #define vfnmadd_vv_f16mf2_m | ( | ... | ) | __riscv_vfnmadd_vv_f16mf2_tumu(__VA_ARGS__) |
| #define vfnmadd_vv_f16mf4 | ( | ... | ) | __riscv_vfnmadd_vv_f16mf4_tu(__VA_ARGS__) |
| #define vfnmadd_vv_f16mf4_m | ( | ... | ) | __riscv_vfnmadd_vv_f16mf4_tumu(__VA_ARGS__) |
| #define vfnmadd_vv_f32m1 | ( | ... | ) | __riscv_vfnmadd_vv_f32m1_tu(__VA_ARGS__) |
| #define vfnmadd_vv_f32m1_m | ( | ... | ) | __riscv_vfnmadd_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfnmadd_vv_f32m2 | ( | ... | ) | __riscv_vfnmadd_vv_f32m2_tu(__VA_ARGS__) |
| #define vfnmadd_vv_f32m2_m | ( | ... | ) | __riscv_vfnmadd_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfnmadd_vv_f32m4 | ( | ... | ) | __riscv_vfnmadd_vv_f32m4_tu(__VA_ARGS__) |
| #define vfnmadd_vv_f32m4_m | ( | ... | ) | __riscv_vfnmadd_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfnmadd_vv_f32m8 | ( | ... | ) | __riscv_vfnmadd_vv_f32m8_tu(__VA_ARGS__) |
| #define vfnmadd_vv_f32m8_m | ( | ... | ) | __riscv_vfnmadd_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfnmadd_vv_f32mf2 | ( | ... | ) | __riscv_vfnmadd_vv_f32mf2_tu(__VA_ARGS__) |
| #define vfnmadd_vv_f32mf2_m | ( | ... | ) | __riscv_vfnmadd_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfnmadd_vv_f64m1 | ( | ... | ) | __riscv_vfnmadd_vv_f64m1_tu(__VA_ARGS__) |
| #define vfnmadd_vv_f64m1_m | ( | ... | ) | __riscv_vfnmadd_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfnmadd_vv_f64m2 | ( | ... | ) | __riscv_vfnmadd_vv_f64m2_tu(__VA_ARGS__) |
| #define vfnmadd_vv_f64m2_m | ( | ... | ) | __riscv_vfnmadd_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfnmadd_vv_f64m4 | ( | ... | ) | __riscv_vfnmadd_vv_f64m4_tu(__VA_ARGS__) |
| #define vfnmadd_vv_f64m4_m | ( | ... | ) | __riscv_vfnmadd_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfnmadd_vv_f64m8 | ( | ... | ) | __riscv_vfnmadd_vv_f64m8_tu(__VA_ARGS__) |
| #define vfnmadd_vv_f64m8_m | ( | ... | ) | __riscv_vfnmadd_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfnmsac_vf_f16m1 | ( | ... | ) | __riscv_vfnmsac_vf_f16m1_tu(__VA_ARGS__) |
| #define vfnmsac_vf_f16m1_m | ( | ... | ) | __riscv_vfnmsac_vf_f16m1_tumu(__VA_ARGS__) |
| #define vfnmsac_vf_f16m2 | ( | ... | ) | __riscv_vfnmsac_vf_f16m2_tu(__VA_ARGS__) |
| #define vfnmsac_vf_f16m2_m | ( | ... | ) | __riscv_vfnmsac_vf_f16m2_tumu(__VA_ARGS__) |
| #define vfnmsac_vf_f16m4 | ( | ... | ) | __riscv_vfnmsac_vf_f16m4_tu(__VA_ARGS__) |
| #define vfnmsac_vf_f16m4_m | ( | ... | ) | __riscv_vfnmsac_vf_f16m4_tumu(__VA_ARGS__) |
| #define vfnmsac_vf_f16m8 | ( | ... | ) | __riscv_vfnmsac_vf_f16m8_tu(__VA_ARGS__) |
| #define vfnmsac_vf_f16m8_m | ( | ... | ) | __riscv_vfnmsac_vf_f16m8_tumu(__VA_ARGS__) |
| #define vfnmsac_vf_f16mf2 | ( | ... | ) | __riscv_vfnmsac_vf_f16mf2_tu(__VA_ARGS__) |
| #define vfnmsac_vf_f16mf2_m | ( | ... | ) | __riscv_vfnmsac_vf_f16mf2_tumu(__VA_ARGS__) |
| #define vfnmsac_vf_f16mf4 | ( | ... | ) | __riscv_vfnmsac_vf_f16mf4_tu(__VA_ARGS__) |
| #define vfnmsac_vf_f16mf4_m | ( | ... | ) | __riscv_vfnmsac_vf_f16mf4_tumu(__VA_ARGS__) |
| #define vfnmsac_vf_f32m1 | ( | ... | ) | __riscv_vfnmsac_vf_f32m1_tu(__VA_ARGS__) |
| #define vfnmsac_vf_f32m1_m | ( | ... | ) | __riscv_vfnmsac_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfnmsac_vf_f32m2 | ( | ... | ) | __riscv_vfnmsac_vf_f32m2_tu(__VA_ARGS__) |
| #define vfnmsac_vf_f32m2_m | ( | ... | ) | __riscv_vfnmsac_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfnmsac_vf_f32m4 | ( | ... | ) | __riscv_vfnmsac_vf_f32m4_tu(__VA_ARGS__) |
| #define vfnmsac_vf_f32m4_m | ( | ... | ) | __riscv_vfnmsac_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfnmsac_vf_f32m8 | ( | ... | ) | __riscv_vfnmsac_vf_f32m8_tu(__VA_ARGS__) |
| #define vfnmsac_vf_f32m8_m | ( | ... | ) | __riscv_vfnmsac_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfnmsac_vf_f32mf2 | ( | ... | ) | __riscv_vfnmsac_vf_f32mf2_tu(__VA_ARGS__) |
| #define vfnmsac_vf_f32mf2_m | ( | ... | ) | __riscv_vfnmsac_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfnmsac_vf_f64m1 | ( | ... | ) | __riscv_vfnmsac_vf_f64m1_tu(__VA_ARGS__) |
| #define vfnmsac_vf_f64m1_m | ( | ... | ) | __riscv_vfnmsac_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfnmsac_vf_f64m2 | ( | ... | ) | __riscv_vfnmsac_vf_f64m2_tu(__VA_ARGS__) |
| #define vfnmsac_vf_f64m2_m | ( | ... | ) | __riscv_vfnmsac_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfnmsac_vf_f64m4 | ( | ... | ) | __riscv_vfnmsac_vf_f64m4_tu(__VA_ARGS__) |
| #define vfnmsac_vf_f64m4_m | ( | ... | ) | __riscv_vfnmsac_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfnmsac_vf_f64m8 | ( | ... | ) | __riscv_vfnmsac_vf_f64m8_tu(__VA_ARGS__) |
| #define vfnmsac_vf_f64m8_m | ( | ... | ) | __riscv_vfnmsac_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfnmsac_vv_f16m1 | ( | ... | ) | __riscv_vfnmsac_vv_f16m1_tu(__VA_ARGS__) |
| #define vfnmsac_vv_f16m1_m | ( | ... | ) | __riscv_vfnmsac_vv_f16m1_tumu(__VA_ARGS__) |
| #define vfnmsac_vv_f16m2 | ( | ... | ) | __riscv_vfnmsac_vv_f16m2_tu(__VA_ARGS__) |
| #define vfnmsac_vv_f16m2_m | ( | ... | ) | __riscv_vfnmsac_vv_f16m2_tumu(__VA_ARGS__) |
| #define vfnmsac_vv_f16m4 | ( | ... | ) | __riscv_vfnmsac_vv_f16m4_tu(__VA_ARGS__) |
| #define vfnmsac_vv_f16m4_m | ( | ... | ) | __riscv_vfnmsac_vv_f16m4_tumu(__VA_ARGS__) |
| #define vfnmsac_vv_f16m8 | ( | ... | ) | __riscv_vfnmsac_vv_f16m8_tu(__VA_ARGS__) |
| #define vfnmsac_vv_f16m8_m | ( | ... | ) | __riscv_vfnmsac_vv_f16m8_tumu(__VA_ARGS__) |
| #define vfnmsac_vv_f16mf2 | ( | ... | ) | __riscv_vfnmsac_vv_f16mf2_tu(__VA_ARGS__) |
| #define vfnmsac_vv_f16mf2_m | ( | ... | ) | __riscv_vfnmsac_vv_f16mf2_tumu(__VA_ARGS__) |
| #define vfnmsac_vv_f16mf4 | ( | ... | ) | __riscv_vfnmsac_vv_f16mf4_tu(__VA_ARGS__) |
| #define vfnmsac_vv_f16mf4_m | ( | ... | ) | __riscv_vfnmsac_vv_f16mf4_tumu(__VA_ARGS__) |
| #define vfnmsac_vv_f32m1 | ( | ... | ) | __riscv_vfnmsac_vv_f32m1_tu(__VA_ARGS__) |
| #define vfnmsac_vv_f32m1_m | ( | ... | ) | __riscv_vfnmsac_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfnmsac_vv_f32m2 | ( | ... | ) | __riscv_vfnmsac_vv_f32m2_tu(__VA_ARGS__) |
| #define vfnmsac_vv_f32m2_m | ( | ... | ) | __riscv_vfnmsac_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfnmsac_vv_f32m4 | ( | ... | ) | __riscv_vfnmsac_vv_f32m4_tu(__VA_ARGS__) |
| #define vfnmsac_vv_f32m4_m | ( | ... | ) | __riscv_vfnmsac_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfnmsac_vv_f32m8 | ( | ... | ) | __riscv_vfnmsac_vv_f32m8_tu(__VA_ARGS__) |
| #define vfnmsac_vv_f32m8_m | ( | ... | ) | __riscv_vfnmsac_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfnmsac_vv_f32mf2 | ( | ... | ) | __riscv_vfnmsac_vv_f32mf2_tu(__VA_ARGS__) |
| #define vfnmsac_vv_f32mf2_m | ( | ... | ) | __riscv_vfnmsac_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfnmsac_vv_f64m1 | ( | ... | ) | __riscv_vfnmsac_vv_f64m1_tu(__VA_ARGS__) |
| #define vfnmsac_vv_f64m1_m | ( | ... | ) | __riscv_vfnmsac_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfnmsac_vv_f64m2 | ( | ... | ) | __riscv_vfnmsac_vv_f64m2_tu(__VA_ARGS__) |
| #define vfnmsac_vv_f64m2_m | ( | ... | ) | __riscv_vfnmsac_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfnmsac_vv_f64m4 | ( | ... | ) | __riscv_vfnmsac_vv_f64m4_tu(__VA_ARGS__) |
| #define vfnmsac_vv_f64m4_m | ( | ... | ) | __riscv_vfnmsac_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfnmsac_vv_f64m8 | ( | ... | ) | __riscv_vfnmsac_vv_f64m8_tu(__VA_ARGS__) |
| #define vfnmsac_vv_f64m8_m | ( | ... | ) | __riscv_vfnmsac_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfnmsub_vf_f16m1 | ( | ... | ) | __riscv_vfnmsub_vf_f16m1_tu(__VA_ARGS__) |
| #define vfnmsub_vf_f16m1_m | ( | ... | ) | __riscv_vfnmsub_vf_f16m1_tumu(__VA_ARGS__) |
| #define vfnmsub_vf_f16m2 | ( | ... | ) | __riscv_vfnmsub_vf_f16m2_tu(__VA_ARGS__) |
| #define vfnmsub_vf_f16m2_m | ( | ... | ) | __riscv_vfnmsub_vf_f16m2_tumu(__VA_ARGS__) |
| #define vfnmsub_vf_f16m4 | ( | ... | ) | __riscv_vfnmsub_vf_f16m4_tu(__VA_ARGS__) |
| #define vfnmsub_vf_f16m4_m | ( | ... | ) | __riscv_vfnmsub_vf_f16m4_tumu(__VA_ARGS__) |
| #define vfnmsub_vf_f16m8 | ( | ... | ) | __riscv_vfnmsub_vf_f16m8_tu(__VA_ARGS__) |
| #define vfnmsub_vf_f16m8_m | ( | ... | ) | __riscv_vfnmsub_vf_f16m8_tumu(__VA_ARGS__) |
| #define vfnmsub_vf_f16mf2 | ( | ... | ) | __riscv_vfnmsub_vf_f16mf2_tu(__VA_ARGS__) |
| #define vfnmsub_vf_f16mf2_m | ( | ... | ) | __riscv_vfnmsub_vf_f16mf2_tumu(__VA_ARGS__) |
| #define vfnmsub_vf_f16mf4 | ( | ... | ) | __riscv_vfnmsub_vf_f16mf4_tu(__VA_ARGS__) |
| #define vfnmsub_vf_f16mf4_m | ( | ... | ) | __riscv_vfnmsub_vf_f16mf4_tumu(__VA_ARGS__) |
| #define vfnmsub_vf_f32m1 | ( | ... | ) | __riscv_vfnmsub_vf_f32m1_tu(__VA_ARGS__) |
| #define vfnmsub_vf_f32m1_m | ( | ... | ) | __riscv_vfnmsub_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfnmsub_vf_f32m2 | ( | ... | ) | __riscv_vfnmsub_vf_f32m2_tu(__VA_ARGS__) |
| #define vfnmsub_vf_f32m2_m | ( | ... | ) | __riscv_vfnmsub_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfnmsub_vf_f32m4 | ( | ... | ) | __riscv_vfnmsub_vf_f32m4_tu(__VA_ARGS__) |
| #define vfnmsub_vf_f32m4_m | ( | ... | ) | __riscv_vfnmsub_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfnmsub_vf_f32m8 | ( | ... | ) | __riscv_vfnmsub_vf_f32m8_tu(__VA_ARGS__) |
| #define vfnmsub_vf_f32m8_m | ( | ... | ) | __riscv_vfnmsub_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfnmsub_vf_f32mf2 | ( | ... | ) | __riscv_vfnmsub_vf_f32mf2_tu(__VA_ARGS__) |
| #define vfnmsub_vf_f32mf2_m | ( | ... | ) | __riscv_vfnmsub_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfnmsub_vf_f64m1 | ( | ... | ) | __riscv_vfnmsub_vf_f64m1_tu(__VA_ARGS__) |
| #define vfnmsub_vf_f64m1_m | ( | ... | ) | __riscv_vfnmsub_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfnmsub_vf_f64m2 | ( | ... | ) | __riscv_vfnmsub_vf_f64m2_tu(__VA_ARGS__) |
| #define vfnmsub_vf_f64m2_m | ( | ... | ) | __riscv_vfnmsub_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfnmsub_vf_f64m4 | ( | ... | ) | __riscv_vfnmsub_vf_f64m4_tu(__VA_ARGS__) |
| #define vfnmsub_vf_f64m4_m | ( | ... | ) | __riscv_vfnmsub_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfnmsub_vf_f64m8 | ( | ... | ) | __riscv_vfnmsub_vf_f64m8_tu(__VA_ARGS__) |
| #define vfnmsub_vf_f64m8_m | ( | ... | ) | __riscv_vfnmsub_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfnmsub_vv_f16m1 | ( | ... | ) | __riscv_vfnmsub_vv_f16m1_tu(__VA_ARGS__) |
| #define vfnmsub_vv_f16m1_m | ( | ... | ) | __riscv_vfnmsub_vv_f16m1_tumu(__VA_ARGS__) |
| #define vfnmsub_vv_f16m2 | ( | ... | ) | __riscv_vfnmsub_vv_f16m2_tu(__VA_ARGS__) |
| #define vfnmsub_vv_f16m2_m | ( | ... | ) | __riscv_vfnmsub_vv_f16m2_tumu(__VA_ARGS__) |
| #define vfnmsub_vv_f16m4 | ( | ... | ) | __riscv_vfnmsub_vv_f16m4_tu(__VA_ARGS__) |
| #define vfnmsub_vv_f16m4_m | ( | ... | ) | __riscv_vfnmsub_vv_f16m4_tumu(__VA_ARGS__) |
| #define vfnmsub_vv_f16m8 | ( | ... | ) | __riscv_vfnmsub_vv_f16m8_tu(__VA_ARGS__) |
| #define vfnmsub_vv_f16m8_m | ( | ... | ) | __riscv_vfnmsub_vv_f16m8_tumu(__VA_ARGS__) |
| #define vfnmsub_vv_f16mf2 | ( | ... | ) | __riscv_vfnmsub_vv_f16mf2_tu(__VA_ARGS__) |
| #define vfnmsub_vv_f16mf2_m | ( | ... | ) | __riscv_vfnmsub_vv_f16mf2_tumu(__VA_ARGS__) |
| #define vfnmsub_vv_f16mf4 | ( | ... | ) | __riscv_vfnmsub_vv_f16mf4_tu(__VA_ARGS__) |
| #define vfnmsub_vv_f16mf4_m | ( | ... | ) | __riscv_vfnmsub_vv_f16mf4_tumu(__VA_ARGS__) |
| #define vfnmsub_vv_f32m1 | ( | ... | ) | __riscv_vfnmsub_vv_f32m1_tu(__VA_ARGS__) |
| #define vfnmsub_vv_f32m1_m | ( | ... | ) | __riscv_vfnmsub_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfnmsub_vv_f32m2 | ( | ... | ) | __riscv_vfnmsub_vv_f32m2_tu(__VA_ARGS__) |
| #define vfnmsub_vv_f32m2_m | ( | ... | ) | __riscv_vfnmsub_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfnmsub_vv_f32m4 | ( | ... | ) | __riscv_vfnmsub_vv_f32m4_tu(__VA_ARGS__) |
| #define vfnmsub_vv_f32m4_m | ( | ... | ) | __riscv_vfnmsub_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfnmsub_vv_f32m8 | ( | ... | ) | __riscv_vfnmsub_vv_f32m8_tu(__VA_ARGS__) |
| #define vfnmsub_vv_f32m8_m | ( | ... | ) | __riscv_vfnmsub_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfnmsub_vv_f32mf2 | ( | ... | ) | __riscv_vfnmsub_vv_f32mf2_tu(__VA_ARGS__) |
| #define vfnmsub_vv_f32mf2_m | ( | ... | ) | __riscv_vfnmsub_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfnmsub_vv_f64m1 | ( | ... | ) | __riscv_vfnmsub_vv_f64m1_tu(__VA_ARGS__) |
| #define vfnmsub_vv_f64m1_m | ( | ... | ) | __riscv_vfnmsub_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfnmsub_vv_f64m2 | ( | ... | ) | __riscv_vfnmsub_vv_f64m2_tu(__VA_ARGS__) |
| #define vfnmsub_vv_f64m2_m | ( | ... | ) | __riscv_vfnmsub_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfnmsub_vv_f64m4 | ( | ... | ) | __riscv_vfnmsub_vv_f64m4_tu(__VA_ARGS__) |
| #define vfnmsub_vv_f64m4_m | ( | ... | ) | __riscv_vfnmsub_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfnmsub_vv_f64m8 | ( | ... | ) | __riscv_vfnmsub_vv_f64m8_tu(__VA_ARGS__) |
| #define vfnmsub_vv_f64m8_m | ( | ... | ) | __riscv_vfnmsub_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfrdiv_vf_f16m1 | ( | ... | ) | __riscv_vfrdiv_vf_f16m1(__VA_ARGS__) |
| #define vfrdiv_vf_f16m1_m | ( | ... | ) | __riscv_vfrdiv_vf_f16m1_tumu(__VA_ARGS__) |
| #define vfrdiv_vf_f16m2 | ( | ... | ) | __riscv_vfrdiv_vf_f16m2(__VA_ARGS__) |
| #define vfrdiv_vf_f16m2_m | ( | ... | ) | __riscv_vfrdiv_vf_f16m2_tumu(__VA_ARGS__) |
| #define vfrdiv_vf_f16m4 | ( | ... | ) | __riscv_vfrdiv_vf_f16m4(__VA_ARGS__) |
| #define vfrdiv_vf_f16m4_m | ( | ... | ) | __riscv_vfrdiv_vf_f16m4_tumu(__VA_ARGS__) |
| #define vfrdiv_vf_f16m8 | ( | ... | ) | __riscv_vfrdiv_vf_f16m8(__VA_ARGS__) |
| #define vfrdiv_vf_f16m8_m | ( | ... | ) | __riscv_vfrdiv_vf_f16m8_tumu(__VA_ARGS__) |
| #define vfrdiv_vf_f16mf2 | ( | ... | ) | __riscv_vfrdiv_vf_f16mf2(__VA_ARGS__) |
| #define vfrdiv_vf_f16mf2_m | ( | ... | ) | __riscv_vfrdiv_vf_f16mf2_tumu(__VA_ARGS__) |
| #define vfrdiv_vf_f16mf4 | ( | ... | ) | __riscv_vfrdiv_vf_f16mf4(__VA_ARGS__) |
| #define vfrdiv_vf_f16mf4_m | ( | ... | ) | __riscv_vfrdiv_vf_f16mf4_tumu(__VA_ARGS__) |
| #define vfrdiv_vf_f32m1 | ( | ... | ) | __riscv_vfrdiv_vf_f32m1(__VA_ARGS__) |
| #define vfrdiv_vf_f32m1_m | ( | ... | ) | __riscv_vfrdiv_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfrdiv_vf_f32m2 | ( | ... | ) | __riscv_vfrdiv_vf_f32m2(__VA_ARGS__) |
| #define vfrdiv_vf_f32m2_m | ( | ... | ) | __riscv_vfrdiv_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfrdiv_vf_f32m4 | ( | ... | ) | __riscv_vfrdiv_vf_f32m4(__VA_ARGS__) |
| #define vfrdiv_vf_f32m4_m | ( | ... | ) | __riscv_vfrdiv_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfrdiv_vf_f32m8 | ( | ... | ) | __riscv_vfrdiv_vf_f32m8(__VA_ARGS__) |
| #define vfrdiv_vf_f32m8_m | ( | ... | ) | __riscv_vfrdiv_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfrdiv_vf_f32mf2 | ( | ... | ) | __riscv_vfrdiv_vf_f32mf2(__VA_ARGS__) |
| #define vfrdiv_vf_f32mf2_m | ( | ... | ) | __riscv_vfrdiv_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfrdiv_vf_f64m1 | ( | ... | ) | __riscv_vfrdiv_vf_f64m1(__VA_ARGS__) |
| #define vfrdiv_vf_f64m1_m | ( | ... | ) | __riscv_vfrdiv_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfrdiv_vf_f64m2 | ( | ... | ) | __riscv_vfrdiv_vf_f64m2(__VA_ARGS__) |
| #define vfrdiv_vf_f64m2_m | ( | ... | ) | __riscv_vfrdiv_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfrdiv_vf_f64m4 | ( | ... | ) | __riscv_vfrdiv_vf_f64m4(__VA_ARGS__) |
| #define vfrdiv_vf_f64m4_m | ( | ... | ) | __riscv_vfrdiv_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfrdiv_vf_f64m8 | ( | ... | ) | __riscv_vfrdiv_vf_f64m8(__VA_ARGS__) |
| #define vfrdiv_vf_f64m8_m | ( | ... | ) | __riscv_vfrdiv_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfrec7_v_f16m1 | ( | ... | ) | __riscv_vfrec7_v_f16m1(__VA_ARGS__) |
| #define vfrec7_v_f16m1_m | ( | ... | ) | __riscv_vfrec7_v_f16m1_tumu(__VA_ARGS__) |
| #define vfrec7_v_f16m2 | ( | ... | ) | __riscv_vfrec7_v_f16m2(__VA_ARGS__) |
| #define vfrec7_v_f16m2_m | ( | ... | ) | __riscv_vfrec7_v_f16m2_tumu(__VA_ARGS__) |
| #define vfrec7_v_f16m4 | ( | ... | ) | __riscv_vfrec7_v_f16m4(__VA_ARGS__) |
| #define vfrec7_v_f16m4_m | ( | ... | ) | __riscv_vfrec7_v_f16m4_tumu(__VA_ARGS__) |
| #define vfrec7_v_f16m8 | ( | ... | ) | __riscv_vfrec7_v_f16m8(__VA_ARGS__) |
| #define vfrec7_v_f16m8_m | ( | ... | ) | __riscv_vfrec7_v_f16m8_tumu(__VA_ARGS__) |
| #define vfrec7_v_f16mf2 | ( | ... | ) | __riscv_vfrec7_v_f16mf2(__VA_ARGS__) |
| #define vfrec7_v_f16mf2_m | ( | ... | ) | __riscv_vfrec7_v_f16mf2_tumu(__VA_ARGS__) |
| #define vfrec7_v_f16mf4 | ( | ... | ) | __riscv_vfrec7_v_f16mf4(__VA_ARGS__) |
| #define vfrec7_v_f16mf4_m | ( | ... | ) | __riscv_vfrec7_v_f16mf4_tumu(__VA_ARGS__) |
| #define vfrec7_v_f32m1 | ( | ... | ) | __riscv_vfrec7_v_f32m1(__VA_ARGS__) |
| #define vfrec7_v_f32m1_m | ( | ... | ) | __riscv_vfrec7_v_f32m1_tumu(__VA_ARGS__) |
| #define vfrec7_v_f32m2 | ( | ... | ) | __riscv_vfrec7_v_f32m2(__VA_ARGS__) |
| #define vfrec7_v_f32m2_m | ( | ... | ) | __riscv_vfrec7_v_f32m2_tumu(__VA_ARGS__) |
| #define vfrec7_v_f32m4 | ( | ... | ) | __riscv_vfrec7_v_f32m4(__VA_ARGS__) |
| #define vfrec7_v_f32m4_m | ( | ... | ) | __riscv_vfrec7_v_f32m4_tumu(__VA_ARGS__) |
| #define vfrec7_v_f32m8 | ( | ... | ) | __riscv_vfrec7_v_f32m8(__VA_ARGS__) |
| #define vfrec7_v_f32m8_m | ( | ... | ) | __riscv_vfrec7_v_f32m8_tumu(__VA_ARGS__) |
| #define vfrec7_v_f32mf2 | ( | ... | ) | __riscv_vfrec7_v_f32mf2(__VA_ARGS__) |
| #define vfrec7_v_f32mf2_m | ( | ... | ) | __riscv_vfrec7_v_f32mf2_tumu(__VA_ARGS__) |
| #define vfrec7_v_f64m1 | ( | ... | ) | __riscv_vfrec7_v_f64m1(__VA_ARGS__) |
| #define vfrec7_v_f64m1_m | ( | ... | ) | __riscv_vfrec7_v_f64m1_tumu(__VA_ARGS__) |
| #define vfrec7_v_f64m2 | ( | ... | ) | __riscv_vfrec7_v_f64m2(__VA_ARGS__) |
| #define vfrec7_v_f64m2_m | ( | ... | ) | __riscv_vfrec7_v_f64m2_tumu(__VA_ARGS__) |
| #define vfrec7_v_f64m4 | ( | ... | ) | __riscv_vfrec7_v_f64m4(__VA_ARGS__) |
| #define vfrec7_v_f64m4_m | ( | ... | ) | __riscv_vfrec7_v_f64m4_tumu(__VA_ARGS__) |
| #define vfrec7_v_f64m8 | ( | ... | ) | __riscv_vfrec7_v_f64m8(__VA_ARGS__) |
| #define vfrec7_v_f64m8_m | ( | ... | ) | __riscv_vfrec7_v_f64m8_tumu(__VA_ARGS__) |
| #define vfredmax_vs_f16m1_f16m1 | ( | ... | ) | __riscv_vfredmax_vs_f16m1_f16m1_tu(__VA_ARGS__) |
| #define vfredmax_vs_f16m1_f16m1_m | ( | ... | ) | __riscv_vfredmax_vs_f16m1_f16m1_tum(__VA_ARGS__) |
| #define vfredmax_vs_f16m2_f16m1 | ( | ... | ) | __riscv_vfredmax_vs_f16m2_f16m1_tu(__VA_ARGS__) |
| #define vfredmax_vs_f16m2_f16m1_m | ( | ... | ) | __riscv_vfredmax_vs_f16m2_f16m1_tum(__VA_ARGS__) |
| #define vfredmax_vs_f16m4_f16m1 | ( | ... | ) | __riscv_vfredmax_vs_f16m4_f16m1_tu(__VA_ARGS__) |
| #define vfredmax_vs_f16m4_f16m1_m | ( | ... | ) | __riscv_vfredmax_vs_f16m4_f16m1_tum(__VA_ARGS__) |
| #define vfredmax_vs_f16m8_f16m1 | ( | ... | ) | __riscv_vfredmax_vs_f16m8_f16m1_tu(__VA_ARGS__) |
| #define vfredmax_vs_f16m8_f16m1_m | ( | ... | ) | __riscv_vfredmax_vs_f16m8_f16m1_tum(__VA_ARGS__) |
| #define vfredmax_vs_f16mf2_f16m1 | ( | ... | ) | __riscv_vfredmax_vs_f16mf2_f16m1_tu(__VA_ARGS__) |
| #define vfredmax_vs_f16mf2_f16m1_m | ( | ... | ) | __riscv_vfredmax_vs_f16mf2_f16m1_tum(__VA_ARGS__) |
| #define vfredmax_vs_f16mf4_f16m1 | ( | ... | ) | __riscv_vfredmax_vs_f16mf4_f16m1_tu(__VA_ARGS__) |
| #define vfredmax_vs_f16mf4_f16m1_m | ( | ... | ) | __riscv_vfredmax_vs_f16mf4_f16m1_tum(__VA_ARGS__) |
| #define vfredmax_vs_f32m1_f32m1 | ( | ... | ) | __riscv_vfredmax_vs_f32m1_f32m1_tu(__VA_ARGS__) |
| #define vfredmax_vs_f32m1_f32m1_m | ( | ... | ) | __riscv_vfredmax_vs_f32m1_f32m1_tum(__VA_ARGS__) |
| #define vfredmax_vs_f32m2_f32m1 | ( | ... | ) | __riscv_vfredmax_vs_f32m2_f32m1_tu(__VA_ARGS__) |
| #define vfredmax_vs_f32m2_f32m1_m | ( | ... | ) | __riscv_vfredmax_vs_f32m2_f32m1_tum(__VA_ARGS__) |
| #define vfredmax_vs_f32m4_f32m1 | ( | ... | ) | __riscv_vfredmax_vs_f32m4_f32m1_tu(__VA_ARGS__) |
| #define vfredmax_vs_f32m4_f32m1_m | ( | ... | ) | __riscv_vfredmax_vs_f32m4_f32m1_tum(__VA_ARGS__) |
| #define vfredmax_vs_f32m8_f32m1 | ( | ... | ) | __riscv_vfredmax_vs_f32m8_f32m1_tu(__VA_ARGS__) |
| #define vfredmax_vs_f32m8_f32m1_m | ( | ... | ) | __riscv_vfredmax_vs_f32m8_f32m1_tum(__VA_ARGS__) |
| #define vfredmax_vs_f32mf2_f32m1 | ( | ... | ) | __riscv_vfredmax_vs_f32mf2_f32m1_tu(__VA_ARGS__) |
| #define vfredmax_vs_f32mf2_f32m1_m | ( | ... | ) | __riscv_vfredmax_vs_f32mf2_f32m1_tum(__VA_ARGS__) |
| #define vfredmax_vs_f64m1_f64m1 | ( | ... | ) | __riscv_vfredmax_vs_f64m1_f64m1_tu(__VA_ARGS__) |
| #define vfredmax_vs_f64m1_f64m1_m | ( | ... | ) | __riscv_vfredmax_vs_f64m1_f64m1_tum(__VA_ARGS__) |
| #define vfredmax_vs_f64m2_f64m1 | ( | ... | ) | __riscv_vfredmax_vs_f64m2_f64m1_tu(__VA_ARGS__) |
| #define vfredmax_vs_f64m2_f64m1_m | ( | ... | ) | __riscv_vfredmax_vs_f64m2_f64m1_tum(__VA_ARGS__) |
| #define vfredmax_vs_f64m4_f64m1 | ( | ... | ) | __riscv_vfredmax_vs_f64m4_f64m1_tu(__VA_ARGS__) |
| #define vfredmax_vs_f64m4_f64m1_m | ( | ... | ) | __riscv_vfredmax_vs_f64m4_f64m1_tum(__VA_ARGS__) |
| #define vfredmax_vs_f64m8_f64m1 | ( | ... | ) | __riscv_vfredmax_vs_f64m8_f64m1_tu(__VA_ARGS__) |
| #define vfredmax_vs_f64m8_f64m1_m | ( | ... | ) | __riscv_vfredmax_vs_f64m8_f64m1_tum(__VA_ARGS__) |
| #define vfredmin_vs_f16m1_f16m1 | ( | ... | ) | __riscv_vfredmin_vs_f16m1_f16m1_tu(__VA_ARGS__) |
| #define vfredmin_vs_f16m1_f16m1_m | ( | ... | ) | __riscv_vfredmin_vs_f16m1_f16m1_tum(__VA_ARGS__) |
| #define vfredmin_vs_f16m2_f16m1 | ( | ... | ) | __riscv_vfredmin_vs_f16m2_f16m1_tu(__VA_ARGS__) |
| #define vfredmin_vs_f16m2_f16m1_m | ( | ... | ) | __riscv_vfredmin_vs_f16m2_f16m1_tum(__VA_ARGS__) |
| #define vfredmin_vs_f16m4_f16m1 | ( | ... | ) | __riscv_vfredmin_vs_f16m4_f16m1_tu(__VA_ARGS__) |
| #define vfredmin_vs_f16m4_f16m1_m | ( | ... | ) | __riscv_vfredmin_vs_f16m4_f16m1_tum(__VA_ARGS__) |
| #define vfredmin_vs_f16m8_f16m1 | ( | ... | ) | __riscv_vfredmin_vs_f16m8_f16m1_tu(__VA_ARGS__) |
| #define vfredmin_vs_f16m8_f16m1_m | ( | ... | ) | __riscv_vfredmin_vs_f16m8_f16m1_tum(__VA_ARGS__) |
| #define vfredmin_vs_f16mf2_f16m1 | ( | ... | ) | __riscv_vfredmin_vs_f16mf2_f16m1_tu(__VA_ARGS__) |
| #define vfredmin_vs_f16mf2_f16m1_m | ( | ... | ) | __riscv_vfredmin_vs_f16mf2_f16m1_tum(__VA_ARGS__) |
| #define vfredmin_vs_f16mf4_f16m1 | ( | ... | ) | __riscv_vfredmin_vs_f16mf4_f16m1_tu(__VA_ARGS__) |
| #define vfredmin_vs_f16mf4_f16m1_m | ( | ... | ) | __riscv_vfredmin_vs_f16mf4_f16m1_tum(__VA_ARGS__) |
| #define vfredmin_vs_f32m1_f32m1 | ( | ... | ) | __riscv_vfredmin_vs_f32m1_f32m1_tu(__VA_ARGS__) |
| #define vfredmin_vs_f32m1_f32m1_m | ( | ... | ) | __riscv_vfredmin_vs_f32m1_f32m1_tum(__VA_ARGS__) |
| #define vfredmin_vs_f32m2_f32m1 | ( | ... | ) | __riscv_vfredmin_vs_f32m2_f32m1_tu(__VA_ARGS__) |
| #define vfredmin_vs_f32m2_f32m1_m | ( | ... | ) | __riscv_vfredmin_vs_f32m2_f32m1_tum(__VA_ARGS__) |
| #define vfredmin_vs_f32m4_f32m1 | ( | ... | ) | __riscv_vfredmin_vs_f32m4_f32m1_tu(__VA_ARGS__) |
| #define vfredmin_vs_f32m4_f32m1_m | ( | ... | ) | __riscv_vfredmin_vs_f32m4_f32m1_tum(__VA_ARGS__) |
| #define vfredmin_vs_f32m8_f32m1 | ( | ... | ) | __riscv_vfredmin_vs_f32m8_f32m1_tu(__VA_ARGS__) |
| #define vfredmin_vs_f32m8_f32m1_m | ( | ... | ) | __riscv_vfredmin_vs_f32m8_f32m1_tum(__VA_ARGS__) |
| #define vfredmin_vs_f32mf2_f32m1 | ( | ... | ) | __riscv_vfredmin_vs_f32mf2_f32m1_tu(__VA_ARGS__) |
| #define vfredmin_vs_f32mf2_f32m1_m | ( | ... | ) | __riscv_vfredmin_vs_f32mf2_f32m1_tum(__VA_ARGS__) |
| #define vfredmin_vs_f64m1_f64m1 | ( | ... | ) | __riscv_vfredmin_vs_f64m1_f64m1_tu(__VA_ARGS__) |
| #define vfredmin_vs_f64m1_f64m1_m | ( | ... | ) | __riscv_vfredmin_vs_f64m1_f64m1_tum(__VA_ARGS__) |
| #define vfredmin_vs_f64m2_f64m1 | ( | ... | ) | __riscv_vfredmin_vs_f64m2_f64m1_tu(__VA_ARGS__) |
| #define vfredmin_vs_f64m2_f64m1_m | ( | ... | ) | __riscv_vfredmin_vs_f64m2_f64m1_tum(__VA_ARGS__) |
| #define vfredmin_vs_f64m4_f64m1 | ( | ... | ) | __riscv_vfredmin_vs_f64m4_f64m1_tu(__VA_ARGS__) |
| #define vfredmin_vs_f64m4_f64m1_m | ( | ... | ) | __riscv_vfredmin_vs_f64m4_f64m1_tum(__VA_ARGS__) |
| #define vfredmin_vs_f64m8_f64m1 | ( | ... | ) | __riscv_vfredmin_vs_f64m8_f64m1_tu(__VA_ARGS__) |
| #define vfredmin_vs_f64m8_f64m1_m | ( | ... | ) | __riscv_vfredmin_vs_f64m8_f64m1_tum(__VA_ARGS__) |
| #define vfredosum_vs_f16m1_f16m1 | ( | ... | ) | __riscv_vfredosum_vs_f16m1_f16m1_tu(__VA_ARGS__) |
| #define vfredosum_vs_f16m1_f16m1_m | ( | ... | ) | __riscv_vfredosum_vs_f16m1_f16m1_tum(__VA_ARGS__) |
| #define vfredosum_vs_f16m2_f16m1 | ( | ... | ) | __riscv_vfredosum_vs_f16m2_f16m1_tu(__VA_ARGS__) |
| #define vfredosum_vs_f16m2_f16m1_m | ( | ... | ) | __riscv_vfredosum_vs_f16m2_f16m1_tum(__VA_ARGS__) |
| #define vfredosum_vs_f16m4_f16m1 | ( | ... | ) | __riscv_vfredosum_vs_f16m4_f16m1_tu(__VA_ARGS__) |
| #define vfredosum_vs_f16m4_f16m1_m | ( | ... | ) | __riscv_vfredosum_vs_f16m4_f16m1_tum(__VA_ARGS__) |
| #define vfredosum_vs_f16m8_f16m1 | ( | ... | ) | __riscv_vfredosum_vs_f16m8_f16m1_tu(__VA_ARGS__) |
| #define vfredosum_vs_f16m8_f16m1_m | ( | ... | ) | __riscv_vfredosum_vs_f16m8_f16m1_tum(__VA_ARGS__) |
| #define vfredosum_vs_f16mf2_f16m1 | ( | ... | ) | __riscv_vfredosum_vs_f16mf2_f16m1_tu(__VA_ARGS__) |
| #define vfredosum_vs_f16mf2_f16m1_m | ( | ... | ) | __riscv_vfredosum_vs_f16mf2_f16m1_tum(__VA_ARGS__) |
| #define vfredosum_vs_f16mf4_f16m1 | ( | ... | ) | __riscv_vfredosum_vs_f16mf4_f16m1_tu(__VA_ARGS__) |
| #define vfredosum_vs_f16mf4_f16m1_m | ( | ... | ) | __riscv_vfredosum_vs_f16mf4_f16m1_tum(__VA_ARGS__) |
| #define vfredosum_vs_f32m1_f32m1 | ( | ... | ) | __riscv_vfredosum_vs_f32m1_f32m1_tu(__VA_ARGS__) |
| #define vfredosum_vs_f32m1_f32m1_m | ( | ... | ) | __riscv_vfredosum_vs_f32m1_f32m1_tum(__VA_ARGS__) |
| #define vfredosum_vs_f32m2_f32m1 | ( | ... | ) | __riscv_vfredosum_vs_f32m2_f32m1_tu(__VA_ARGS__) |
| #define vfredosum_vs_f32m2_f32m1_m | ( | ... | ) | __riscv_vfredosum_vs_f32m2_f32m1_tum(__VA_ARGS__) |
| #define vfredosum_vs_f32m4_f32m1 | ( | ... | ) | __riscv_vfredosum_vs_f32m4_f32m1_tu(__VA_ARGS__) |
| #define vfredosum_vs_f32m4_f32m1_m | ( | ... | ) | __riscv_vfredosum_vs_f32m4_f32m1_tum(__VA_ARGS__) |
| #define vfredosum_vs_f32m8_f32m1 | ( | ... | ) | __riscv_vfredosum_vs_f32m8_f32m1_tu(__VA_ARGS__) |
| #define vfredosum_vs_f32m8_f32m1_m | ( | ... | ) | __riscv_vfredosum_vs_f32m8_f32m1_tum(__VA_ARGS__) |
| #define vfredosum_vs_f32mf2_f32m1 | ( | ... | ) | __riscv_vfredosum_vs_f32mf2_f32m1_tu(__VA_ARGS__) |
| #define vfredosum_vs_f32mf2_f32m1_m | ( | ... | ) | __riscv_vfredosum_vs_f32mf2_f32m1_tum(__VA_ARGS__) |
| #define vfredosum_vs_f64m1_f64m1 | ( | ... | ) | __riscv_vfredosum_vs_f64m1_f64m1_tu(__VA_ARGS__) |
| #define vfredosum_vs_f64m1_f64m1_m | ( | ... | ) | __riscv_vfredosum_vs_f64m1_f64m1_tum(__VA_ARGS__) |
| #define vfredosum_vs_f64m2_f64m1 | ( | ... | ) | __riscv_vfredosum_vs_f64m2_f64m1_tu(__VA_ARGS__) |
| #define vfredosum_vs_f64m2_f64m1_m | ( | ... | ) | __riscv_vfredosum_vs_f64m2_f64m1_tum(__VA_ARGS__) |
| #define vfredosum_vs_f64m4_f64m1 | ( | ... | ) | __riscv_vfredosum_vs_f64m4_f64m1_tu(__VA_ARGS__) |
| #define vfredosum_vs_f64m4_f64m1_m | ( | ... | ) | __riscv_vfredosum_vs_f64m4_f64m1_tum(__VA_ARGS__) |
| #define vfredosum_vs_f64m8_f64m1 | ( | ... | ) | __riscv_vfredosum_vs_f64m8_f64m1_tu(__VA_ARGS__) |
| #define vfredosum_vs_f64m8_f64m1_m | ( | ... | ) | __riscv_vfredosum_vs_f64m8_f64m1_tum(__VA_ARGS__) |
| #define vfredusum_vs_f16m1_f16m1 | ( | ... | ) | __riscv_vfredusum_vs_f16m1_f16m1_tu(__VA_ARGS__) |
| #define vfredusum_vs_f16m1_f16m1_m | ( | ... | ) | __riscv_vfredusum_vs_f16m1_f16m1_tum(__VA_ARGS__) |
| #define vfredusum_vs_f16m2_f16m1 | ( | ... | ) | __riscv_vfredusum_vs_f16m2_f16m1_tu(__VA_ARGS__) |
| #define vfredusum_vs_f16m2_f16m1_m | ( | ... | ) | __riscv_vfredusum_vs_f16m2_f16m1_tum(__VA_ARGS__) |
| #define vfredusum_vs_f16m4_f16m1 | ( | ... | ) | __riscv_vfredusum_vs_f16m4_f16m1_tu(__VA_ARGS__) |
| #define vfredusum_vs_f16m4_f16m1_m | ( | ... | ) | __riscv_vfredusum_vs_f16m4_f16m1_tum(__VA_ARGS__) |
| #define vfredusum_vs_f16m8_f16m1 | ( | ... | ) | __riscv_vfredusum_vs_f16m8_f16m1_tu(__VA_ARGS__) |
| #define vfredusum_vs_f16m8_f16m1_m | ( | ... | ) | __riscv_vfredusum_vs_f16m8_f16m1_tum(__VA_ARGS__) |
| #define vfredusum_vs_f16mf2_f16m1 | ( | ... | ) | __riscv_vfredusum_vs_f16mf2_f16m1_tu(__VA_ARGS__) |
| #define vfredusum_vs_f16mf2_f16m1_m | ( | ... | ) | __riscv_vfredusum_vs_f16mf2_f16m1_tum(__VA_ARGS__) |
| #define vfredusum_vs_f16mf4_f16m1 | ( | ... | ) | __riscv_vfredusum_vs_f16mf4_f16m1_tu(__VA_ARGS__) |
| #define vfredusum_vs_f16mf4_f16m1_m | ( | ... | ) | __riscv_vfredusum_vs_f16mf4_f16m1_tum(__VA_ARGS__) |
| #define vfredusum_vs_f32m1_f32m1 | ( | ... | ) | __riscv_vfredusum_vs_f32m1_f32m1_tu(__VA_ARGS__) |
| #define vfredusum_vs_f32m1_f32m1_m | ( | ... | ) | __riscv_vfredusum_vs_f32m1_f32m1_tum(__VA_ARGS__) |
| #define vfredusum_vs_f32m2_f32m1 | ( | ... | ) | __riscv_vfredusum_vs_f32m2_f32m1_tu(__VA_ARGS__) |
| #define vfredusum_vs_f32m2_f32m1_m | ( | ... | ) | __riscv_vfredusum_vs_f32m2_f32m1_tum(__VA_ARGS__) |
| #define vfredusum_vs_f32m4_f32m1 | ( | ... | ) | __riscv_vfredusum_vs_f32m4_f32m1_tu(__VA_ARGS__) |
| #define vfredusum_vs_f32m4_f32m1_m | ( | ... | ) | __riscv_vfredusum_vs_f32m4_f32m1_tum(__VA_ARGS__) |
| #define vfredusum_vs_f32m8_f32m1 | ( | ... | ) | __riscv_vfredusum_vs_f32m8_f32m1_tu(__VA_ARGS__) |
| #define vfredusum_vs_f32m8_f32m1_m | ( | ... | ) | __riscv_vfredusum_vs_f32m8_f32m1_tum(__VA_ARGS__) |
| #define vfredusum_vs_f32mf2_f32m1 | ( | ... | ) | __riscv_vfredusum_vs_f32mf2_f32m1_tu(__VA_ARGS__) |
| #define vfredusum_vs_f32mf2_f32m1_m | ( | ... | ) | __riscv_vfredusum_vs_f32mf2_f32m1_tum(__VA_ARGS__) |
| #define vfredusum_vs_f64m1_f64m1 | ( | ... | ) | __riscv_vfredusum_vs_f64m1_f64m1_tu(__VA_ARGS__) |
| #define vfredusum_vs_f64m1_f64m1_m | ( | ... | ) | __riscv_vfredusum_vs_f64m1_f64m1_tum(__VA_ARGS__) |
| #define vfredusum_vs_f64m2_f64m1 | ( | ... | ) | __riscv_vfredusum_vs_f64m2_f64m1_tu(__VA_ARGS__) |
| #define vfredusum_vs_f64m2_f64m1_m | ( | ... | ) | __riscv_vfredusum_vs_f64m2_f64m1_tum(__VA_ARGS__) |
| #define vfredusum_vs_f64m4_f64m1 | ( | ... | ) | __riscv_vfredusum_vs_f64m4_f64m1_tu(__VA_ARGS__) |
| #define vfredusum_vs_f64m4_f64m1_m | ( | ... | ) | __riscv_vfredusum_vs_f64m4_f64m1_tum(__VA_ARGS__) |
| #define vfredusum_vs_f64m8_f64m1 | ( | ... | ) | __riscv_vfredusum_vs_f64m8_f64m1_tu(__VA_ARGS__) |
| #define vfredusum_vs_f64m8_f64m1_m | ( | ... | ) | __riscv_vfredusum_vs_f64m8_f64m1_tum(__VA_ARGS__) |
| #define vfrsqrt7_v_f16m1 | ( | ... | ) | __riscv_vfrsqrt7_v_f16m1(__VA_ARGS__) |
| #define vfrsqrt7_v_f16m1_m | ( | ... | ) | __riscv_vfrsqrt7_v_f16m1_tumu(__VA_ARGS__) |
| #define vfrsqrt7_v_f16m2 | ( | ... | ) | __riscv_vfrsqrt7_v_f16m2(__VA_ARGS__) |
| #define vfrsqrt7_v_f16m2_m | ( | ... | ) | __riscv_vfrsqrt7_v_f16m2_tumu(__VA_ARGS__) |
| #define vfrsqrt7_v_f16m4 | ( | ... | ) | __riscv_vfrsqrt7_v_f16m4(__VA_ARGS__) |
| #define vfrsqrt7_v_f16m4_m | ( | ... | ) | __riscv_vfrsqrt7_v_f16m4_tumu(__VA_ARGS__) |
| #define vfrsqrt7_v_f16m8 | ( | ... | ) | __riscv_vfrsqrt7_v_f16m8(__VA_ARGS__) |
| #define vfrsqrt7_v_f16m8_m | ( | ... | ) | __riscv_vfrsqrt7_v_f16m8_tumu(__VA_ARGS__) |
| #define vfrsqrt7_v_f16mf2 | ( | ... | ) | __riscv_vfrsqrt7_v_f16mf2(__VA_ARGS__) |
| #define vfrsqrt7_v_f16mf2_m | ( | ... | ) | __riscv_vfrsqrt7_v_f16mf2_tumu(__VA_ARGS__) |
| #define vfrsqrt7_v_f16mf4 | ( | ... | ) | __riscv_vfrsqrt7_v_f16mf4(__VA_ARGS__) |
| #define vfrsqrt7_v_f16mf4_m | ( | ... | ) | __riscv_vfrsqrt7_v_f16mf4_tumu(__VA_ARGS__) |
| #define vfrsqrt7_v_f32m1 | ( | ... | ) | __riscv_vfrsqrt7_v_f32m1(__VA_ARGS__) |
| #define vfrsqrt7_v_f32m1_m | ( | ... | ) | __riscv_vfrsqrt7_v_f32m1_tumu(__VA_ARGS__) |
| #define vfrsqrt7_v_f32m2 | ( | ... | ) | __riscv_vfrsqrt7_v_f32m2(__VA_ARGS__) |
| #define vfrsqrt7_v_f32m2_m | ( | ... | ) | __riscv_vfrsqrt7_v_f32m2_tumu(__VA_ARGS__) |
| #define vfrsqrt7_v_f32m4 | ( | ... | ) | __riscv_vfrsqrt7_v_f32m4(__VA_ARGS__) |
| #define vfrsqrt7_v_f32m4_m | ( | ... | ) | __riscv_vfrsqrt7_v_f32m4_tumu(__VA_ARGS__) |
| #define vfrsqrt7_v_f32m8 | ( | ... | ) | __riscv_vfrsqrt7_v_f32m8(__VA_ARGS__) |
| #define vfrsqrt7_v_f32m8_m | ( | ... | ) | __riscv_vfrsqrt7_v_f32m8_tumu(__VA_ARGS__) |
| #define vfrsqrt7_v_f32mf2 | ( | ... | ) | __riscv_vfrsqrt7_v_f32mf2(__VA_ARGS__) |
| #define vfrsqrt7_v_f32mf2_m | ( | ... | ) | __riscv_vfrsqrt7_v_f32mf2_tumu(__VA_ARGS__) |
| #define vfrsqrt7_v_f64m1 | ( | ... | ) | __riscv_vfrsqrt7_v_f64m1(__VA_ARGS__) |
| #define vfrsqrt7_v_f64m1_m | ( | ... | ) | __riscv_vfrsqrt7_v_f64m1_tumu(__VA_ARGS__) |
| #define vfrsqrt7_v_f64m2 | ( | ... | ) | __riscv_vfrsqrt7_v_f64m2(__VA_ARGS__) |
| #define vfrsqrt7_v_f64m2_m | ( | ... | ) | __riscv_vfrsqrt7_v_f64m2_tumu(__VA_ARGS__) |
| #define vfrsqrt7_v_f64m4 | ( | ... | ) | __riscv_vfrsqrt7_v_f64m4(__VA_ARGS__) |
| #define vfrsqrt7_v_f64m4_m | ( | ... | ) | __riscv_vfrsqrt7_v_f64m4_tumu(__VA_ARGS__) |
| #define vfrsqrt7_v_f64m8 | ( | ... | ) | __riscv_vfrsqrt7_v_f64m8(__VA_ARGS__) |
| #define vfrsqrt7_v_f64m8_m | ( | ... | ) | __riscv_vfrsqrt7_v_f64m8_tumu(__VA_ARGS__) |
| #define vfrsub_vf_f16m1 | ( | ... | ) | __riscv_vfrsub_vf_f16m1(__VA_ARGS__) |
| #define vfrsub_vf_f16m1_m | ( | ... | ) | __riscv_vfrsub_vf_f16m1_tumu(__VA_ARGS__) |
| #define vfrsub_vf_f16m2 | ( | ... | ) | __riscv_vfrsub_vf_f16m2(__VA_ARGS__) |
| #define vfrsub_vf_f16m2_m | ( | ... | ) | __riscv_vfrsub_vf_f16m2_tumu(__VA_ARGS__) |
| #define vfrsub_vf_f16m4 | ( | ... | ) | __riscv_vfrsub_vf_f16m4(__VA_ARGS__) |
| #define vfrsub_vf_f16m4_m | ( | ... | ) | __riscv_vfrsub_vf_f16m4_tumu(__VA_ARGS__) |
| #define vfrsub_vf_f16m8 | ( | ... | ) | __riscv_vfrsub_vf_f16m8(__VA_ARGS__) |
| #define vfrsub_vf_f16m8_m | ( | ... | ) | __riscv_vfrsub_vf_f16m8_tumu(__VA_ARGS__) |
| #define vfrsub_vf_f16mf2 | ( | ... | ) | __riscv_vfrsub_vf_f16mf2(__VA_ARGS__) |
| #define vfrsub_vf_f16mf2_m | ( | ... | ) | __riscv_vfrsub_vf_f16mf2_tumu(__VA_ARGS__) |
| #define vfrsub_vf_f16mf4 | ( | ... | ) | __riscv_vfrsub_vf_f16mf4(__VA_ARGS__) |
| #define vfrsub_vf_f16mf4_m | ( | ... | ) | __riscv_vfrsub_vf_f16mf4_tumu(__VA_ARGS__) |
| #define vfrsub_vf_f32m1 | ( | ... | ) | __riscv_vfrsub_vf_f32m1(__VA_ARGS__) |
| #define vfrsub_vf_f32m1_m | ( | ... | ) | __riscv_vfrsub_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfrsub_vf_f32m2 | ( | ... | ) | __riscv_vfrsub_vf_f32m2(__VA_ARGS__) |
| #define vfrsub_vf_f32m2_m | ( | ... | ) | __riscv_vfrsub_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfrsub_vf_f32m4 | ( | ... | ) | __riscv_vfrsub_vf_f32m4(__VA_ARGS__) |
| #define vfrsub_vf_f32m4_m | ( | ... | ) | __riscv_vfrsub_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfrsub_vf_f32m8 | ( | ... | ) | __riscv_vfrsub_vf_f32m8(__VA_ARGS__) |
| #define vfrsub_vf_f32m8_m | ( | ... | ) | __riscv_vfrsub_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfrsub_vf_f32mf2 | ( | ... | ) | __riscv_vfrsub_vf_f32mf2(__VA_ARGS__) |
| #define vfrsub_vf_f32mf2_m | ( | ... | ) | __riscv_vfrsub_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfrsub_vf_f64m1 | ( | ... | ) | __riscv_vfrsub_vf_f64m1(__VA_ARGS__) |
| #define vfrsub_vf_f64m1_m | ( | ... | ) | __riscv_vfrsub_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfrsub_vf_f64m2 | ( | ... | ) | __riscv_vfrsub_vf_f64m2(__VA_ARGS__) |
| #define vfrsub_vf_f64m2_m | ( | ... | ) | __riscv_vfrsub_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfrsub_vf_f64m4 | ( | ... | ) | __riscv_vfrsub_vf_f64m4(__VA_ARGS__) |
| #define vfrsub_vf_f64m4_m | ( | ... | ) | __riscv_vfrsub_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfrsub_vf_f64m8 | ( | ... | ) | __riscv_vfrsub_vf_f64m8(__VA_ARGS__) |
| #define vfrsub_vf_f64m8_m | ( | ... | ) | __riscv_vfrsub_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfsgnj_vf_f16m1 | ( | ... | ) | __riscv_vfsgnj_vf_f16m1(__VA_ARGS__) |
| #define vfsgnj_vf_f16m1_m | ( | ... | ) | __riscv_vfsgnj_vf_f16m1_tumu(__VA_ARGS__) |
| #define vfsgnj_vf_f16m2 | ( | ... | ) | __riscv_vfsgnj_vf_f16m2(__VA_ARGS__) |
| #define vfsgnj_vf_f16m2_m | ( | ... | ) | __riscv_vfsgnj_vf_f16m2_tumu(__VA_ARGS__) |
| #define vfsgnj_vf_f16m4 | ( | ... | ) | __riscv_vfsgnj_vf_f16m4(__VA_ARGS__) |
| #define vfsgnj_vf_f16m4_m | ( | ... | ) | __riscv_vfsgnj_vf_f16m4_tumu(__VA_ARGS__) |
| #define vfsgnj_vf_f16m8 | ( | ... | ) | __riscv_vfsgnj_vf_f16m8(__VA_ARGS__) |
| #define vfsgnj_vf_f16m8_m | ( | ... | ) | __riscv_vfsgnj_vf_f16m8_tumu(__VA_ARGS__) |
| #define vfsgnj_vf_f16mf2 | ( | ... | ) | __riscv_vfsgnj_vf_f16mf2(__VA_ARGS__) |
| #define vfsgnj_vf_f16mf2_m | ( | ... | ) | __riscv_vfsgnj_vf_f16mf2_tumu(__VA_ARGS__) |
| #define vfsgnj_vf_f16mf4 | ( | ... | ) | __riscv_vfsgnj_vf_f16mf4(__VA_ARGS__) |
| #define vfsgnj_vf_f16mf4_m | ( | ... | ) | __riscv_vfsgnj_vf_f16mf4_tumu(__VA_ARGS__) |
| #define vfsgnj_vf_f32m1 | ( | ... | ) | __riscv_vfsgnj_vf_f32m1(__VA_ARGS__) |
| #define vfsgnj_vf_f32m1_m | ( | ... | ) | __riscv_vfsgnj_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfsgnj_vf_f32m2 | ( | ... | ) | __riscv_vfsgnj_vf_f32m2(__VA_ARGS__) |
| #define vfsgnj_vf_f32m2_m | ( | ... | ) | __riscv_vfsgnj_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfsgnj_vf_f32m4 | ( | ... | ) | __riscv_vfsgnj_vf_f32m4(__VA_ARGS__) |
| #define vfsgnj_vf_f32m4_m | ( | ... | ) | __riscv_vfsgnj_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfsgnj_vf_f32m8 | ( | ... | ) | __riscv_vfsgnj_vf_f32m8(__VA_ARGS__) |
| #define vfsgnj_vf_f32m8_m | ( | ... | ) | __riscv_vfsgnj_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfsgnj_vf_f32mf2 | ( | ... | ) | __riscv_vfsgnj_vf_f32mf2(__VA_ARGS__) |
| #define vfsgnj_vf_f32mf2_m | ( | ... | ) | __riscv_vfsgnj_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfsgnj_vf_f64m1 | ( | ... | ) | __riscv_vfsgnj_vf_f64m1(__VA_ARGS__) |
| #define vfsgnj_vf_f64m1_m | ( | ... | ) | __riscv_vfsgnj_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfsgnj_vf_f64m2 | ( | ... | ) | __riscv_vfsgnj_vf_f64m2(__VA_ARGS__) |
| #define vfsgnj_vf_f64m2_m | ( | ... | ) | __riscv_vfsgnj_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfsgnj_vf_f64m4 | ( | ... | ) | __riscv_vfsgnj_vf_f64m4(__VA_ARGS__) |
| #define vfsgnj_vf_f64m4_m | ( | ... | ) | __riscv_vfsgnj_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfsgnj_vf_f64m8 | ( | ... | ) | __riscv_vfsgnj_vf_f64m8(__VA_ARGS__) |
| #define vfsgnj_vf_f64m8_m | ( | ... | ) | __riscv_vfsgnj_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfsgnj_vv_f16m1 | ( | ... | ) | __riscv_vfsgnj_vv_f16m1(__VA_ARGS__) |
| #define vfsgnj_vv_f16m1_m | ( | ... | ) | __riscv_vfsgnj_vv_f16m1_tumu(__VA_ARGS__) |
| #define vfsgnj_vv_f16m2 | ( | ... | ) | __riscv_vfsgnj_vv_f16m2(__VA_ARGS__) |
| #define vfsgnj_vv_f16m2_m | ( | ... | ) | __riscv_vfsgnj_vv_f16m2_tumu(__VA_ARGS__) |
| #define vfsgnj_vv_f16m4 | ( | ... | ) | __riscv_vfsgnj_vv_f16m4(__VA_ARGS__) |
| #define vfsgnj_vv_f16m4_m | ( | ... | ) | __riscv_vfsgnj_vv_f16m4_tumu(__VA_ARGS__) |
| #define vfsgnj_vv_f16m8 | ( | ... | ) | __riscv_vfsgnj_vv_f16m8(__VA_ARGS__) |
| #define vfsgnj_vv_f16m8_m | ( | ... | ) | __riscv_vfsgnj_vv_f16m8_tumu(__VA_ARGS__) |
| #define vfsgnj_vv_f16mf2 | ( | ... | ) | __riscv_vfsgnj_vv_f16mf2(__VA_ARGS__) |
| #define vfsgnj_vv_f16mf2_m | ( | ... | ) | __riscv_vfsgnj_vv_f16mf2_tumu(__VA_ARGS__) |
| #define vfsgnj_vv_f16mf4 | ( | ... | ) | __riscv_vfsgnj_vv_f16mf4(__VA_ARGS__) |
| #define vfsgnj_vv_f16mf4_m | ( | ... | ) | __riscv_vfsgnj_vv_f16mf4_tumu(__VA_ARGS__) |
| #define vfsgnj_vv_f32m1 | ( | ... | ) | __riscv_vfsgnj_vv_f32m1(__VA_ARGS__) |
| #define vfsgnj_vv_f32m1_m | ( | ... | ) | __riscv_vfsgnj_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfsgnj_vv_f32m2 | ( | ... | ) | __riscv_vfsgnj_vv_f32m2(__VA_ARGS__) |
| #define vfsgnj_vv_f32m2_m | ( | ... | ) | __riscv_vfsgnj_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfsgnj_vv_f32m4 | ( | ... | ) | __riscv_vfsgnj_vv_f32m4(__VA_ARGS__) |
| #define vfsgnj_vv_f32m4_m | ( | ... | ) | __riscv_vfsgnj_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfsgnj_vv_f32m8 | ( | ... | ) | __riscv_vfsgnj_vv_f32m8(__VA_ARGS__) |
| #define vfsgnj_vv_f32m8_m | ( | ... | ) | __riscv_vfsgnj_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfsgnj_vv_f32mf2 | ( | ... | ) | __riscv_vfsgnj_vv_f32mf2(__VA_ARGS__) |
| #define vfsgnj_vv_f32mf2_m | ( | ... | ) | __riscv_vfsgnj_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfsgnj_vv_f64m1 | ( | ... | ) | __riscv_vfsgnj_vv_f64m1(__VA_ARGS__) |
| #define vfsgnj_vv_f64m1_m | ( | ... | ) | __riscv_vfsgnj_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfsgnj_vv_f64m2 | ( | ... | ) | __riscv_vfsgnj_vv_f64m2(__VA_ARGS__) |
| #define vfsgnj_vv_f64m2_m | ( | ... | ) | __riscv_vfsgnj_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfsgnj_vv_f64m4 | ( | ... | ) | __riscv_vfsgnj_vv_f64m4(__VA_ARGS__) |
| #define vfsgnj_vv_f64m4_m | ( | ... | ) | __riscv_vfsgnj_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfsgnj_vv_f64m8 | ( | ... | ) | __riscv_vfsgnj_vv_f64m8(__VA_ARGS__) |
| #define vfsgnj_vv_f64m8_m | ( | ... | ) | __riscv_vfsgnj_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfsgnjn_vf_f16m1 | ( | ... | ) | __riscv_vfsgnjn_vf_f16m1(__VA_ARGS__) |
| #define vfsgnjn_vf_f16m1_m | ( | ... | ) | __riscv_vfsgnjn_vf_f16m1_tumu(__VA_ARGS__) |
| #define vfsgnjn_vf_f16m2 | ( | ... | ) | __riscv_vfsgnjn_vf_f16m2(__VA_ARGS__) |
| #define vfsgnjn_vf_f16m2_m | ( | ... | ) | __riscv_vfsgnjn_vf_f16m2_tumu(__VA_ARGS__) |
| #define vfsgnjn_vf_f16m4 | ( | ... | ) | __riscv_vfsgnjn_vf_f16m4(__VA_ARGS__) |
| #define vfsgnjn_vf_f16m4_m | ( | ... | ) | __riscv_vfsgnjn_vf_f16m4_tumu(__VA_ARGS__) |
| #define vfsgnjn_vf_f16m8 | ( | ... | ) | __riscv_vfsgnjn_vf_f16m8(__VA_ARGS__) |
| #define vfsgnjn_vf_f16m8_m | ( | ... | ) | __riscv_vfsgnjn_vf_f16m8_tumu(__VA_ARGS__) |
| #define vfsgnjn_vf_f16mf2 | ( | ... | ) | __riscv_vfsgnjn_vf_f16mf2(__VA_ARGS__) |
| #define vfsgnjn_vf_f16mf2_m | ( | ... | ) | __riscv_vfsgnjn_vf_f16mf2_tumu(__VA_ARGS__) |
| #define vfsgnjn_vf_f16mf4 | ( | ... | ) | __riscv_vfsgnjn_vf_f16mf4(__VA_ARGS__) |
| #define vfsgnjn_vf_f16mf4_m | ( | ... | ) | __riscv_vfsgnjn_vf_f16mf4_tumu(__VA_ARGS__) |
| #define vfsgnjn_vf_f32m1 | ( | ... | ) | __riscv_vfsgnjn_vf_f32m1(__VA_ARGS__) |
| #define vfsgnjn_vf_f32m1_m | ( | ... | ) | __riscv_vfsgnjn_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfsgnjn_vf_f32m2 | ( | ... | ) | __riscv_vfsgnjn_vf_f32m2(__VA_ARGS__) |
| #define vfsgnjn_vf_f32m2_m | ( | ... | ) | __riscv_vfsgnjn_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfsgnjn_vf_f32m4 | ( | ... | ) | __riscv_vfsgnjn_vf_f32m4(__VA_ARGS__) |
| #define vfsgnjn_vf_f32m4_m | ( | ... | ) | __riscv_vfsgnjn_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfsgnjn_vf_f32m8 | ( | ... | ) | __riscv_vfsgnjn_vf_f32m8(__VA_ARGS__) |
| #define vfsgnjn_vf_f32m8_m | ( | ... | ) | __riscv_vfsgnjn_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfsgnjn_vf_f32mf2 | ( | ... | ) | __riscv_vfsgnjn_vf_f32mf2(__VA_ARGS__) |
| #define vfsgnjn_vf_f32mf2_m | ( | ... | ) | __riscv_vfsgnjn_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfsgnjn_vf_f64m1 | ( | ... | ) | __riscv_vfsgnjn_vf_f64m1(__VA_ARGS__) |
| #define vfsgnjn_vf_f64m1_m | ( | ... | ) | __riscv_vfsgnjn_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfsgnjn_vf_f64m2 | ( | ... | ) | __riscv_vfsgnjn_vf_f64m2(__VA_ARGS__) |
| #define vfsgnjn_vf_f64m2_m | ( | ... | ) | __riscv_vfsgnjn_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfsgnjn_vf_f64m4 | ( | ... | ) | __riscv_vfsgnjn_vf_f64m4(__VA_ARGS__) |
| #define vfsgnjn_vf_f64m4_m | ( | ... | ) | __riscv_vfsgnjn_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfsgnjn_vf_f64m8 | ( | ... | ) | __riscv_vfsgnjn_vf_f64m8(__VA_ARGS__) |
| #define vfsgnjn_vf_f64m8_m | ( | ... | ) | __riscv_vfsgnjn_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfsgnjn_vv_f16m1 | ( | ... | ) | __riscv_vfsgnjn_vv_f16m1(__VA_ARGS__) |
| #define vfsgnjn_vv_f16m1_m | ( | ... | ) | __riscv_vfsgnjn_vv_f16m1_tumu(__VA_ARGS__) |
| #define vfsgnjn_vv_f16m2 | ( | ... | ) | __riscv_vfsgnjn_vv_f16m2(__VA_ARGS__) |
| #define vfsgnjn_vv_f16m2_m | ( | ... | ) | __riscv_vfsgnjn_vv_f16m2_tumu(__VA_ARGS__) |
| #define vfsgnjn_vv_f16m4 | ( | ... | ) | __riscv_vfsgnjn_vv_f16m4(__VA_ARGS__) |
| #define vfsgnjn_vv_f16m4_m | ( | ... | ) | __riscv_vfsgnjn_vv_f16m4_tumu(__VA_ARGS__) |
| #define vfsgnjn_vv_f16m8 | ( | ... | ) | __riscv_vfsgnjn_vv_f16m8(__VA_ARGS__) |
| #define vfsgnjn_vv_f16m8_m | ( | ... | ) | __riscv_vfsgnjn_vv_f16m8_tumu(__VA_ARGS__) |
| #define vfsgnjn_vv_f16mf2 | ( | ... | ) | __riscv_vfsgnjn_vv_f16mf2(__VA_ARGS__) |
| #define vfsgnjn_vv_f16mf2_m | ( | ... | ) | __riscv_vfsgnjn_vv_f16mf2_tumu(__VA_ARGS__) |
| #define vfsgnjn_vv_f16mf4 | ( | ... | ) | __riscv_vfsgnjn_vv_f16mf4(__VA_ARGS__) |
| #define vfsgnjn_vv_f16mf4_m | ( | ... | ) | __riscv_vfsgnjn_vv_f16mf4_tumu(__VA_ARGS__) |
| #define vfsgnjn_vv_f32m1 | ( | ... | ) | __riscv_vfsgnjn_vv_f32m1(__VA_ARGS__) |
| #define vfsgnjn_vv_f32m1_m | ( | ... | ) | __riscv_vfsgnjn_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfsgnjn_vv_f32m2 | ( | ... | ) | __riscv_vfsgnjn_vv_f32m2(__VA_ARGS__) |
| #define vfsgnjn_vv_f32m2_m | ( | ... | ) | __riscv_vfsgnjn_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfsgnjn_vv_f32m4 | ( | ... | ) | __riscv_vfsgnjn_vv_f32m4(__VA_ARGS__) |
| #define vfsgnjn_vv_f32m4_m | ( | ... | ) | __riscv_vfsgnjn_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfsgnjn_vv_f32m8 | ( | ... | ) | __riscv_vfsgnjn_vv_f32m8(__VA_ARGS__) |
| #define vfsgnjn_vv_f32m8_m | ( | ... | ) | __riscv_vfsgnjn_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfsgnjn_vv_f32mf2 | ( | ... | ) | __riscv_vfsgnjn_vv_f32mf2(__VA_ARGS__) |
| #define vfsgnjn_vv_f32mf2_m | ( | ... | ) | __riscv_vfsgnjn_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfsgnjn_vv_f64m1 | ( | ... | ) | __riscv_vfsgnjn_vv_f64m1(__VA_ARGS__) |
| #define vfsgnjn_vv_f64m1_m | ( | ... | ) | __riscv_vfsgnjn_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfsgnjn_vv_f64m2 | ( | ... | ) | __riscv_vfsgnjn_vv_f64m2(__VA_ARGS__) |
| #define vfsgnjn_vv_f64m2_m | ( | ... | ) | __riscv_vfsgnjn_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfsgnjn_vv_f64m4 | ( | ... | ) | __riscv_vfsgnjn_vv_f64m4(__VA_ARGS__) |
| #define vfsgnjn_vv_f64m4_m | ( | ... | ) | __riscv_vfsgnjn_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfsgnjn_vv_f64m8 | ( | ... | ) | __riscv_vfsgnjn_vv_f64m8(__VA_ARGS__) |
| #define vfsgnjn_vv_f64m8_m | ( | ... | ) | __riscv_vfsgnjn_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfsgnjx_vf_f16m1 | ( | ... | ) | __riscv_vfsgnjx_vf_f16m1(__VA_ARGS__) |
| #define vfsgnjx_vf_f16m1_m | ( | ... | ) | __riscv_vfsgnjx_vf_f16m1_tumu(__VA_ARGS__) |
| #define vfsgnjx_vf_f16m2 | ( | ... | ) | __riscv_vfsgnjx_vf_f16m2(__VA_ARGS__) |
| #define vfsgnjx_vf_f16m2_m | ( | ... | ) | __riscv_vfsgnjx_vf_f16m2_tumu(__VA_ARGS__) |
| #define vfsgnjx_vf_f16m4 | ( | ... | ) | __riscv_vfsgnjx_vf_f16m4(__VA_ARGS__) |
| #define vfsgnjx_vf_f16m4_m | ( | ... | ) | __riscv_vfsgnjx_vf_f16m4_tumu(__VA_ARGS__) |
| #define vfsgnjx_vf_f16m8 | ( | ... | ) | __riscv_vfsgnjx_vf_f16m8(__VA_ARGS__) |
| #define vfsgnjx_vf_f16m8_m | ( | ... | ) | __riscv_vfsgnjx_vf_f16m8_tumu(__VA_ARGS__) |
| #define vfsgnjx_vf_f16mf2 | ( | ... | ) | __riscv_vfsgnjx_vf_f16mf2(__VA_ARGS__) |
| #define vfsgnjx_vf_f16mf2_m | ( | ... | ) | __riscv_vfsgnjx_vf_f16mf2_tumu(__VA_ARGS__) |
| #define vfsgnjx_vf_f16mf4 | ( | ... | ) | __riscv_vfsgnjx_vf_f16mf4(__VA_ARGS__) |
| #define vfsgnjx_vf_f16mf4_m | ( | ... | ) | __riscv_vfsgnjx_vf_f16mf4_tumu(__VA_ARGS__) |
| #define vfsgnjx_vf_f32m1 | ( | ... | ) | __riscv_vfsgnjx_vf_f32m1(__VA_ARGS__) |
| #define vfsgnjx_vf_f32m1_m | ( | ... | ) | __riscv_vfsgnjx_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfsgnjx_vf_f32m2 | ( | ... | ) | __riscv_vfsgnjx_vf_f32m2(__VA_ARGS__) |
| #define vfsgnjx_vf_f32m2_m | ( | ... | ) | __riscv_vfsgnjx_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfsgnjx_vf_f32m4 | ( | ... | ) | __riscv_vfsgnjx_vf_f32m4(__VA_ARGS__) |
| #define vfsgnjx_vf_f32m4_m | ( | ... | ) | __riscv_vfsgnjx_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfsgnjx_vf_f32m8 | ( | ... | ) | __riscv_vfsgnjx_vf_f32m8(__VA_ARGS__) |
| #define vfsgnjx_vf_f32m8_m | ( | ... | ) | __riscv_vfsgnjx_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfsgnjx_vf_f32mf2 | ( | ... | ) | __riscv_vfsgnjx_vf_f32mf2(__VA_ARGS__) |
| #define vfsgnjx_vf_f32mf2_m | ( | ... | ) | __riscv_vfsgnjx_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfsgnjx_vf_f64m1 | ( | ... | ) | __riscv_vfsgnjx_vf_f64m1(__VA_ARGS__) |
| #define vfsgnjx_vf_f64m1_m | ( | ... | ) | __riscv_vfsgnjx_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfsgnjx_vf_f64m2 | ( | ... | ) | __riscv_vfsgnjx_vf_f64m2(__VA_ARGS__) |
| #define vfsgnjx_vf_f64m2_m | ( | ... | ) | __riscv_vfsgnjx_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfsgnjx_vf_f64m4 | ( | ... | ) | __riscv_vfsgnjx_vf_f64m4(__VA_ARGS__) |
| #define vfsgnjx_vf_f64m4_m | ( | ... | ) | __riscv_vfsgnjx_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfsgnjx_vf_f64m8 | ( | ... | ) | __riscv_vfsgnjx_vf_f64m8(__VA_ARGS__) |
| #define vfsgnjx_vf_f64m8_m | ( | ... | ) | __riscv_vfsgnjx_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfsgnjx_vv_f16m1 | ( | ... | ) | __riscv_vfsgnjx_vv_f16m1(__VA_ARGS__) |
| #define vfsgnjx_vv_f16m1_m | ( | ... | ) | __riscv_vfsgnjx_vv_f16m1_tumu(__VA_ARGS__) |
| #define vfsgnjx_vv_f16m2 | ( | ... | ) | __riscv_vfsgnjx_vv_f16m2(__VA_ARGS__) |
| #define vfsgnjx_vv_f16m2_m | ( | ... | ) | __riscv_vfsgnjx_vv_f16m2_tumu(__VA_ARGS__) |
| #define vfsgnjx_vv_f16m4 | ( | ... | ) | __riscv_vfsgnjx_vv_f16m4(__VA_ARGS__) |
| #define vfsgnjx_vv_f16m4_m | ( | ... | ) | __riscv_vfsgnjx_vv_f16m4_tumu(__VA_ARGS__) |
| #define vfsgnjx_vv_f16m8 | ( | ... | ) | __riscv_vfsgnjx_vv_f16m8(__VA_ARGS__) |
| #define vfsgnjx_vv_f16m8_m | ( | ... | ) | __riscv_vfsgnjx_vv_f16m8_tumu(__VA_ARGS__) |
| #define vfsgnjx_vv_f16mf2 | ( | ... | ) | __riscv_vfsgnjx_vv_f16mf2(__VA_ARGS__) |
| #define vfsgnjx_vv_f16mf2_m | ( | ... | ) | __riscv_vfsgnjx_vv_f16mf2_tumu(__VA_ARGS__) |
| #define vfsgnjx_vv_f16mf4 | ( | ... | ) | __riscv_vfsgnjx_vv_f16mf4(__VA_ARGS__) |
| #define vfsgnjx_vv_f16mf4_m | ( | ... | ) | __riscv_vfsgnjx_vv_f16mf4_tumu(__VA_ARGS__) |
| #define vfsgnjx_vv_f32m1 | ( | ... | ) | __riscv_vfsgnjx_vv_f32m1(__VA_ARGS__) |
| #define vfsgnjx_vv_f32m1_m | ( | ... | ) | __riscv_vfsgnjx_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfsgnjx_vv_f32m2 | ( | ... | ) | __riscv_vfsgnjx_vv_f32m2(__VA_ARGS__) |
| #define vfsgnjx_vv_f32m2_m | ( | ... | ) | __riscv_vfsgnjx_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfsgnjx_vv_f32m4 | ( | ... | ) | __riscv_vfsgnjx_vv_f32m4(__VA_ARGS__) |
| #define vfsgnjx_vv_f32m4_m | ( | ... | ) | __riscv_vfsgnjx_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfsgnjx_vv_f32m8 | ( | ... | ) | __riscv_vfsgnjx_vv_f32m8(__VA_ARGS__) |
| #define vfsgnjx_vv_f32m8_m | ( | ... | ) | __riscv_vfsgnjx_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfsgnjx_vv_f32mf2 | ( | ... | ) | __riscv_vfsgnjx_vv_f32mf2(__VA_ARGS__) |
| #define vfsgnjx_vv_f32mf2_m | ( | ... | ) | __riscv_vfsgnjx_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfsgnjx_vv_f64m1 | ( | ... | ) | __riscv_vfsgnjx_vv_f64m1(__VA_ARGS__) |
| #define vfsgnjx_vv_f64m1_m | ( | ... | ) | __riscv_vfsgnjx_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfsgnjx_vv_f64m2 | ( | ... | ) | __riscv_vfsgnjx_vv_f64m2(__VA_ARGS__) |
| #define vfsgnjx_vv_f64m2_m | ( | ... | ) | __riscv_vfsgnjx_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfsgnjx_vv_f64m4 | ( | ... | ) | __riscv_vfsgnjx_vv_f64m4(__VA_ARGS__) |
| #define vfsgnjx_vv_f64m4_m | ( | ... | ) | __riscv_vfsgnjx_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfsgnjx_vv_f64m8 | ( | ... | ) | __riscv_vfsgnjx_vv_f64m8(__VA_ARGS__) |
| #define vfsgnjx_vv_f64m8_m | ( | ... | ) | __riscv_vfsgnjx_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfslide1down_vf_f16m1 | ( | ... | ) | __riscv_vfslide1down_vf_f16m1(__VA_ARGS__) |
| #define vfslide1down_vf_f16m1_m | ( | ... | ) | __riscv_vfslide1down_vf_f16m1_tumu(__VA_ARGS__) |
| #define vfslide1down_vf_f16m2 | ( | ... | ) | __riscv_vfslide1down_vf_f16m2(__VA_ARGS__) |
| #define vfslide1down_vf_f16m2_m | ( | ... | ) | __riscv_vfslide1down_vf_f16m2_tumu(__VA_ARGS__) |
| #define vfslide1down_vf_f16m4 | ( | ... | ) | __riscv_vfslide1down_vf_f16m4(__VA_ARGS__) |
| #define vfslide1down_vf_f16m4_m | ( | ... | ) | __riscv_vfslide1down_vf_f16m4_tumu(__VA_ARGS__) |
| #define vfslide1down_vf_f16m8 | ( | ... | ) | __riscv_vfslide1down_vf_f16m8(__VA_ARGS__) |
| #define vfslide1down_vf_f16m8_m | ( | ... | ) | __riscv_vfslide1down_vf_f16m8_tumu(__VA_ARGS__) |
| #define vfslide1down_vf_f16mf2 | ( | ... | ) | __riscv_vfslide1down_vf_f16mf2(__VA_ARGS__) |
| #define vfslide1down_vf_f16mf2_m | ( | ... | ) | __riscv_vfslide1down_vf_f16mf2_tumu(__VA_ARGS__) |
| #define vfslide1down_vf_f16mf4 | ( | ... | ) | __riscv_vfslide1down_vf_f16mf4(__VA_ARGS__) |
| #define vfslide1down_vf_f16mf4_m | ( | ... | ) | __riscv_vfslide1down_vf_f16mf4_tumu(__VA_ARGS__) |
| #define vfslide1down_vf_f32m1 | ( | ... | ) | __riscv_vfslide1down_vf_f32m1(__VA_ARGS__) |
| #define vfslide1down_vf_f32m1_m | ( | ... | ) | __riscv_vfslide1down_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfslide1down_vf_f32m2 | ( | ... | ) | __riscv_vfslide1down_vf_f32m2(__VA_ARGS__) |
| #define vfslide1down_vf_f32m2_m | ( | ... | ) | __riscv_vfslide1down_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfslide1down_vf_f32m4 | ( | ... | ) | __riscv_vfslide1down_vf_f32m4(__VA_ARGS__) |
| #define vfslide1down_vf_f32m4_m | ( | ... | ) | __riscv_vfslide1down_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfslide1down_vf_f32m8 | ( | ... | ) | __riscv_vfslide1down_vf_f32m8(__VA_ARGS__) |
| #define vfslide1down_vf_f32m8_m | ( | ... | ) | __riscv_vfslide1down_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfslide1down_vf_f32mf2 | ( | ... | ) | __riscv_vfslide1down_vf_f32mf2(__VA_ARGS__) |
| #define vfslide1down_vf_f32mf2_m | ( | ... | ) | __riscv_vfslide1down_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfslide1down_vf_f64m1 | ( | ... | ) | __riscv_vfslide1down_vf_f64m1(__VA_ARGS__) |
| #define vfslide1down_vf_f64m1_m | ( | ... | ) | __riscv_vfslide1down_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfslide1down_vf_f64m2 | ( | ... | ) | __riscv_vfslide1down_vf_f64m2(__VA_ARGS__) |
| #define vfslide1down_vf_f64m2_m | ( | ... | ) | __riscv_vfslide1down_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfslide1down_vf_f64m4 | ( | ... | ) | __riscv_vfslide1down_vf_f64m4(__VA_ARGS__) |
| #define vfslide1down_vf_f64m4_m | ( | ... | ) | __riscv_vfslide1down_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfslide1down_vf_f64m8 | ( | ... | ) | __riscv_vfslide1down_vf_f64m8(__VA_ARGS__) |
| #define vfslide1down_vf_f64m8_m | ( | ... | ) | __riscv_vfslide1down_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfslide1up_vf_f16m1 | ( | ... | ) | __riscv_vfslide1up_vf_f16m1(__VA_ARGS__) |
| #define vfslide1up_vf_f16m1_m | ( | ... | ) | __riscv_vfslide1up_vf_f16m1_tumu(__VA_ARGS__) |
| #define vfslide1up_vf_f16m2 | ( | ... | ) | __riscv_vfslide1up_vf_f16m2(__VA_ARGS__) |
| #define vfslide1up_vf_f16m2_m | ( | ... | ) | __riscv_vfslide1up_vf_f16m2_tumu(__VA_ARGS__) |
| #define vfslide1up_vf_f16m4 | ( | ... | ) | __riscv_vfslide1up_vf_f16m4(__VA_ARGS__) |
| #define vfslide1up_vf_f16m4_m | ( | ... | ) | __riscv_vfslide1up_vf_f16m4_tumu(__VA_ARGS__) |
| #define vfslide1up_vf_f16m8 | ( | ... | ) | __riscv_vfslide1up_vf_f16m8(__VA_ARGS__) |
| #define vfslide1up_vf_f16m8_m | ( | ... | ) | __riscv_vfslide1up_vf_f16m8_tumu(__VA_ARGS__) |
| #define vfslide1up_vf_f16mf2 | ( | ... | ) | __riscv_vfslide1up_vf_f16mf2(__VA_ARGS__) |
| #define vfslide1up_vf_f16mf2_m | ( | ... | ) | __riscv_vfslide1up_vf_f16mf2_tumu(__VA_ARGS__) |
| #define vfslide1up_vf_f16mf4 | ( | ... | ) | __riscv_vfslide1up_vf_f16mf4(__VA_ARGS__) |
| #define vfslide1up_vf_f16mf4_m | ( | ... | ) | __riscv_vfslide1up_vf_f16mf4_tumu(__VA_ARGS__) |
| #define vfslide1up_vf_f32m1 | ( | ... | ) | __riscv_vfslide1up_vf_f32m1(__VA_ARGS__) |
| #define vfslide1up_vf_f32m1_m | ( | ... | ) | __riscv_vfslide1up_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfslide1up_vf_f32m2 | ( | ... | ) | __riscv_vfslide1up_vf_f32m2(__VA_ARGS__) |
| #define vfslide1up_vf_f32m2_m | ( | ... | ) | __riscv_vfslide1up_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfslide1up_vf_f32m4 | ( | ... | ) | __riscv_vfslide1up_vf_f32m4(__VA_ARGS__) |
| #define vfslide1up_vf_f32m4_m | ( | ... | ) | __riscv_vfslide1up_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfslide1up_vf_f32m8 | ( | ... | ) | __riscv_vfslide1up_vf_f32m8(__VA_ARGS__) |
| #define vfslide1up_vf_f32m8_m | ( | ... | ) | __riscv_vfslide1up_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfslide1up_vf_f32mf2 | ( | ... | ) | __riscv_vfslide1up_vf_f32mf2(__VA_ARGS__) |
| #define vfslide1up_vf_f32mf2_m | ( | ... | ) | __riscv_vfslide1up_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfslide1up_vf_f64m1 | ( | ... | ) | __riscv_vfslide1up_vf_f64m1(__VA_ARGS__) |
| #define vfslide1up_vf_f64m1_m | ( | ... | ) | __riscv_vfslide1up_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfslide1up_vf_f64m2 | ( | ... | ) | __riscv_vfslide1up_vf_f64m2(__VA_ARGS__) |
| #define vfslide1up_vf_f64m2_m | ( | ... | ) | __riscv_vfslide1up_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfslide1up_vf_f64m4 | ( | ... | ) | __riscv_vfslide1up_vf_f64m4(__VA_ARGS__) |
| #define vfslide1up_vf_f64m4_m | ( | ... | ) | __riscv_vfslide1up_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfslide1up_vf_f64m8 | ( | ... | ) | __riscv_vfslide1up_vf_f64m8(__VA_ARGS__) |
| #define vfslide1up_vf_f64m8_m | ( | ... | ) | __riscv_vfslide1up_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfsqrt_v_f16m1 | ( | ... | ) | __riscv_vfsqrt_v_f16m1(__VA_ARGS__) |
| #define vfsqrt_v_f16m1_m | ( | ... | ) | __riscv_vfsqrt_v_f16m1_tumu(__VA_ARGS__) |
| #define vfsqrt_v_f16m2 | ( | ... | ) | __riscv_vfsqrt_v_f16m2(__VA_ARGS__) |
| #define vfsqrt_v_f16m2_m | ( | ... | ) | __riscv_vfsqrt_v_f16m2_tumu(__VA_ARGS__) |
| #define vfsqrt_v_f16m4 | ( | ... | ) | __riscv_vfsqrt_v_f16m4(__VA_ARGS__) |
| #define vfsqrt_v_f16m4_m | ( | ... | ) | __riscv_vfsqrt_v_f16m4_tumu(__VA_ARGS__) |
| #define vfsqrt_v_f16m8 | ( | ... | ) | __riscv_vfsqrt_v_f16m8(__VA_ARGS__) |
| #define vfsqrt_v_f16m8_m | ( | ... | ) | __riscv_vfsqrt_v_f16m8_tumu(__VA_ARGS__) |
| #define vfsqrt_v_f16mf2 | ( | ... | ) | __riscv_vfsqrt_v_f16mf2(__VA_ARGS__) |
| #define vfsqrt_v_f16mf2_m | ( | ... | ) | __riscv_vfsqrt_v_f16mf2_tumu(__VA_ARGS__) |
| #define vfsqrt_v_f16mf4 | ( | ... | ) | __riscv_vfsqrt_v_f16mf4(__VA_ARGS__) |
| #define vfsqrt_v_f16mf4_m | ( | ... | ) | __riscv_vfsqrt_v_f16mf4_tumu(__VA_ARGS__) |
| #define vfsqrt_v_f32m1 | ( | ... | ) | __riscv_vfsqrt_v_f32m1(__VA_ARGS__) |
| #define vfsqrt_v_f32m1_m | ( | ... | ) | __riscv_vfsqrt_v_f32m1_tumu(__VA_ARGS__) |
| #define vfsqrt_v_f32m2 | ( | ... | ) | __riscv_vfsqrt_v_f32m2(__VA_ARGS__) |
| #define vfsqrt_v_f32m2_m | ( | ... | ) | __riscv_vfsqrt_v_f32m2_tumu(__VA_ARGS__) |
| #define vfsqrt_v_f32m4 | ( | ... | ) | __riscv_vfsqrt_v_f32m4(__VA_ARGS__) |
| #define vfsqrt_v_f32m4_m | ( | ... | ) | __riscv_vfsqrt_v_f32m4_tumu(__VA_ARGS__) |
| #define vfsqrt_v_f32m8 | ( | ... | ) | __riscv_vfsqrt_v_f32m8(__VA_ARGS__) |
| #define vfsqrt_v_f32m8_m | ( | ... | ) | __riscv_vfsqrt_v_f32m8_tumu(__VA_ARGS__) |
| #define vfsqrt_v_f32mf2 | ( | ... | ) | __riscv_vfsqrt_v_f32mf2(__VA_ARGS__) |
| #define vfsqrt_v_f32mf2_m | ( | ... | ) | __riscv_vfsqrt_v_f32mf2_tumu(__VA_ARGS__) |
| #define vfsqrt_v_f64m1 | ( | ... | ) | __riscv_vfsqrt_v_f64m1(__VA_ARGS__) |
| #define vfsqrt_v_f64m1_m | ( | ... | ) | __riscv_vfsqrt_v_f64m1_tumu(__VA_ARGS__) |
| #define vfsqrt_v_f64m2 | ( | ... | ) | __riscv_vfsqrt_v_f64m2(__VA_ARGS__) |
| #define vfsqrt_v_f64m2_m | ( | ... | ) | __riscv_vfsqrt_v_f64m2_tumu(__VA_ARGS__) |
| #define vfsqrt_v_f64m4 | ( | ... | ) | __riscv_vfsqrt_v_f64m4(__VA_ARGS__) |
| #define vfsqrt_v_f64m4_m | ( | ... | ) | __riscv_vfsqrt_v_f64m4_tumu(__VA_ARGS__) |
| #define vfsqrt_v_f64m8 | ( | ... | ) | __riscv_vfsqrt_v_f64m8(__VA_ARGS__) |
| #define vfsqrt_v_f64m8_m | ( | ... | ) | __riscv_vfsqrt_v_f64m8_tumu(__VA_ARGS__) |
| #define vfsub_vf_f16m1 | ( | ... | ) | __riscv_vfsub_vf_f16m1(__VA_ARGS__) |
| #define vfsub_vf_f16m1_m | ( | ... | ) | __riscv_vfsub_vf_f16m1_tumu(__VA_ARGS__) |
| #define vfsub_vf_f16m2 | ( | ... | ) | __riscv_vfsub_vf_f16m2(__VA_ARGS__) |
| #define vfsub_vf_f16m2_m | ( | ... | ) | __riscv_vfsub_vf_f16m2_tumu(__VA_ARGS__) |
| #define vfsub_vf_f16m4 | ( | ... | ) | __riscv_vfsub_vf_f16m4(__VA_ARGS__) |
| #define vfsub_vf_f16m4_m | ( | ... | ) | __riscv_vfsub_vf_f16m4_tumu(__VA_ARGS__) |
| #define vfsub_vf_f16m8 | ( | ... | ) | __riscv_vfsub_vf_f16m8(__VA_ARGS__) |
| #define vfsub_vf_f16m8_m | ( | ... | ) | __riscv_vfsub_vf_f16m8_tumu(__VA_ARGS__) |
| #define vfsub_vf_f16mf2 | ( | ... | ) | __riscv_vfsub_vf_f16mf2(__VA_ARGS__) |
| #define vfsub_vf_f16mf2_m | ( | ... | ) | __riscv_vfsub_vf_f16mf2_tumu(__VA_ARGS__) |
| #define vfsub_vf_f16mf4 | ( | ... | ) | __riscv_vfsub_vf_f16mf4(__VA_ARGS__) |
| #define vfsub_vf_f16mf4_m | ( | ... | ) | __riscv_vfsub_vf_f16mf4_tumu(__VA_ARGS__) |
| #define vfsub_vf_f32m1 | ( | ... | ) | __riscv_vfsub_vf_f32m1(__VA_ARGS__) |
| #define vfsub_vf_f32m1_m | ( | ... | ) | __riscv_vfsub_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfsub_vf_f32m2 | ( | ... | ) | __riscv_vfsub_vf_f32m2(__VA_ARGS__) |
| #define vfsub_vf_f32m2_m | ( | ... | ) | __riscv_vfsub_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfsub_vf_f32m4 | ( | ... | ) | __riscv_vfsub_vf_f32m4(__VA_ARGS__) |
| #define vfsub_vf_f32m4_m | ( | ... | ) | __riscv_vfsub_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfsub_vf_f32m8 | ( | ... | ) | __riscv_vfsub_vf_f32m8(__VA_ARGS__) |
| #define vfsub_vf_f32m8_m | ( | ... | ) | __riscv_vfsub_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfsub_vf_f32mf2 | ( | ... | ) | __riscv_vfsub_vf_f32mf2(__VA_ARGS__) |
| #define vfsub_vf_f32mf2_m | ( | ... | ) | __riscv_vfsub_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfsub_vf_f64m1 | ( | ... | ) | __riscv_vfsub_vf_f64m1(__VA_ARGS__) |
| #define vfsub_vf_f64m1_m | ( | ... | ) | __riscv_vfsub_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfsub_vf_f64m2 | ( | ... | ) | __riscv_vfsub_vf_f64m2(__VA_ARGS__) |
| #define vfsub_vf_f64m2_m | ( | ... | ) | __riscv_vfsub_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfsub_vf_f64m4 | ( | ... | ) | __riscv_vfsub_vf_f64m4(__VA_ARGS__) |
| #define vfsub_vf_f64m4_m | ( | ... | ) | __riscv_vfsub_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfsub_vf_f64m8 | ( | ... | ) | __riscv_vfsub_vf_f64m8(__VA_ARGS__) |
| #define vfsub_vf_f64m8_m | ( | ... | ) | __riscv_vfsub_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfsub_vv_f16m1 | ( | ... | ) | __riscv_vfsub_vv_f16m1(__VA_ARGS__) |
| #define vfsub_vv_f16m1_m | ( | ... | ) | __riscv_vfsub_vv_f16m1_tumu(__VA_ARGS__) |
| #define vfsub_vv_f16m2 | ( | ... | ) | __riscv_vfsub_vv_f16m2(__VA_ARGS__) |
| #define vfsub_vv_f16m2_m | ( | ... | ) | __riscv_vfsub_vv_f16m2_tumu(__VA_ARGS__) |
| #define vfsub_vv_f16m4 | ( | ... | ) | __riscv_vfsub_vv_f16m4(__VA_ARGS__) |
| #define vfsub_vv_f16m4_m | ( | ... | ) | __riscv_vfsub_vv_f16m4_tumu(__VA_ARGS__) |
| #define vfsub_vv_f16m8 | ( | ... | ) | __riscv_vfsub_vv_f16m8(__VA_ARGS__) |
| #define vfsub_vv_f16m8_m | ( | ... | ) | __riscv_vfsub_vv_f16m8_tumu(__VA_ARGS__) |
| #define vfsub_vv_f16mf2 | ( | ... | ) | __riscv_vfsub_vv_f16mf2(__VA_ARGS__) |
| #define vfsub_vv_f16mf2_m | ( | ... | ) | __riscv_vfsub_vv_f16mf2_tumu(__VA_ARGS__) |
| #define vfsub_vv_f16mf4 | ( | ... | ) | __riscv_vfsub_vv_f16mf4(__VA_ARGS__) |
| #define vfsub_vv_f16mf4_m | ( | ... | ) | __riscv_vfsub_vv_f16mf4_tumu(__VA_ARGS__) |
| #define vfsub_vv_f32m1 | ( | ... | ) | __riscv_vfsub_vv_f32m1(__VA_ARGS__) |
| #define vfsub_vv_f32m1_m | ( | ... | ) | __riscv_vfsub_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfsub_vv_f32m2 | ( | ... | ) | __riscv_vfsub_vv_f32m2(__VA_ARGS__) |
| #define vfsub_vv_f32m2_m | ( | ... | ) | __riscv_vfsub_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfsub_vv_f32m4 | ( | ... | ) | __riscv_vfsub_vv_f32m4(__VA_ARGS__) |
| #define vfsub_vv_f32m4_m | ( | ... | ) | __riscv_vfsub_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfsub_vv_f32m8 | ( | ... | ) | __riscv_vfsub_vv_f32m8(__VA_ARGS__) |
| #define vfsub_vv_f32m8_m | ( | ... | ) | __riscv_vfsub_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfsub_vv_f32mf2 | ( | ... | ) | __riscv_vfsub_vv_f32mf2(__VA_ARGS__) |
| #define vfsub_vv_f32mf2_m | ( | ... | ) | __riscv_vfsub_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfsub_vv_f64m1 | ( | ... | ) | __riscv_vfsub_vv_f64m1(__VA_ARGS__) |
| #define vfsub_vv_f64m1_m | ( | ... | ) | __riscv_vfsub_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfsub_vv_f64m2 | ( | ... | ) | __riscv_vfsub_vv_f64m2(__VA_ARGS__) |
| #define vfsub_vv_f64m2_m | ( | ... | ) | __riscv_vfsub_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfsub_vv_f64m4 | ( | ... | ) | __riscv_vfsub_vv_f64m4(__VA_ARGS__) |
| #define vfsub_vv_f64m4_m | ( | ... | ) | __riscv_vfsub_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfsub_vv_f64m8 | ( | ... | ) | __riscv_vfsub_vv_f64m8(__VA_ARGS__) |
| #define vfsub_vv_f64m8_m | ( | ... | ) | __riscv_vfsub_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfwadd_vf_f32m1 | ( | ... | ) | __riscv_vfwadd_vf_f32m1(__VA_ARGS__) |
| #define vfwadd_vf_f32m1_m | ( | ... | ) | __riscv_vfwadd_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfwadd_vf_f32m2 | ( | ... | ) | __riscv_vfwadd_vf_f32m2(__VA_ARGS__) |
| #define vfwadd_vf_f32m2_m | ( | ... | ) | __riscv_vfwadd_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfwadd_vf_f32m4 | ( | ... | ) | __riscv_vfwadd_vf_f32m4(__VA_ARGS__) |
| #define vfwadd_vf_f32m4_m | ( | ... | ) | __riscv_vfwadd_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfwadd_vf_f32m8 | ( | ... | ) | __riscv_vfwadd_vf_f32m8(__VA_ARGS__) |
| #define vfwadd_vf_f32m8_m | ( | ... | ) | __riscv_vfwadd_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfwadd_vf_f32mf2 | ( | ... | ) | __riscv_vfwadd_vf_f32mf2(__VA_ARGS__) |
| #define vfwadd_vf_f32mf2_m | ( | ... | ) | __riscv_vfwadd_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfwadd_vf_f64m1 | ( | ... | ) | __riscv_vfwadd_vf_f64m1(__VA_ARGS__) |
| #define vfwadd_vf_f64m1_m | ( | ... | ) | __riscv_vfwadd_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfwadd_vf_f64m2 | ( | ... | ) | __riscv_vfwadd_vf_f64m2(__VA_ARGS__) |
| #define vfwadd_vf_f64m2_m | ( | ... | ) | __riscv_vfwadd_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfwadd_vf_f64m4 | ( | ... | ) | __riscv_vfwadd_vf_f64m4(__VA_ARGS__) |
| #define vfwadd_vf_f64m4_m | ( | ... | ) | __riscv_vfwadd_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfwadd_vf_f64m8 | ( | ... | ) | __riscv_vfwadd_vf_f64m8(__VA_ARGS__) |
| #define vfwadd_vf_f64m8_m | ( | ... | ) | __riscv_vfwadd_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfwadd_vv_f32m1 | ( | ... | ) | __riscv_vfwadd_vv_f32m1(__VA_ARGS__) |
| #define vfwadd_vv_f32m1_m | ( | ... | ) | __riscv_vfwadd_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfwadd_vv_f32m2 | ( | ... | ) | __riscv_vfwadd_vv_f32m2(__VA_ARGS__) |
| #define vfwadd_vv_f32m2_m | ( | ... | ) | __riscv_vfwadd_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfwadd_vv_f32m4 | ( | ... | ) | __riscv_vfwadd_vv_f32m4(__VA_ARGS__) |
| #define vfwadd_vv_f32m4_m | ( | ... | ) | __riscv_vfwadd_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfwadd_vv_f32m8 | ( | ... | ) | __riscv_vfwadd_vv_f32m8(__VA_ARGS__) |
| #define vfwadd_vv_f32m8_m | ( | ... | ) | __riscv_vfwadd_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfwadd_vv_f32mf2 | ( | ... | ) | __riscv_vfwadd_vv_f32mf2(__VA_ARGS__) |
| #define vfwadd_vv_f32mf2_m | ( | ... | ) | __riscv_vfwadd_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfwadd_vv_f64m1 | ( | ... | ) | __riscv_vfwadd_vv_f64m1(__VA_ARGS__) |
| #define vfwadd_vv_f64m1_m | ( | ... | ) | __riscv_vfwadd_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfwadd_vv_f64m2 | ( | ... | ) | __riscv_vfwadd_vv_f64m2(__VA_ARGS__) |
| #define vfwadd_vv_f64m2_m | ( | ... | ) | __riscv_vfwadd_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfwadd_vv_f64m4 | ( | ... | ) | __riscv_vfwadd_vv_f64m4(__VA_ARGS__) |
| #define vfwadd_vv_f64m4_m | ( | ... | ) | __riscv_vfwadd_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfwadd_vv_f64m8 | ( | ... | ) | __riscv_vfwadd_vv_f64m8(__VA_ARGS__) |
| #define vfwadd_vv_f64m8_m | ( | ... | ) | __riscv_vfwadd_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfwadd_wf_f32m1 | ( | ... | ) | __riscv_vfwadd_wf_f32m1(__VA_ARGS__) |
| #define vfwadd_wf_f32m1_m | ( | ... | ) | __riscv_vfwadd_wf_f32m1_tumu(__VA_ARGS__) |
| #define vfwadd_wf_f32m2 | ( | ... | ) | __riscv_vfwadd_wf_f32m2(__VA_ARGS__) |
| #define vfwadd_wf_f32m2_m | ( | ... | ) | __riscv_vfwadd_wf_f32m2_tumu(__VA_ARGS__) |
| #define vfwadd_wf_f32m4 | ( | ... | ) | __riscv_vfwadd_wf_f32m4(__VA_ARGS__) |
| #define vfwadd_wf_f32m4_m | ( | ... | ) | __riscv_vfwadd_wf_f32m4_tumu(__VA_ARGS__) |
| #define vfwadd_wf_f32m8 | ( | ... | ) | __riscv_vfwadd_wf_f32m8(__VA_ARGS__) |
| #define vfwadd_wf_f32m8_m | ( | ... | ) | __riscv_vfwadd_wf_f32m8_tumu(__VA_ARGS__) |
| #define vfwadd_wf_f32mf2 | ( | ... | ) | __riscv_vfwadd_wf_f32mf2(__VA_ARGS__) |
| #define vfwadd_wf_f32mf2_m | ( | ... | ) | __riscv_vfwadd_wf_f32mf2_tumu(__VA_ARGS__) |
| #define vfwadd_wf_f64m1 | ( | ... | ) | __riscv_vfwadd_wf_f64m1(__VA_ARGS__) |
| #define vfwadd_wf_f64m1_m | ( | ... | ) | __riscv_vfwadd_wf_f64m1_tumu(__VA_ARGS__) |
| #define vfwadd_wf_f64m2 | ( | ... | ) | __riscv_vfwadd_wf_f64m2(__VA_ARGS__) |
| #define vfwadd_wf_f64m2_m | ( | ... | ) | __riscv_vfwadd_wf_f64m2_tumu(__VA_ARGS__) |
| #define vfwadd_wf_f64m4 | ( | ... | ) | __riscv_vfwadd_wf_f64m4(__VA_ARGS__) |
| #define vfwadd_wf_f64m4_m | ( | ... | ) | __riscv_vfwadd_wf_f64m4_tumu(__VA_ARGS__) |
| #define vfwadd_wf_f64m8 | ( | ... | ) | __riscv_vfwadd_wf_f64m8(__VA_ARGS__) |
| #define vfwadd_wf_f64m8_m | ( | ... | ) | __riscv_vfwadd_wf_f64m8_tumu(__VA_ARGS__) |
| #define vfwadd_wv_f32m1 | ( | ... | ) | __riscv_vfwadd_wv_f32m1(__VA_ARGS__) |
| #define vfwadd_wv_f32m1_m | ( | ... | ) | __riscv_vfwadd_wv_f32m1_tumu(__VA_ARGS__) |
| #define vfwadd_wv_f32m2 | ( | ... | ) | __riscv_vfwadd_wv_f32m2(__VA_ARGS__) |
| #define vfwadd_wv_f32m2_m | ( | ... | ) | __riscv_vfwadd_wv_f32m2_tumu(__VA_ARGS__) |
| #define vfwadd_wv_f32m4 | ( | ... | ) | __riscv_vfwadd_wv_f32m4(__VA_ARGS__) |
| #define vfwadd_wv_f32m4_m | ( | ... | ) | __riscv_vfwadd_wv_f32m4_tumu(__VA_ARGS__) |
| #define vfwadd_wv_f32m8 | ( | ... | ) | __riscv_vfwadd_wv_f32m8(__VA_ARGS__) |
| #define vfwadd_wv_f32m8_m | ( | ... | ) | __riscv_vfwadd_wv_f32m8_tumu(__VA_ARGS__) |
| #define vfwadd_wv_f32mf2 | ( | ... | ) | __riscv_vfwadd_wv_f32mf2(__VA_ARGS__) |
| #define vfwadd_wv_f32mf2_m | ( | ... | ) | __riscv_vfwadd_wv_f32mf2_tumu(__VA_ARGS__) |
| #define vfwadd_wv_f64m1 | ( | ... | ) | __riscv_vfwadd_wv_f64m1(__VA_ARGS__) |
| #define vfwadd_wv_f64m1_m | ( | ... | ) | __riscv_vfwadd_wv_f64m1_tumu(__VA_ARGS__) |
| #define vfwadd_wv_f64m2 | ( | ... | ) | __riscv_vfwadd_wv_f64m2(__VA_ARGS__) |
| #define vfwadd_wv_f64m2_m | ( | ... | ) | __riscv_vfwadd_wv_f64m2_tumu(__VA_ARGS__) |
| #define vfwadd_wv_f64m4 | ( | ... | ) | __riscv_vfwadd_wv_f64m4(__VA_ARGS__) |
| #define vfwadd_wv_f64m4_m | ( | ... | ) | __riscv_vfwadd_wv_f64m4_tumu(__VA_ARGS__) |
| #define vfwadd_wv_f64m8 | ( | ... | ) | __riscv_vfwadd_wv_f64m8(__VA_ARGS__) |
| #define vfwadd_wv_f64m8_m | ( | ... | ) | __riscv_vfwadd_wv_f64m8_tumu(__VA_ARGS__) |
| #define vfwcvt_f_f_v_f32m1 | ( | ... | ) | __riscv_vfwcvt_f_f_v_f32m1(__VA_ARGS__) |
| #define vfwcvt_f_f_v_f32m1_m | ( | ... | ) | __riscv_vfwcvt_f_f_v_f32m1_tumu(__VA_ARGS__) |
| #define vfwcvt_f_f_v_f32m2 | ( | ... | ) | __riscv_vfwcvt_f_f_v_f32m2(__VA_ARGS__) |
| #define vfwcvt_f_f_v_f32m2_m | ( | ... | ) | __riscv_vfwcvt_f_f_v_f32m2_tumu(__VA_ARGS__) |
| #define vfwcvt_f_f_v_f32m4 | ( | ... | ) | __riscv_vfwcvt_f_f_v_f32m4(__VA_ARGS__) |
| #define vfwcvt_f_f_v_f32m4_m | ( | ... | ) | __riscv_vfwcvt_f_f_v_f32m4_tumu(__VA_ARGS__) |
| #define vfwcvt_f_f_v_f32m8 | ( | ... | ) | __riscv_vfwcvt_f_f_v_f32m8(__VA_ARGS__) |
| #define vfwcvt_f_f_v_f32m8_m | ( | ... | ) | __riscv_vfwcvt_f_f_v_f32m8_tumu(__VA_ARGS__) |
| #define vfwcvt_f_f_v_f32mf2 | ( | ... | ) | __riscv_vfwcvt_f_f_v_f32mf2(__VA_ARGS__) |
| #define vfwcvt_f_f_v_f32mf2_m | ( | ... | ) | __riscv_vfwcvt_f_f_v_f32mf2_tumu(__VA_ARGS__) |
| #define vfwcvt_f_f_v_f64m1 | ( | ... | ) | __riscv_vfwcvt_f_f_v_f64m1(__VA_ARGS__) |
| #define vfwcvt_f_f_v_f64m1_m | ( | ... | ) | __riscv_vfwcvt_f_f_v_f64m1_tumu(__VA_ARGS__) |
| #define vfwcvt_f_f_v_f64m2 | ( | ... | ) | __riscv_vfwcvt_f_f_v_f64m2(__VA_ARGS__) |
| #define vfwcvt_f_f_v_f64m2_m | ( | ... | ) | __riscv_vfwcvt_f_f_v_f64m2_tumu(__VA_ARGS__) |
| #define vfwcvt_f_f_v_f64m4 | ( | ... | ) | __riscv_vfwcvt_f_f_v_f64m4(__VA_ARGS__) |
| #define vfwcvt_f_f_v_f64m4_m | ( | ... | ) | __riscv_vfwcvt_f_f_v_f64m4_tumu(__VA_ARGS__) |
| #define vfwcvt_f_f_v_f64m8 | ( | ... | ) | __riscv_vfwcvt_f_f_v_f64m8(__VA_ARGS__) |
| #define vfwcvt_f_f_v_f64m8_m | ( | ... | ) | __riscv_vfwcvt_f_f_v_f64m8_tumu(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f16m1 | ( | ... | ) | __riscv_vfwcvt_f_x_v_f16m1(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f16m1_m | ( | ... | ) | __riscv_vfwcvt_f_x_v_f16m1_tumu(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f16m2 | ( | ... | ) | __riscv_vfwcvt_f_x_v_f16m2(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f16m2_m | ( | ... | ) | __riscv_vfwcvt_f_x_v_f16m2_tumu(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f16m4 | ( | ... | ) | __riscv_vfwcvt_f_x_v_f16m4(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f16m4_m | ( | ... | ) | __riscv_vfwcvt_f_x_v_f16m4_tumu(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f16m8 | ( | ... | ) | __riscv_vfwcvt_f_x_v_f16m8(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f16m8_m | ( | ... | ) | __riscv_vfwcvt_f_x_v_f16m8_tumu(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f16mf2 | ( | ... | ) | __riscv_vfwcvt_f_x_v_f16mf2(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f16mf2_m | ( | ... | ) | __riscv_vfwcvt_f_x_v_f16mf2_tumu(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f16mf4 | ( | ... | ) | __riscv_vfwcvt_f_x_v_f16mf4(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f16mf4_m | ( | ... | ) | __riscv_vfwcvt_f_x_v_f16mf4_tumu(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f32m1 | ( | ... | ) | __riscv_vfwcvt_f_x_v_f32m1(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f32m1_m | ( | ... | ) | __riscv_vfwcvt_f_x_v_f32m1_tumu(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f32m2 | ( | ... | ) | __riscv_vfwcvt_f_x_v_f32m2(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f32m2_m | ( | ... | ) | __riscv_vfwcvt_f_x_v_f32m2_tumu(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f32m4 | ( | ... | ) | __riscv_vfwcvt_f_x_v_f32m4(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f32m4_m | ( | ... | ) | __riscv_vfwcvt_f_x_v_f32m4_tumu(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f32m8 | ( | ... | ) | __riscv_vfwcvt_f_x_v_f32m8(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f32m8_m | ( | ... | ) | __riscv_vfwcvt_f_x_v_f32m8_tumu(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f32mf2 | ( | ... | ) | __riscv_vfwcvt_f_x_v_f32mf2(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f32mf2_m | ( | ... | ) | __riscv_vfwcvt_f_x_v_f32mf2_tumu(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f64m1 | ( | ... | ) | __riscv_vfwcvt_f_x_v_f64m1(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f64m1_m | ( | ... | ) | __riscv_vfwcvt_f_x_v_f64m1_tumu(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f64m2 | ( | ... | ) | __riscv_vfwcvt_f_x_v_f64m2(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f64m2_m | ( | ... | ) | __riscv_vfwcvt_f_x_v_f64m2_tumu(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f64m4 | ( | ... | ) | __riscv_vfwcvt_f_x_v_f64m4(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f64m4_m | ( | ... | ) | __riscv_vfwcvt_f_x_v_f64m4_tumu(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f64m8 | ( | ... | ) | __riscv_vfwcvt_f_x_v_f64m8(__VA_ARGS__) |
| #define vfwcvt_f_x_v_f64m8_m | ( | ... | ) | __riscv_vfwcvt_f_x_v_f64m8_tumu(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f16m1 | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f16m1(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f16m1_m | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f16m1_tumu(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f16m2 | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f16m2(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f16m2_m | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f16m2_tumu(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f16m4 | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f16m4(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f16m4_m | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f16m4_tumu(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f16m8 | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f16m8(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f16m8_m | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f16m8_tumu(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f16mf2 | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f16mf2(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f16mf2_m | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f16mf2_tumu(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f16mf4 | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f16mf4(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f16mf4_m | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f16mf4_tumu(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f32m1 | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f32m1(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f32m1_m | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f32m1_tumu(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f32m2 | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f32m2(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f32m2_m | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f32m2_tumu(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f32m4 | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f32m4(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f32m4_m | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f32m4_tumu(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f32m8 | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f32m8(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f32m8_m | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f32m8_tumu(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f32mf2 | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f32mf2(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f32mf2_m | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f32mf2_tumu(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f64m1 | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f64m1(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f64m1_m | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f64m1_tumu(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f64m2 | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f64m2(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f64m2_m | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f64m2_tumu(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f64m4 | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f64m4(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f64m4_m | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f64m4_tumu(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f64m8 | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f64m8(__VA_ARGS__) |
| #define vfwcvt_f_xu_v_f64m8_m | ( | ... | ) | __riscv_vfwcvt_f_xu_v_f64m8_tumu(__VA_ARGS__) |
| #define vfwcvt_rtz_x_f_v_i32m1 | ( | ... | ) | __riscv_vfwcvt_rtz_x_f_v_i32m1(__VA_ARGS__) |
| #define vfwcvt_rtz_x_f_v_i32m1_m | ( | ... | ) | __riscv_vfwcvt_rtz_x_f_v_i32m1_tumu(__VA_ARGS__) |
| #define vfwcvt_rtz_x_f_v_i32m2 | ( | ... | ) | __riscv_vfwcvt_rtz_x_f_v_i32m2(__VA_ARGS__) |
| #define vfwcvt_rtz_x_f_v_i32m2_m | ( | ... | ) | __riscv_vfwcvt_rtz_x_f_v_i32m2_tumu(__VA_ARGS__) |
| #define vfwcvt_rtz_x_f_v_i32m4 | ( | ... | ) | __riscv_vfwcvt_rtz_x_f_v_i32m4(__VA_ARGS__) |
| #define vfwcvt_rtz_x_f_v_i32m4_m | ( | ... | ) | __riscv_vfwcvt_rtz_x_f_v_i32m4_tumu(__VA_ARGS__) |
| #define vfwcvt_rtz_x_f_v_i32m8 | ( | ... | ) | __riscv_vfwcvt_rtz_x_f_v_i32m8(__VA_ARGS__) |
| #define vfwcvt_rtz_x_f_v_i32m8_m | ( | ... | ) | __riscv_vfwcvt_rtz_x_f_v_i32m8_tumu(__VA_ARGS__) |
| #define vfwcvt_rtz_x_f_v_i32mf2 | ( | ... | ) | __riscv_vfwcvt_rtz_x_f_v_i32mf2(__VA_ARGS__) |
| #define vfwcvt_rtz_x_f_v_i32mf2_m | ( | ... | ) | __riscv_vfwcvt_rtz_x_f_v_i32mf2_tumu(__VA_ARGS__) |
| #define vfwcvt_rtz_x_f_v_i64m1 | ( | ... | ) | __riscv_vfwcvt_rtz_x_f_v_i64m1(__VA_ARGS__) |
| #define vfwcvt_rtz_x_f_v_i64m1_m | ( | ... | ) | __riscv_vfwcvt_rtz_x_f_v_i64m1_tumu(__VA_ARGS__) |
| #define vfwcvt_rtz_x_f_v_i64m2 | ( | ... | ) | __riscv_vfwcvt_rtz_x_f_v_i64m2(__VA_ARGS__) |
| #define vfwcvt_rtz_x_f_v_i64m2_m | ( | ... | ) | __riscv_vfwcvt_rtz_x_f_v_i64m2_tumu(__VA_ARGS__) |
| #define vfwcvt_rtz_x_f_v_i64m4 | ( | ... | ) | __riscv_vfwcvt_rtz_x_f_v_i64m4(__VA_ARGS__) |
| #define vfwcvt_rtz_x_f_v_i64m4_m | ( | ... | ) | __riscv_vfwcvt_rtz_x_f_v_i64m4_tumu(__VA_ARGS__) |
| #define vfwcvt_rtz_x_f_v_i64m8 | ( | ... | ) | __riscv_vfwcvt_rtz_x_f_v_i64m8(__VA_ARGS__) |
| #define vfwcvt_rtz_x_f_v_i64m8_m | ( | ... | ) | __riscv_vfwcvt_rtz_x_f_v_i64m8_tumu(__VA_ARGS__) |
| #define vfwcvt_rtz_xu_f_v_u32m1 | ( | ... | ) | __riscv_vfwcvt_rtz_xu_f_v_u32m1(__VA_ARGS__) |
| #define vfwcvt_rtz_xu_f_v_u32m1_m | ( | ... | ) | __riscv_vfwcvt_rtz_xu_f_v_u32m1_tumu(__VA_ARGS__) |
| #define vfwcvt_rtz_xu_f_v_u32m2 | ( | ... | ) | __riscv_vfwcvt_rtz_xu_f_v_u32m2(__VA_ARGS__) |
| #define vfwcvt_rtz_xu_f_v_u32m2_m | ( | ... | ) | __riscv_vfwcvt_rtz_xu_f_v_u32m2_tumu(__VA_ARGS__) |
| #define vfwcvt_rtz_xu_f_v_u32m4 | ( | ... | ) | __riscv_vfwcvt_rtz_xu_f_v_u32m4(__VA_ARGS__) |
| #define vfwcvt_rtz_xu_f_v_u32m4_m | ( | ... | ) | __riscv_vfwcvt_rtz_xu_f_v_u32m4_tumu(__VA_ARGS__) |
| #define vfwcvt_rtz_xu_f_v_u32m8 | ( | ... | ) | __riscv_vfwcvt_rtz_xu_f_v_u32m8(__VA_ARGS__) |
| #define vfwcvt_rtz_xu_f_v_u32m8_m | ( | ... | ) | __riscv_vfwcvt_rtz_xu_f_v_u32m8_tumu(__VA_ARGS__) |
| #define vfwcvt_rtz_xu_f_v_u32mf2 | ( | ... | ) | __riscv_vfwcvt_rtz_xu_f_v_u32mf2(__VA_ARGS__) |
| #define vfwcvt_rtz_xu_f_v_u32mf2_m | ( | ... | ) | __riscv_vfwcvt_rtz_xu_f_v_u32mf2_tumu(__VA_ARGS__) |
| #define vfwcvt_rtz_xu_f_v_u64m1 | ( | ... | ) | __riscv_vfwcvt_rtz_xu_f_v_u64m1(__VA_ARGS__) |
| #define vfwcvt_rtz_xu_f_v_u64m1_m | ( | ... | ) | __riscv_vfwcvt_rtz_xu_f_v_u64m1_tumu(__VA_ARGS__) |
| #define vfwcvt_rtz_xu_f_v_u64m2 | ( | ... | ) | __riscv_vfwcvt_rtz_xu_f_v_u64m2(__VA_ARGS__) |
| #define vfwcvt_rtz_xu_f_v_u64m2_m | ( | ... | ) | __riscv_vfwcvt_rtz_xu_f_v_u64m2_tumu(__VA_ARGS__) |
| #define vfwcvt_rtz_xu_f_v_u64m4 | ( | ... | ) | __riscv_vfwcvt_rtz_xu_f_v_u64m4(__VA_ARGS__) |
| #define vfwcvt_rtz_xu_f_v_u64m4_m | ( | ... | ) | __riscv_vfwcvt_rtz_xu_f_v_u64m4_tumu(__VA_ARGS__) |
| #define vfwcvt_rtz_xu_f_v_u64m8 | ( | ... | ) | __riscv_vfwcvt_rtz_xu_f_v_u64m8(__VA_ARGS__) |
| #define vfwcvt_rtz_xu_f_v_u64m8_m | ( | ... | ) | __riscv_vfwcvt_rtz_xu_f_v_u64m8_tumu(__VA_ARGS__) |
| #define vfwcvt_x_f_v_i32m1 | ( | ... | ) | __riscv_vfwcvt_x_f_v_i32m1(__VA_ARGS__) |
| #define vfwcvt_x_f_v_i32m1_m | ( | ... | ) | __riscv_vfwcvt_x_f_v_i32m1_tumu(__VA_ARGS__) |
| #define vfwcvt_x_f_v_i32m2 | ( | ... | ) | __riscv_vfwcvt_x_f_v_i32m2(__VA_ARGS__) |
| #define vfwcvt_x_f_v_i32m2_m | ( | ... | ) | __riscv_vfwcvt_x_f_v_i32m2_tumu(__VA_ARGS__) |
| #define vfwcvt_x_f_v_i32m4 | ( | ... | ) | __riscv_vfwcvt_x_f_v_i32m4(__VA_ARGS__) |
| #define vfwcvt_x_f_v_i32m4_m | ( | ... | ) | __riscv_vfwcvt_x_f_v_i32m4_tumu(__VA_ARGS__) |
| #define vfwcvt_x_f_v_i32m8 | ( | ... | ) | __riscv_vfwcvt_x_f_v_i32m8(__VA_ARGS__) |
| #define vfwcvt_x_f_v_i32m8_m | ( | ... | ) | __riscv_vfwcvt_x_f_v_i32m8_tumu(__VA_ARGS__) |
| #define vfwcvt_x_f_v_i32mf2 | ( | ... | ) | __riscv_vfwcvt_x_f_v_i32mf2(__VA_ARGS__) |
| #define vfwcvt_x_f_v_i32mf2_m | ( | ... | ) | __riscv_vfwcvt_x_f_v_i32mf2_tumu(__VA_ARGS__) |
| #define vfwcvt_x_f_v_i64m1 | ( | ... | ) | __riscv_vfwcvt_x_f_v_i64m1(__VA_ARGS__) |
| #define vfwcvt_x_f_v_i64m1_m | ( | ... | ) | __riscv_vfwcvt_x_f_v_i64m1_tumu(__VA_ARGS__) |
| #define vfwcvt_x_f_v_i64m2 | ( | ... | ) | __riscv_vfwcvt_x_f_v_i64m2(__VA_ARGS__) |
| #define vfwcvt_x_f_v_i64m2_m | ( | ... | ) | __riscv_vfwcvt_x_f_v_i64m2_tumu(__VA_ARGS__) |
| #define vfwcvt_x_f_v_i64m4 | ( | ... | ) | __riscv_vfwcvt_x_f_v_i64m4(__VA_ARGS__) |
| #define vfwcvt_x_f_v_i64m4_m | ( | ... | ) | __riscv_vfwcvt_x_f_v_i64m4_tumu(__VA_ARGS__) |
| #define vfwcvt_x_f_v_i64m8 | ( | ... | ) | __riscv_vfwcvt_x_f_v_i64m8(__VA_ARGS__) |
| #define vfwcvt_x_f_v_i64m8_m | ( | ... | ) | __riscv_vfwcvt_x_f_v_i64m8_tumu(__VA_ARGS__) |
| #define vfwcvt_xu_f_v_u32m1 | ( | ... | ) | __riscv_vfwcvt_xu_f_v_u32m1(__VA_ARGS__) |
| #define vfwcvt_xu_f_v_u32m1_m | ( | ... | ) | __riscv_vfwcvt_xu_f_v_u32m1_tumu(__VA_ARGS__) |
| #define vfwcvt_xu_f_v_u32m2 | ( | ... | ) | __riscv_vfwcvt_xu_f_v_u32m2(__VA_ARGS__) |
| #define vfwcvt_xu_f_v_u32m2_m | ( | ... | ) | __riscv_vfwcvt_xu_f_v_u32m2_tumu(__VA_ARGS__) |
| #define vfwcvt_xu_f_v_u32m4 | ( | ... | ) | __riscv_vfwcvt_xu_f_v_u32m4(__VA_ARGS__) |
| #define vfwcvt_xu_f_v_u32m4_m | ( | ... | ) | __riscv_vfwcvt_xu_f_v_u32m4_tumu(__VA_ARGS__) |
| #define vfwcvt_xu_f_v_u32m8 | ( | ... | ) | __riscv_vfwcvt_xu_f_v_u32m8(__VA_ARGS__) |
| #define vfwcvt_xu_f_v_u32m8_m | ( | ... | ) | __riscv_vfwcvt_xu_f_v_u32m8_tumu(__VA_ARGS__) |
| #define vfwcvt_xu_f_v_u32mf2 | ( | ... | ) | __riscv_vfwcvt_xu_f_v_u32mf2(__VA_ARGS__) |
| #define vfwcvt_xu_f_v_u32mf2_m | ( | ... | ) | __riscv_vfwcvt_xu_f_v_u32mf2_tumu(__VA_ARGS__) |
| #define vfwcvt_xu_f_v_u64m1 | ( | ... | ) | __riscv_vfwcvt_xu_f_v_u64m1(__VA_ARGS__) |
| #define vfwcvt_xu_f_v_u64m1_m | ( | ... | ) | __riscv_vfwcvt_xu_f_v_u64m1_tumu(__VA_ARGS__) |
| #define vfwcvt_xu_f_v_u64m2 | ( | ... | ) | __riscv_vfwcvt_xu_f_v_u64m2(__VA_ARGS__) |
| #define vfwcvt_xu_f_v_u64m2_m | ( | ... | ) | __riscv_vfwcvt_xu_f_v_u64m2_tumu(__VA_ARGS__) |
| #define vfwcvt_xu_f_v_u64m4 | ( | ... | ) | __riscv_vfwcvt_xu_f_v_u64m4(__VA_ARGS__) |
| #define vfwcvt_xu_f_v_u64m4_m | ( | ... | ) | __riscv_vfwcvt_xu_f_v_u64m4_tumu(__VA_ARGS__) |
| #define vfwcvt_xu_f_v_u64m8 | ( | ... | ) | __riscv_vfwcvt_xu_f_v_u64m8(__VA_ARGS__) |
| #define vfwcvt_xu_f_v_u64m8_m | ( | ... | ) | __riscv_vfwcvt_xu_f_v_u64m8_tumu(__VA_ARGS__) |
| #define vfwmacc_vf_f32m1 | ( | ... | ) | __riscv_vfwmacc_vf_f32m1_tu(__VA_ARGS__) |
| #define vfwmacc_vf_f32m1_m | ( | ... | ) | __riscv_vfwmacc_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfwmacc_vf_f32m2 | ( | ... | ) | __riscv_vfwmacc_vf_f32m2_tu(__VA_ARGS__) |
| #define vfwmacc_vf_f32m2_m | ( | ... | ) | __riscv_vfwmacc_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfwmacc_vf_f32m4 | ( | ... | ) | __riscv_vfwmacc_vf_f32m4_tu(__VA_ARGS__) |
| #define vfwmacc_vf_f32m4_m | ( | ... | ) | __riscv_vfwmacc_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfwmacc_vf_f32m8 | ( | ... | ) | __riscv_vfwmacc_vf_f32m8_tu(__VA_ARGS__) |
| #define vfwmacc_vf_f32m8_m | ( | ... | ) | __riscv_vfwmacc_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfwmacc_vf_f32mf2 | ( | ... | ) | __riscv_vfwmacc_vf_f32mf2_tu(__VA_ARGS__) |
| #define vfwmacc_vf_f32mf2_m | ( | ... | ) | __riscv_vfwmacc_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfwmacc_vf_f64m1 | ( | ... | ) | __riscv_vfwmacc_vf_f64m1_tu(__VA_ARGS__) |
| #define vfwmacc_vf_f64m1_m | ( | ... | ) | __riscv_vfwmacc_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfwmacc_vf_f64m2 | ( | ... | ) | __riscv_vfwmacc_vf_f64m2_tu(__VA_ARGS__) |
| #define vfwmacc_vf_f64m2_m | ( | ... | ) | __riscv_vfwmacc_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfwmacc_vf_f64m4 | ( | ... | ) | __riscv_vfwmacc_vf_f64m4_tu(__VA_ARGS__) |
| #define vfwmacc_vf_f64m4_m | ( | ... | ) | __riscv_vfwmacc_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfwmacc_vf_f64m8 | ( | ... | ) | __riscv_vfwmacc_vf_f64m8_tu(__VA_ARGS__) |
| #define vfwmacc_vf_f64m8_m | ( | ... | ) | __riscv_vfwmacc_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfwmacc_vv_f32m1 | ( | ... | ) | __riscv_vfwmacc_vv_f32m1_tu(__VA_ARGS__) |
| #define vfwmacc_vv_f32m1_m | ( | ... | ) | __riscv_vfwmacc_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfwmacc_vv_f32m2 | ( | ... | ) | __riscv_vfwmacc_vv_f32m2_tu(__VA_ARGS__) |
| #define vfwmacc_vv_f32m2_m | ( | ... | ) | __riscv_vfwmacc_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfwmacc_vv_f32m4 | ( | ... | ) | __riscv_vfwmacc_vv_f32m4_tu(__VA_ARGS__) |
| #define vfwmacc_vv_f32m4_m | ( | ... | ) | __riscv_vfwmacc_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfwmacc_vv_f32m8 | ( | ... | ) | __riscv_vfwmacc_vv_f32m8_tu(__VA_ARGS__) |
| #define vfwmacc_vv_f32m8_m | ( | ... | ) | __riscv_vfwmacc_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfwmacc_vv_f32mf2 | ( | ... | ) | __riscv_vfwmacc_vv_f32mf2_tu(__VA_ARGS__) |
| #define vfwmacc_vv_f32mf2_m | ( | ... | ) | __riscv_vfwmacc_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfwmacc_vv_f64m1 | ( | ... | ) | __riscv_vfwmacc_vv_f64m1_tu(__VA_ARGS__) |
| #define vfwmacc_vv_f64m1_m | ( | ... | ) | __riscv_vfwmacc_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfwmacc_vv_f64m2 | ( | ... | ) | __riscv_vfwmacc_vv_f64m2_tu(__VA_ARGS__) |
| #define vfwmacc_vv_f64m2_m | ( | ... | ) | __riscv_vfwmacc_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfwmacc_vv_f64m4 | ( | ... | ) | __riscv_vfwmacc_vv_f64m4_tu(__VA_ARGS__) |
| #define vfwmacc_vv_f64m4_m | ( | ... | ) | __riscv_vfwmacc_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfwmacc_vv_f64m8 | ( | ... | ) | __riscv_vfwmacc_vv_f64m8_tu(__VA_ARGS__) |
| #define vfwmacc_vv_f64m8_m | ( | ... | ) | __riscv_vfwmacc_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfwmsac_vf_f32m1 | ( | ... | ) | __riscv_vfwmsac_vf_f32m1_tu(__VA_ARGS__) |
| #define vfwmsac_vf_f32m1_m | ( | ... | ) | __riscv_vfwmsac_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfwmsac_vf_f32m2 | ( | ... | ) | __riscv_vfwmsac_vf_f32m2_tu(__VA_ARGS__) |
| #define vfwmsac_vf_f32m2_m | ( | ... | ) | __riscv_vfwmsac_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfwmsac_vf_f32m4 | ( | ... | ) | __riscv_vfwmsac_vf_f32m4_tu(__VA_ARGS__) |
| #define vfwmsac_vf_f32m4_m | ( | ... | ) | __riscv_vfwmsac_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfwmsac_vf_f32m8 | ( | ... | ) | __riscv_vfwmsac_vf_f32m8_tu(__VA_ARGS__) |
| #define vfwmsac_vf_f32m8_m | ( | ... | ) | __riscv_vfwmsac_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfwmsac_vf_f32mf2 | ( | ... | ) | __riscv_vfwmsac_vf_f32mf2_tu(__VA_ARGS__) |
| #define vfwmsac_vf_f32mf2_m | ( | ... | ) | __riscv_vfwmsac_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfwmsac_vf_f64m1 | ( | ... | ) | __riscv_vfwmsac_vf_f64m1_tu(__VA_ARGS__) |
| #define vfwmsac_vf_f64m1_m | ( | ... | ) | __riscv_vfwmsac_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfwmsac_vf_f64m2 | ( | ... | ) | __riscv_vfwmsac_vf_f64m2_tu(__VA_ARGS__) |
| #define vfwmsac_vf_f64m2_m | ( | ... | ) | __riscv_vfwmsac_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfwmsac_vf_f64m4 | ( | ... | ) | __riscv_vfwmsac_vf_f64m4_tu(__VA_ARGS__) |
| #define vfwmsac_vf_f64m4_m | ( | ... | ) | __riscv_vfwmsac_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfwmsac_vf_f64m8 | ( | ... | ) | __riscv_vfwmsac_vf_f64m8_tu(__VA_ARGS__) |
| #define vfwmsac_vf_f64m8_m | ( | ... | ) | __riscv_vfwmsac_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfwmsac_vv_f32m1 | ( | ... | ) | __riscv_vfwmsac_vv_f32m1_tu(__VA_ARGS__) |
| #define vfwmsac_vv_f32m1_m | ( | ... | ) | __riscv_vfwmsac_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfwmsac_vv_f32m2 | ( | ... | ) | __riscv_vfwmsac_vv_f32m2_tu(__VA_ARGS__) |
| #define vfwmsac_vv_f32m2_m | ( | ... | ) | __riscv_vfwmsac_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfwmsac_vv_f32m4 | ( | ... | ) | __riscv_vfwmsac_vv_f32m4_tu(__VA_ARGS__) |
| #define vfwmsac_vv_f32m4_m | ( | ... | ) | __riscv_vfwmsac_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfwmsac_vv_f32m8 | ( | ... | ) | __riscv_vfwmsac_vv_f32m8_tu(__VA_ARGS__) |
| #define vfwmsac_vv_f32m8_m | ( | ... | ) | __riscv_vfwmsac_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfwmsac_vv_f32mf2 | ( | ... | ) | __riscv_vfwmsac_vv_f32mf2_tu(__VA_ARGS__) |
| #define vfwmsac_vv_f32mf2_m | ( | ... | ) | __riscv_vfwmsac_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfwmsac_vv_f64m1 | ( | ... | ) | __riscv_vfwmsac_vv_f64m1_tu(__VA_ARGS__) |
| #define vfwmsac_vv_f64m1_m | ( | ... | ) | __riscv_vfwmsac_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfwmsac_vv_f64m2 | ( | ... | ) | __riscv_vfwmsac_vv_f64m2_tu(__VA_ARGS__) |
| #define vfwmsac_vv_f64m2_m | ( | ... | ) | __riscv_vfwmsac_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfwmsac_vv_f64m4 | ( | ... | ) | __riscv_vfwmsac_vv_f64m4_tu(__VA_ARGS__) |
| #define vfwmsac_vv_f64m4_m | ( | ... | ) | __riscv_vfwmsac_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfwmsac_vv_f64m8 | ( | ... | ) | __riscv_vfwmsac_vv_f64m8_tu(__VA_ARGS__) |
| #define vfwmsac_vv_f64m8_m | ( | ... | ) | __riscv_vfwmsac_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfwmul_vf_f32m1 | ( | ... | ) | __riscv_vfwmul_vf_f32m1(__VA_ARGS__) |
| #define vfwmul_vf_f32m1_m | ( | ... | ) | __riscv_vfwmul_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfwmul_vf_f32m2 | ( | ... | ) | __riscv_vfwmul_vf_f32m2(__VA_ARGS__) |
| #define vfwmul_vf_f32m2_m | ( | ... | ) | __riscv_vfwmul_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfwmul_vf_f32m4 | ( | ... | ) | __riscv_vfwmul_vf_f32m4(__VA_ARGS__) |
| #define vfwmul_vf_f32m4_m | ( | ... | ) | __riscv_vfwmul_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfwmul_vf_f32m8 | ( | ... | ) | __riscv_vfwmul_vf_f32m8(__VA_ARGS__) |
| #define vfwmul_vf_f32m8_m | ( | ... | ) | __riscv_vfwmul_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfwmul_vf_f32mf2 | ( | ... | ) | __riscv_vfwmul_vf_f32mf2(__VA_ARGS__) |
| #define vfwmul_vf_f32mf2_m | ( | ... | ) | __riscv_vfwmul_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfwmul_vf_f64m1 | ( | ... | ) | __riscv_vfwmul_vf_f64m1(__VA_ARGS__) |
| #define vfwmul_vf_f64m1_m | ( | ... | ) | __riscv_vfwmul_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfwmul_vf_f64m2 | ( | ... | ) | __riscv_vfwmul_vf_f64m2(__VA_ARGS__) |
| #define vfwmul_vf_f64m2_m | ( | ... | ) | __riscv_vfwmul_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfwmul_vf_f64m4 | ( | ... | ) | __riscv_vfwmul_vf_f64m4(__VA_ARGS__) |
| #define vfwmul_vf_f64m4_m | ( | ... | ) | __riscv_vfwmul_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfwmul_vf_f64m8 | ( | ... | ) | __riscv_vfwmul_vf_f64m8(__VA_ARGS__) |
| #define vfwmul_vf_f64m8_m | ( | ... | ) | __riscv_vfwmul_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfwmul_vv_f32m1 | ( | ... | ) | __riscv_vfwmul_vv_f32m1(__VA_ARGS__) |
| #define vfwmul_vv_f32m1_m | ( | ... | ) | __riscv_vfwmul_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfwmul_vv_f32m2 | ( | ... | ) | __riscv_vfwmul_vv_f32m2(__VA_ARGS__) |
| #define vfwmul_vv_f32m2_m | ( | ... | ) | __riscv_vfwmul_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfwmul_vv_f32m4 | ( | ... | ) | __riscv_vfwmul_vv_f32m4(__VA_ARGS__) |
| #define vfwmul_vv_f32m4_m | ( | ... | ) | __riscv_vfwmul_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfwmul_vv_f32m8 | ( | ... | ) | __riscv_vfwmul_vv_f32m8(__VA_ARGS__) |
| #define vfwmul_vv_f32m8_m | ( | ... | ) | __riscv_vfwmul_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfwmul_vv_f32mf2 | ( | ... | ) | __riscv_vfwmul_vv_f32mf2(__VA_ARGS__) |
| #define vfwmul_vv_f32mf2_m | ( | ... | ) | __riscv_vfwmul_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfwmul_vv_f64m1 | ( | ... | ) | __riscv_vfwmul_vv_f64m1(__VA_ARGS__) |
| #define vfwmul_vv_f64m1_m | ( | ... | ) | __riscv_vfwmul_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfwmul_vv_f64m2 | ( | ... | ) | __riscv_vfwmul_vv_f64m2(__VA_ARGS__) |
| #define vfwmul_vv_f64m2_m | ( | ... | ) | __riscv_vfwmul_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfwmul_vv_f64m4 | ( | ... | ) | __riscv_vfwmul_vv_f64m4(__VA_ARGS__) |
| #define vfwmul_vv_f64m4_m | ( | ... | ) | __riscv_vfwmul_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfwmul_vv_f64m8 | ( | ... | ) | __riscv_vfwmul_vv_f64m8(__VA_ARGS__) |
| #define vfwmul_vv_f64m8_m | ( | ... | ) | __riscv_vfwmul_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfwnmacc_vf_f32m1 | ( | ... | ) | __riscv_vfwnmacc_vf_f32m1_tu(__VA_ARGS__) |
| #define vfwnmacc_vf_f32m1_m | ( | ... | ) | __riscv_vfwnmacc_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfwnmacc_vf_f32m2 | ( | ... | ) | __riscv_vfwnmacc_vf_f32m2_tu(__VA_ARGS__) |
| #define vfwnmacc_vf_f32m2_m | ( | ... | ) | __riscv_vfwnmacc_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfwnmacc_vf_f32m4 | ( | ... | ) | __riscv_vfwnmacc_vf_f32m4_tu(__VA_ARGS__) |
| #define vfwnmacc_vf_f32m4_m | ( | ... | ) | __riscv_vfwnmacc_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfwnmacc_vf_f32m8 | ( | ... | ) | __riscv_vfwnmacc_vf_f32m8_tu(__VA_ARGS__) |
| #define vfwnmacc_vf_f32m8_m | ( | ... | ) | __riscv_vfwnmacc_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfwnmacc_vf_f32mf2 | ( | ... | ) | __riscv_vfwnmacc_vf_f32mf2_tu(__VA_ARGS__) |
| #define vfwnmacc_vf_f32mf2_m | ( | ... | ) | __riscv_vfwnmacc_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfwnmacc_vf_f64m1 | ( | ... | ) | __riscv_vfwnmacc_vf_f64m1_tu(__VA_ARGS__) |
| #define vfwnmacc_vf_f64m1_m | ( | ... | ) | __riscv_vfwnmacc_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfwnmacc_vf_f64m2 | ( | ... | ) | __riscv_vfwnmacc_vf_f64m2_tu(__VA_ARGS__) |
| #define vfwnmacc_vf_f64m2_m | ( | ... | ) | __riscv_vfwnmacc_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfwnmacc_vf_f64m4 | ( | ... | ) | __riscv_vfwnmacc_vf_f64m4_tu(__VA_ARGS__) |
| #define vfwnmacc_vf_f64m4_m | ( | ... | ) | __riscv_vfwnmacc_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfwnmacc_vf_f64m8 | ( | ... | ) | __riscv_vfwnmacc_vf_f64m8_tu(__VA_ARGS__) |
| #define vfwnmacc_vf_f64m8_m | ( | ... | ) | __riscv_vfwnmacc_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfwnmacc_vv_f32m1 | ( | ... | ) | __riscv_vfwnmacc_vv_f32m1_tu(__VA_ARGS__) |
| #define vfwnmacc_vv_f32m1_m | ( | ... | ) | __riscv_vfwnmacc_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfwnmacc_vv_f32m2 | ( | ... | ) | __riscv_vfwnmacc_vv_f32m2_tu(__VA_ARGS__) |
| #define vfwnmacc_vv_f32m2_m | ( | ... | ) | __riscv_vfwnmacc_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfwnmacc_vv_f32m4 | ( | ... | ) | __riscv_vfwnmacc_vv_f32m4_tu(__VA_ARGS__) |
| #define vfwnmacc_vv_f32m4_m | ( | ... | ) | __riscv_vfwnmacc_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfwnmacc_vv_f32m8 | ( | ... | ) | __riscv_vfwnmacc_vv_f32m8_tu(__VA_ARGS__) |
| #define vfwnmacc_vv_f32m8_m | ( | ... | ) | __riscv_vfwnmacc_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfwnmacc_vv_f32mf2 | ( | ... | ) | __riscv_vfwnmacc_vv_f32mf2_tu(__VA_ARGS__) |
| #define vfwnmacc_vv_f32mf2_m | ( | ... | ) | __riscv_vfwnmacc_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfwnmacc_vv_f64m1 | ( | ... | ) | __riscv_vfwnmacc_vv_f64m1_tu(__VA_ARGS__) |
| #define vfwnmacc_vv_f64m1_m | ( | ... | ) | __riscv_vfwnmacc_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfwnmacc_vv_f64m2 | ( | ... | ) | __riscv_vfwnmacc_vv_f64m2_tu(__VA_ARGS__) |
| #define vfwnmacc_vv_f64m2_m | ( | ... | ) | __riscv_vfwnmacc_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfwnmacc_vv_f64m4 | ( | ... | ) | __riscv_vfwnmacc_vv_f64m4_tu(__VA_ARGS__) |
| #define vfwnmacc_vv_f64m4_m | ( | ... | ) | __riscv_vfwnmacc_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfwnmacc_vv_f64m8 | ( | ... | ) | __riscv_vfwnmacc_vv_f64m8_tu(__VA_ARGS__) |
| #define vfwnmacc_vv_f64m8_m | ( | ... | ) | __riscv_vfwnmacc_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfwnmsac_vf_f32m1 | ( | ... | ) | __riscv_vfwnmsac_vf_f32m1_tu(__VA_ARGS__) |
| #define vfwnmsac_vf_f32m1_m | ( | ... | ) | __riscv_vfwnmsac_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfwnmsac_vf_f32m2 | ( | ... | ) | __riscv_vfwnmsac_vf_f32m2_tu(__VA_ARGS__) |
| #define vfwnmsac_vf_f32m2_m | ( | ... | ) | __riscv_vfwnmsac_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfwnmsac_vf_f32m4 | ( | ... | ) | __riscv_vfwnmsac_vf_f32m4_tu(__VA_ARGS__) |
| #define vfwnmsac_vf_f32m4_m | ( | ... | ) | __riscv_vfwnmsac_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfwnmsac_vf_f32m8 | ( | ... | ) | __riscv_vfwnmsac_vf_f32m8_tu(__VA_ARGS__) |
| #define vfwnmsac_vf_f32m8_m | ( | ... | ) | __riscv_vfwnmsac_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfwnmsac_vf_f32mf2 | ( | ... | ) | __riscv_vfwnmsac_vf_f32mf2_tu(__VA_ARGS__) |
| #define vfwnmsac_vf_f32mf2_m | ( | ... | ) | __riscv_vfwnmsac_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfwnmsac_vf_f64m1 | ( | ... | ) | __riscv_vfwnmsac_vf_f64m1_tu(__VA_ARGS__) |
| #define vfwnmsac_vf_f64m1_m | ( | ... | ) | __riscv_vfwnmsac_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfwnmsac_vf_f64m2 | ( | ... | ) | __riscv_vfwnmsac_vf_f64m2_tu(__VA_ARGS__) |
| #define vfwnmsac_vf_f64m2_m | ( | ... | ) | __riscv_vfwnmsac_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfwnmsac_vf_f64m4 | ( | ... | ) | __riscv_vfwnmsac_vf_f64m4_tu(__VA_ARGS__) |
| #define vfwnmsac_vf_f64m4_m | ( | ... | ) | __riscv_vfwnmsac_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfwnmsac_vf_f64m8 | ( | ... | ) | __riscv_vfwnmsac_vf_f64m8_tu(__VA_ARGS__) |
| #define vfwnmsac_vf_f64m8_m | ( | ... | ) | __riscv_vfwnmsac_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfwnmsac_vv_f32m1 | ( | ... | ) | __riscv_vfwnmsac_vv_f32m1_tu(__VA_ARGS__) |
| #define vfwnmsac_vv_f32m1_m | ( | ... | ) | __riscv_vfwnmsac_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfwnmsac_vv_f32m2 | ( | ... | ) | __riscv_vfwnmsac_vv_f32m2_tu(__VA_ARGS__) |
| #define vfwnmsac_vv_f32m2_m | ( | ... | ) | __riscv_vfwnmsac_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfwnmsac_vv_f32m4 | ( | ... | ) | __riscv_vfwnmsac_vv_f32m4_tu(__VA_ARGS__) |
| #define vfwnmsac_vv_f32m4_m | ( | ... | ) | __riscv_vfwnmsac_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfwnmsac_vv_f32m8 | ( | ... | ) | __riscv_vfwnmsac_vv_f32m8_tu(__VA_ARGS__) |
| #define vfwnmsac_vv_f32m8_m | ( | ... | ) | __riscv_vfwnmsac_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfwnmsac_vv_f32mf2 | ( | ... | ) | __riscv_vfwnmsac_vv_f32mf2_tu(__VA_ARGS__) |
| #define vfwnmsac_vv_f32mf2_m | ( | ... | ) | __riscv_vfwnmsac_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfwnmsac_vv_f64m1 | ( | ... | ) | __riscv_vfwnmsac_vv_f64m1_tu(__VA_ARGS__) |
| #define vfwnmsac_vv_f64m1_m | ( | ... | ) | __riscv_vfwnmsac_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfwnmsac_vv_f64m2 | ( | ... | ) | __riscv_vfwnmsac_vv_f64m2_tu(__VA_ARGS__) |
| #define vfwnmsac_vv_f64m2_m | ( | ... | ) | __riscv_vfwnmsac_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfwnmsac_vv_f64m4 | ( | ... | ) | __riscv_vfwnmsac_vv_f64m4_tu(__VA_ARGS__) |
| #define vfwnmsac_vv_f64m4_m | ( | ... | ) | __riscv_vfwnmsac_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfwnmsac_vv_f64m8 | ( | ... | ) | __riscv_vfwnmsac_vv_f64m8_tu(__VA_ARGS__) |
| #define vfwnmsac_vv_f64m8_m | ( | ... | ) | __riscv_vfwnmsac_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfwredosum_vs_f16m1_f32m1 | ( | ... | ) | __riscv_vfwredosum_vs_f16m1_f32m1_tu(__VA_ARGS__) |
| #define vfwredosum_vs_f16m1_f32m1_m | ( | ... | ) | __riscv_vfwredosum_vs_f16m1_f32m1_tum(__VA_ARGS__) |
| #define vfwredosum_vs_f16m2_f32m1 | ( | ... | ) | __riscv_vfwredosum_vs_f16m2_f32m1_tu(__VA_ARGS__) |
| #define vfwredosum_vs_f16m2_f32m1_m | ( | ... | ) | __riscv_vfwredosum_vs_f16m2_f32m1_tum(__VA_ARGS__) |
| #define vfwredosum_vs_f16m4_f32m1 | ( | ... | ) | __riscv_vfwredosum_vs_f16m4_f32m1_tu(__VA_ARGS__) |
| #define vfwredosum_vs_f16m4_f32m1_m | ( | ... | ) | __riscv_vfwredosum_vs_f16m4_f32m1_tum(__VA_ARGS__) |
| #define vfwredosum_vs_f16m8_f32m1 | ( | ... | ) | __riscv_vfwredosum_vs_f16m8_f32m1_tu(__VA_ARGS__) |
| #define vfwredosum_vs_f16m8_f32m1_m | ( | ... | ) | __riscv_vfwredosum_vs_f16m8_f32m1_tum(__VA_ARGS__) |
| #define vfwredosum_vs_f16mf2_f32m1 | ( | ... | ) | __riscv_vfwredosum_vs_f16mf2_f32m1_tu(__VA_ARGS__) |
| #define vfwredosum_vs_f16mf2_f32m1_m | ( | ... | ) | __riscv_vfwredosum_vs_f16mf2_f32m1_tum(__VA_ARGS__) |
| #define vfwredosum_vs_f16mf4_f32m1 | ( | ... | ) | __riscv_vfwredosum_vs_f16mf4_f32m1_tu(__VA_ARGS__) |
| #define vfwredosum_vs_f16mf4_f32m1_m | ( | ... | ) | __riscv_vfwredosum_vs_f16mf4_f32m1_tum(__VA_ARGS__) |
| #define vfwredosum_vs_f32m1_f64m1 | ( | ... | ) | __riscv_vfwredosum_vs_f32m1_f64m1_tu(__VA_ARGS__) |
| #define vfwredosum_vs_f32m1_f64m1_m | ( | ... | ) | __riscv_vfwredosum_vs_f32m1_f64m1_tum(__VA_ARGS__) |
| #define vfwredosum_vs_f32m2_f64m1 | ( | ... | ) | __riscv_vfwredosum_vs_f32m2_f64m1_tu(__VA_ARGS__) |
| #define vfwredosum_vs_f32m2_f64m1_m | ( | ... | ) | __riscv_vfwredosum_vs_f32m2_f64m1_tum(__VA_ARGS__) |
| #define vfwredosum_vs_f32m4_f64m1 | ( | ... | ) | __riscv_vfwredosum_vs_f32m4_f64m1_tu(__VA_ARGS__) |
| #define vfwredosum_vs_f32m4_f64m1_m | ( | ... | ) | __riscv_vfwredosum_vs_f32m4_f64m1_tum(__VA_ARGS__) |
| #define vfwredosum_vs_f32m8_f64m1 | ( | ... | ) | __riscv_vfwredosum_vs_f32m8_f64m1_tu(__VA_ARGS__) |
| #define vfwredosum_vs_f32m8_f64m1_m | ( | ... | ) | __riscv_vfwredosum_vs_f32m8_f64m1_tum(__VA_ARGS__) |
| #define vfwredosum_vs_f32mf2_f64m1 | ( | ... | ) | __riscv_vfwredosum_vs_f32mf2_f64m1_tu(__VA_ARGS__) |
| #define vfwredosum_vs_f32mf2_f64m1_m | ( | ... | ) | __riscv_vfwredosum_vs_f32mf2_f64m1_tum(__VA_ARGS__) |
| #define vfwredusum_vs_f16m1_f32m1 | ( | ... | ) | __riscv_vfwredusum_vs_f16m1_f32m1_tu(__VA_ARGS__) |
| #define vfwredusum_vs_f16m1_f32m1_m | ( | ... | ) | __riscv_vfwredusum_vs_f16m1_f32m1_tum(__VA_ARGS__) |
| #define vfwredusum_vs_f16m2_f32m1 | ( | ... | ) | __riscv_vfwredusum_vs_f16m2_f32m1_tu(__VA_ARGS__) |
| #define vfwredusum_vs_f16m2_f32m1_m | ( | ... | ) | __riscv_vfwredusum_vs_f16m2_f32m1_tum(__VA_ARGS__) |
| #define vfwredusum_vs_f16m4_f32m1 | ( | ... | ) | __riscv_vfwredusum_vs_f16m4_f32m1_tu(__VA_ARGS__) |
| #define vfwredusum_vs_f16m4_f32m1_m | ( | ... | ) | __riscv_vfwredusum_vs_f16m4_f32m1_tum(__VA_ARGS__) |
| #define vfwredusum_vs_f16m8_f32m1 | ( | ... | ) | __riscv_vfwredusum_vs_f16m8_f32m1_tu(__VA_ARGS__) |
| #define vfwredusum_vs_f16m8_f32m1_m | ( | ... | ) | __riscv_vfwredusum_vs_f16m8_f32m1_tum(__VA_ARGS__) |
| #define vfwredusum_vs_f16mf2_f32m1 | ( | ... | ) | __riscv_vfwredusum_vs_f16mf2_f32m1_tu(__VA_ARGS__) |
| #define vfwredusum_vs_f16mf2_f32m1_m | ( | ... | ) | __riscv_vfwredusum_vs_f16mf2_f32m1_tum(__VA_ARGS__) |
| #define vfwredusum_vs_f16mf4_f32m1 | ( | ... | ) | __riscv_vfwredusum_vs_f16mf4_f32m1_tu(__VA_ARGS__) |
| #define vfwredusum_vs_f16mf4_f32m1_m | ( | ... | ) | __riscv_vfwredusum_vs_f16mf4_f32m1_tum(__VA_ARGS__) |
| #define vfwredusum_vs_f32m1_f64m1 | ( | ... | ) | __riscv_vfwredusum_vs_f32m1_f64m1_tu(__VA_ARGS__) |
| #define vfwredusum_vs_f32m1_f64m1_m | ( | ... | ) | __riscv_vfwredusum_vs_f32m1_f64m1_tum(__VA_ARGS__) |
| #define vfwredusum_vs_f32m2_f64m1 | ( | ... | ) | __riscv_vfwredusum_vs_f32m2_f64m1_tu(__VA_ARGS__) |
| #define vfwredusum_vs_f32m2_f64m1_m | ( | ... | ) | __riscv_vfwredusum_vs_f32m2_f64m1_tum(__VA_ARGS__) |
| #define vfwredusum_vs_f32m4_f64m1 | ( | ... | ) | __riscv_vfwredusum_vs_f32m4_f64m1_tu(__VA_ARGS__) |
| #define vfwredusum_vs_f32m4_f64m1_m | ( | ... | ) | __riscv_vfwredusum_vs_f32m4_f64m1_tum(__VA_ARGS__) |
| #define vfwredusum_vs_f32m8_f64m1 | ( | ... | ) | __riscv_vfwredusum_vs_f32m8_f64m1_tu(__VA_ARGS__) |
| #define vfwredusum_vs_f32m8_f64m1_m | ( | ... | ) | __riscv_vfwredusum_vs_f32m8_f64m1_tum(__VA_ARGS__) |
| #define vfwredusum_vs_f32mf2_f64m1 | ( | ... | ) | __riscv_vfwredusum_vs_f32mf2_f64m1_tu(__VA_ARGS__) |
| #define vfwredusum_vs_f32mf2_f64m1_m | ( | ... | ) | __riscv_vfwredusum_vs_f32mf2_f64m1_tum(__VA_ARGS__) |
| #define vfwsub_vf_f32m1 | ( | ... | ) | __riscv_vfwsub_vf_f32m1(__VA_ARGS__) |
| #define vfwsub_vf_f32m1_m | ( | ... | ) | __riscv_vfwsub_vf_f32m1_tumu(__VA_ARGS__) |
| #define vfwsub_vf_f32m2 | ( | ... | ) | __riscv_vfwsub_vf_f32m2(__VA_ARGS__) |
| #define vfwsub_vf_f32m2_m | ( | ... | ) | __riscv_vfwsub_vf_f32m2_tumu(__VA_ARGS__) |
| #define vfwsub_vf_f32m4 | ( | ... | ) | __riscv_vfwsub_vf_f32m4(__VA_ARGS__) |
| #define vfwsub_vf_f32m4_m | ( | ... | ) | __riscv_vfwsub_vf_f32m4_tumu(__VA_ARGS__) |
| #define vfwsub_vf_f32m8 | ( | ... | ) | __riscv_vfwsub_vf_f32m8(__VA_ARGS__) |
| #define vfwsub_vf_f32m8_m | ( | ... | ) | __riscv_vfwsub_vf_f32m8_tumu(__VA_ARGS__) |
| #define vfwsub_vf_f32mf2 | ( | ... | ) | __riscv_vfwsub_vf_f32mf2(__VA_ARGS__) |
| #define vfwsub_vf_f32mf2_m | ( | ... | ) | __riscv_vfwsub_vf_f32mf2_tumu(__VA_ARGS__) |
| #define vfwsub_vf_f64m1 | ( | ... | ) | __riscv_vfwsub_vf_f64m1(__VA_ARGS__) |
| #define vfwsub_vf_f64m1_m | ( | ... | ) | __riscv_vfwsub_vf_f64m1_tumu(__VA_ARGS__) |
| #define vfwsub_vf_f64m2 | ( | ... | ) | __riscv_vfwsub_vf_f64m2(__VA_ARGS__) |
| #define vfwsub_vf_f64m2_m | ( | ... | ) | __riscv_vfwsub_vf_f64m2_tumu(__VA_ARGS__) |
| #define vfwsub_vf_f64m4 | ( | ... | ) | __riscv_vfwsub_vf_f64m4(__VA_ARGS__) |
| #define vfwsub_vf_f64m4_m | ( | ... | ) | __riscv_vfwsub_vf_f64m4_tumu(__VA_ARGS__) |
| #define vfwsub_vf_f64m8 | ( | ... | ) | __riscv_vfwsub_vf_f64m8(__VA_ARGS__) |
| #define vfwsub_vf_f64m8_m | ( | ... | ) | __riscv_vfwsub_vf_f64m8_tumu(__VA_ARGS__) |
| #define vfwsub_vv_f32m1 | ( | ... | ) | __riscv_vfwsub_vv_f32m1(__VA_ARGS__) |
| #define vfwsub_vv_f32m1_m | ( | ... | ) | __riscv_vfwsub_vv_f32m1_tumu(__VA_ARGS__) |
| #define vfwsub_vv_f32m2 | ( | ... | ) | __riscv_vfwsub_vv_f32m2(__VA_ARGS__) |
| #define vfwsub_vv_f32m2_m | ( | ... | ) | __riscv_vfwsub_vv_f32m2_tumu(__VA_ARGS__) |
| #define vfwsub_vv_f32m4 | ( | ... | ) | __riscv_vfwsub_vv_f32m4(__VA_ARGS__) |
| #define vfwsub_vv_f32m4_m | ( | ... | ) | __riscv_vfwsub_vv_f32m4_tumu(__VA_ARGS__) |
| #define vfwsub_vv_f32m8 | ( | ... | ) | __riscv_vfwsub_vv_f32m8(__VA_ARGS__) |
| #define vfwsub_vv_f32m8_m | ( | ... | ) | __riscv_vfwsub_vv_f32m8_tumu(__VA_ARGS__) |
| #define vfwsub_vv_f32mf2 | ( | ... | ) | __riscv_vfwsub_vv_f32mf2(__VA_ARGS__) |
| #define vfwsub_vv_f32mf2_m | ( | ... | ) | __riscv_vfwsub_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vfwsub_vv_f64m1 | ( | ... | ) | __riscv_vfwsub_vv_f64m1(__VA_ARGS__) |
| #define vfwsub_vv_f64m1_m | ( | ... | ) | __riscv_vfwsub_vv_f64m1_tumu(__VA_ARGS__) |
| #define vfwsub_vv_f64m2 | ( | ... | ) | __riscv_vfwsub_vv_f64m2(__VA_ARGS__) |
| #define vfwsub_vv_f64m2_m | ( | ... | ) | __riscv_vfwsub_vv_f64m2_tumu(__VA_ARGS__) |
| #define vfwsub_vv_f64m4 | ( | ... | ) | __riscv_vfwsub_vv_f64m4(__VA_ARGS__) |
| #define vfwsub_vv_f64m4_m | ( | ... | ) | __riscv_vfwsub_vv_f64m4_tumu(__VA_ARGS__) |
| #define vfwsub_vv_f64m8 | ( | ... | ) | __riscv_vfwsub_vv_f64m8(__VA_ARGS__) |
| #define vfwsub_vv_f64m8_m | ( | ... | ) | __riscv_vfwsub_vv_f64m8_tumu(__VA_ARGS__) |
| #define vfwsub_wf_f32m1 | ( | ... | ) | __riscv_vfwsub_wf_f32m1(__VA_ARGS__) |
| #define vfwsub_wf_f32m1_m | ( | ... | ) | __riscv_vfwsub_wf_f32m1_tumu(__VA_ARGS__) |
| #define vfwsub_wf_f32m2 | ( | ... | ) | __riscv_vfwsub_wf_f32m2(__VA_ARGS__) |
| #define vfwsub_wf_f32m2_m | ( | ... | ) | __riscv_vfwsub_wf_f32m2_tumu(__VA_ARGS__) |
| #define vfwsub_wf_f32m4 | ( | ... | ) | __riscv_vfwsub_wf_f32m4(__VA_ARGS__) |
| #define vfwsub_wf_f32m4_m | ( | ... | ) | __riscv_vfwsub_wf_f32m4_tumu(__VA_ARGS__) |
| #define vfwsub_wf_f32m8 | ( | ... | ) | __riscv_vfwsub_wf_f32m8(__VA_ARGS__) |
| #define vfwsub_wf_f32m8_m | ( | ... | ) | __riscv_vfwsub_wf_f32m8_tumu(__VA_ARGS__) |
| #define vfwsub_wf_f32mf2 | ( | ... | ) | __riscv_vfwsub_wf_f32mf2(__VA_ARGS__) |
| #define vfwsub_wf_f32mf2_m | ( | ... | ) | __riscv_vfwsub_wf_f32mf2_tumu(__VA_ARGS__) |
| #define vfwsub_wf_f64m1 | ( | ... | ) | __riscv_vfwsub_wf_f64m1(__VA_ARGS__) |
| #define vfwsub_wf_f64m1_m | ( | ... | ) | __riscv_vfwsub_wf_f64m1_tumu(__VA_ARGS__) |
| #define vfwsub_wf_f64m2 | ( | ... | ) | __riscv_vfwsub_wf_f64m2(__VA_ARGS__) |
| #define vfwsub_wf_f64m2_m | ( | ... | ) | __riscv_vfwsub_wf_f64m2_tumu(__VA_ARGS__) |
| #define vfwsub_wf_f64m4 | ( | ... | ) | __riscv_vfwsub_wf_f64m4(__VA_ARGS__) |
| #define vfwsub_wf_f64m4_m | ( | ... | ) | __riscv_vfwsub_wf_f64m4_tumu(__VA_ARGS__) |
| #define vfwsub_wf_f64m8 | ( | ... | ) | __riscv_vfwsub_wf_f64m8(__VA_ARGS__) |
| #define vfwsub_wf_f64m8_m | ( | ... | ) | __riscv_vfwsub_wf_f64m8_tumu(__VA_ARGS__) |
| #define vfwsub_wv_f32m1 | ( | ... | ) | __riscv_vfwsub_wv_f32m1(__VA_ARGS__) |
| #define vfwsub_wv_f32m1_m | ( | ... | ) | __riscv_vfwsub_wv_f32m1_tumu(__VA_ARGS__) |
| #define vfwsub_wv_f32m2 | ( | ... | ) | __riscv_vfwsub_wv_f32m2(__VA_ARGS__) |
| #define vfwsub_wv_f32m2_m | ( | ... | ) | __riscv_vfwsub_wv_f32m2_tumu(__VA_ARGS__) |
| #define vfwsub_wv_f32m4 | ( | ... | ) | __riscv_vfwsub_wv_f32m4(__VA_ARGS__) |
| #define vfwsub_wv_f32m4_m | ( | ... | ) | __riscv_vfwsub_wv_f32m4_tumu(__VA_ARGS__) |
| #define vfwsub_wv_f32m8 | ( | ... | ) | __riscv_vfwsub_wv_f32m8(__VA_ARGS__) |
| #define vfwsub_wv_f32m8_m | ( | ... | ) | __riscv_vfwsub_wv_f32m8_tumu(__VA_ARGS__) |
| #define vfwsub_wv_f32mf2 | ( | ... | ) | __riscv_vfwsub_wv_f32mf2(__VA_ARGS__) |
| #define vfwsub_wv_f32mf2_m | ( | ... | ) | __riscv_vfwsub_wv_f32mf2_tumu(__VA_ARGS__) |
| #define vfwsub_wv_f64m1 | ( | ... | ) | __riscv_vfwsub_wv_f64m1(__VA_ARGS__) |
| #define vfwsub_wv_f64m1_m | ( | ... | ) | __riscv_vfwsub_wv_f64m1_tumu(__VA_ARGS__) |
| #define vfwsub_wv_f64m2 | ( | ... | ) | __riscv_vfwsub_wv_f64m2(__VA_ARGS__) |
| #define vfwsub_wv_f64m2_m | ( | ... | ) | __riscv_vfwsub_wv_f64m2_tumu(__VA_ARGS__) |
| #define vfwsub_wv_f64m4 | ( | ... | ) | __riscv_vfwsub_wv_f64m4(__VA_ARGS__) |
| #define vfwsub_wv_f64m4_m | ( | ... | ) | __riscv_vfwsub_wv_f64m4_tumu(__VA_ARGS__) |
| #define vfwsub_wv_f64m8 | ( | ... | ) | __riscv_vfwsub_wv_f64m8(__VA_ARGS__) |
| #define vfwsub_wv_f64m8_m | ( | ... | ) | __riscv_vfwsub_wv_f64m8_tumu(__VA_ARGS__) |
| #define vget_v_f16m2_f16m1 | ( | ... | ) | __riscv_vget_v_f16m2_f16m1(__VA_ARGS__) |
| #define vget_v_f16m4_f16m1 | ( | ... | ) | __riscv_vget_v_f16m4_f16m1(__VA_ARGS__) |
| #define vget_v_f16m4_f16m2 | ( | ... | ) | __riscv_vget_v_f16m4_f16m2(__VA_ARGS__) |
| #define vget_v_f16m8_f16m1 | ( | ... | ) | __riscv_vget_v_f16m8_f16m1(__VA_ARGS__) |
| #define vget_v_f16m8_f16m2 | ( | ... | ) | __riscv_vget_v_f16m8_f16m2(__VA_ARGS__) |
| #define vget_v_f16m8_f16m4 | ( | ... | ) | __riscv_vget_v_f16m8_f16m4(__VA_ARGS__) |
| #define vget_v_f32m2_f32m1 | ( | ... | ) | __riscv_vget_v_f32m2_f32m1(__VA_ARGS__) |
| #define vget_v_f32m4_f32m1 | ( | ... | ) | __riscv_vget_v_f32m4_f32m1(__VA_ARGS__) |
| #define vget_v_f32m4_f32m2 | ( | ... | ) | __riscv_vget_v_f32m4_f32m2(__VA_ARGS__) |
| #define vget_v_f32m8_f32m1 | ( | ... | ) | __riscv_vget_v_f32m8_f32m1(__VA_ARGS__) |
| #define vget_v_f32m8_f32m2 | ( | ... | ) | __riscv_vget_v_f32m8_f32m2(__VA_ARGS__) |
| #define vget_v_f32m8_f32m4 | ( | ... | ) | __riscv_vget_v_f32m8_f32m4(__VA_ARGS__) |
| #define vget_v_f64m2_f64m1 | ( | ... | ) | __riscv_vget_v_f64m2_f64m1(__VA_ARGS__) |
| #define vget_v_f64m4_f64m1 | ( | ... | ) | __riscv_vget_v_f64m4_f64m1(__VA_ARGS__) |
| #define vget_v_f64m4_f64m2 | ( | ... | ) | __riscv_vget_v_f64m4_f64m2(__VA_ARGS__) |
| #define vget_v_f64m8_f64m1 | ( | ... | ) | __riscv_vget_v_f64m8_f64m1(__VA_ARGS__) |
| #define vget_v_f64m8_f64m2 | ( | ... | ) | __riscv_vget_v_f64m8_f64m2(__VA_ARGS__) |
| #define vget_v_f64m8_f64m4 | ( | ... | ) | __riscv_vget_v_f64m8_f64m4(__VA_ARGS__) |
| #define vget_v_i16m2_i16m1 | ( | ... | ) | __riscv_vget_v_i16m2_i16m1(__VA_ARGS__) |
| #define vget_v_i16m4_i16m1 | ( | ... | ) | __riscv_vget_v_i16m4_i16m1(__VA_ARGS__) |
| #define vget_v_i16m4_i16m2 | ( | ... | ) | __riscv_vget_v_i16m4_i16m2(__VA_ARGS__) |
| #define vget_v_i16m8_i16m1 | ( | ... | ) | __riscv_vget_v_i16m8_i16m1(__VA_ARGS__) |
| #define vget_v_i16m8_i16m2 | ( | ... | ) | __riscv_vget_v_i16m8_i16m2(__VA_ARGS__) |
| #define vget_v_i16m8_i16m4 | ( | ... | ) | __riscv_vget_v_i16m8_i16m4(__VA_ARGS__) |
| #define vget_v_i32m2_i32m1 | ( | ... | ) | __riscv_vget_v_i32m2_i32m1(__VA_ARGS__) |
| #define vget_v_i32m4_i32m1 | ( | ... | ) | __riscv_vget_v_i32m4_i32m1(__VA_ARGS__) |
| #define vget_v_i32m4_i32m2 | ( | ... | ) | __riscv_vget_v_i32m4_i32m2(__VA_ARGS__) |
| #define vget_v_i32m8_i32m1 | ( | ... | ) | __riscv_vget_v_i32m8_i32m1(__VA_ARGS__) |
| #define vget_v_i32m8_i32m2 | ( | ... | ) | __riscv_vget_v_i32m8_i32m2(__VA_ARGS__) |
| #define vget_v_i32m8_i32m4 | ( | ... | ) | __riscv_vget_v_i32m8_i32m4(__VA_ARGS__) |
| #define vget_v_i64m2_i64m1 | ( | ... | ) | __riscv_vget_v_i64m2_i64m1(__VA_ARGS__) |
| #define vget_v_i64m4_i64m1 | ( | ... | ) | __riscv_vget_v_i64m4_i64m1(__VA_ARGS__) |
| #define vget_v_i64m4_i64m2 | ( | ... | ) | __riscv_vget_v_i64m4_i64m2(__VA_ARGS__) |
| #define vget_v_i64m8_i64m1 | ( | ... | ) | __riscv_vget_v_i64m8_i64m1(__VA_ARGS__) |
| #define vget_v_i64m8_i64m2 | ( | ... | ) | __riscv_vget_v_i64m8_i64m2(__VA_ARGS__) |
| #define vget_v_i64m8_i64m4 | ( | ... | ) | __riscv_vget_v_i64m8_i64m4(__VA_ARGS__) |
| #define vget_v_i8m2_i8m1 | ( | ... | ) | __riscv_vget_v_i8m2_i8m1(__VA_ARGS__) |
| #define vget_v_i8m4_i8m1 | ( | ... | ) | __riscv_vget_v_i8m4_i8m1(__VA_ARGS__) |
| #define vget_v_i8m4_i8m2 | ( | ... | ) | __riscv_vget_v_i8m4_i8m2(__VA_ARGS__) |
| #define vget_v_i8m8_i8m1 | ( | ... | ) | __riscv_vget_v_i8m8_i8m1(__VA_ARGS__) |
| #define vget_v_i8m8_i8m2 | ( | ... | ) | __riscv_vget_v_i8m8_i8m2(__VA_ARGS__) |
| #define vget_v_i8m8_i8m4 | ( | ... | ) | __riscv_vget_v_i8m8_i8m4(__VA_ARGS__) |
| #define vget_v_u16m2_u16m1 | ( | ... | ) | __riscv_vget_v_u16m2_u16m1(__VA_ARGS__) |
| #define vget_v_u16m4_u16m1 | ( | ... | ) | __riscv_vget_v_u16m4_u16m1(__VA_ARGS__) |
| #define vget_v_u16m4_u16m2 | ( | ... | ) | __riscv_vget_v_u16m4_u16m2(__VA_ARGS__) |
| #define vget_v_u16m8_u16m1 | ( | ... | ) | __riscv_vget_v_u16m8_u16m1(__VA_ARGS__) |
| #define vget_v_u16m8_u16m2 | ( | ... | ) | __riscv_vget_v_u16m8_u16m2(__VA_ARGS__) |
| #define vget_v_u16m8_u16m4 | ( | ... | ) | __riscv_vget_v_u16m8_u16m4(__VA_ARGS__) |
| #define vget_v_u32m2_u32m1 | ( | ... | ) | __riscv_vget_v_u32m2_u32m1(__VA_ARGS__) |
| #define vget_v_u32m4_u32m1 | ( | ... | ) | __riscv_vget_v_u32m4_u32m1(__VA_ARGS__) |
| #define vget_v_u32m4_u32m2 | ( | ... | ) | __riscv_vget_v_u32m4_u32m2(__VA_ARGS__) |
| #define vget_v_u32m8_u32m1 | ( | ... | ) | __riscv_vget_v_u32m8_u32m1(__VA_ARGS__) |
| #define vget_v_u32m8_u32m2 | ( | ... | ) | __riscv_vget_v_u32m8_u32m2(__VA_ARGS__) |
| #define vget_v_u32m8_u32m4 | ( | ... | ) | __riscv_vget_v_u32m8_u32m4(__VA_ARGS__) |
| #define vget_v_u64m2_u64m1 | ( | ... | ) | __riscv_vget_v_u64m2_u64m1(__VA_ARGS__) |
| #define vget_v_u64m4_u64m1 | ( | ... | ) | __riscv_vget_v_u64m4_u64m1(__VA_ARGS__) |
| #define vget_v_u64m4_u64m2 | ( | ... | ) | __riscv_vget_v_u64m4_u64m2(__VA_ARGS__) |
| #define vget_v_u64m8_u64m1 | ( | ... | ) | __riscv_vget_v_u64m8_u64m1(__VA_ARGS__) |
| #define vget_v_u64m8_u64m2 | ( | ... | ) | __riscv_vget_v_u64m8_u64m2(__VA_ARGS__) |
| #define vget_v_u64m8_u64m4 | ( | ... | ) | __riscv_vget_v_u64m8_u64m4(__VA_ARGS__) |
| #define vget_v_u8m2_u8m1 | ( | ... | ) | __riscv_vget_v_u8m2_u8m1(__VA_ARGS__) |
| #define vget_v_u8m4_u8m1 | ( | ... | ) | __riscv_vget_v_u8m4_u8m1(__VA_ARGS__) |
| #define vget_v_u8m4_u8m2 | ( | ... | ) | __riscv_vget_v_u8m4_u8m2(__VA_ARGS__) |
| #define vget_v_u8m8_u8m1 | ( | ... | ) | __riscv_vget_v_u8m8_u8m1(__VA_ARGS__) |
| #define vget_v_u8m8_u8m2 | ( | ... | ) | __riscv_vget_v_u8m8_u8m2(__VA_ARGS__) |
| #define vget_v_u8m8_u8m4 | ( | ... | ) | __riscv_vget_v_u8m8_u8m4(__VA_ARGS__) |
| #define vid_v_u16m1 | ( | ... | ) | __riscv_vid_v_u16m1(__VA_ARGS__) |
| #define vid_v_u16m1_m | ( | ... | ) | __riscv_vid_v_u16m1_tumu(__VA_ARGS__) |
| #define vid_v_u16m2 | ( | ... | ) | __riscv_vid_v_u16m2(__VA_ARGS__) |
| #define vid_v_u16m2_m | ( | ... | ) | __riscv_vid_v_u16m2_tumu(__VA_ARGS__) |
| #define vid_v_u16m4 | ( | ... | ) | __riscv_vid_v_u16m4(__VA_ARGS__) |
| #define vid_v_u16m4_m | ( | ... | ) | __riscv_vid_v_u16m4_tumu(__VA_ARGS__) |
| #define vid_v_u16m8 | ( | ... | ) | __riscv_vid_v_u16m8(__VA_ARGS__) |
| #define vid_v_u16m8_m | ( | ... | ) | __riscv_vid_v_u16m8_tumu(__VA_ARGS__) |
| #define vid_v_u16mf2 | ( | ... | ) | __riscv_vid_v_u16mf2(__VA_ARGS__) |
| #define vid_v_u16mf2_m | ( | ... | ) | __riscv_vid_v_u16mf2_tumu(__VA_ARGS__) |
| #define vid_v_u16mf4 | ( | ... | ) | __riscv_vid_v_u16mf4(__VA_ARGS__) |
| #define vid_v_u16mf4_m | ( | ... | ) | __riscv_vid_v_u16mf4_tumu(__VA_ARGS__) |
| #define vid_v_u32m1 | ( | ... | ) | __riscv_vid_v_u32m1(__VA_ARGS__) |
| #define vid_v_u32m1_m | ( | ... | ) | __riscv_vid_v_u32m1_tumu(__VA_ARGS__) |
| #define vid_v_u32m2 | ( | ... | ) | __riscv_vid_v_u32m2(__VA_ARGS__) |
| #define vid_v_u32m2_m | ( | ... | ) | __riscv_vid_v_u32m2_tumu(__VA_ARGS__) |
| #define vid_v_u32m4 | ( | ... | ) | __riscv_vid_v_u32m4(__VA_ARGS__) |
| #define vid_v_u32m4_m | ( | ... | ) | __riscv_vid_v_u32m4_tumu(__VA_ARGS__) |
| #define vid_v_u32m8 | ( | ... | ) | __riscv_vid_v_u32m8(__VA_ARGS__) |
| #define vid_v_u32m8_m | ( | ... | ) | __riscv_vid_v_u32m8_tumu(__VA_ARGS__) |
| #define vid_v_u32mf2 | ( | ... | ) | __riscv_vid_v_u32mf2(__VA_ARGS__) |
| #define vid_v_u32mf2_m | ( | ... | ) | __riscv_vid_v_u32mf2_tumu(__VA_ARGS__) |
| #define vid_v_u64m1 | ( | ... | ) | __riscv_vid_v_u64m1(__VA_ARGS__) |
| #define vid_v_u64m1_m | ( | ... | ) | __riscv_vid_v_u64m1_tumu(__VA_ARGS__) |
| #define vid_v_u64m2 | ( | ... | ) | __riscv_vid_v_u64m2(__VA_ARGS__) |
| #define vid_v_u64m2_m | ( | ... | ) | __riscv_vid_v_u64m2_tumu(__VA_ARGS__) |
| #define vid_v_u64m4 | ( | ... | ) | __riscv_vid_v_u64m4(__VA_ARGS__) |
| #define vid_v_u64m4_m | ( | ... | ) | __riscv_vid_v_u64m4_tumu(__VA_ARGS__) |
| #define vid_v_u64m8 | ( | ... | ) | __riscv_vid_v_u64m8(__VA_ARGS__) |
| #define vid_v_u64m8_m | ( | ... | ) | __riscv_vid_v_u64m8_tumu(__VA_ARGS__) |
| #define vid_v_u8m1 | ( | ... | ) | __riscv_vid_v_u8m1(__VA_ARGS__) |
| #define vid_v_u8m1_m | ( | ... | ) | __riscv_vid_v_u8m1_tumu(__VA_ARGS__) |
| #define vid_v_u8m2 | ( | ... | ) | __riscv_vid_v_u8m2(__VA_ARGS__) |
| #define vid_v_u8m2_m | ( | ... | ) | __riscv_vid_v_u8m2_tumu(__VA_ARGS__) |
| #define vid_v_u8m4 | ( | ... | ) | __riscv_vid_v_u8m4(__VA_ARGS__) |
| #define vid_v_u8m4_m | ( | ... | ) | __riscv_vid_v_u8m4_tumu(__VA_ARGS__) |
| #define vid_v_u8m8 | ( | ... | ) | __riscv_vid_v_u8m8(__VA_ARGS__) |
| #define vid_v_u8m8_m | ( | ... | ) | __riscv_vid_v_u8m8_tumu(__VA_ARGS__) |
| #define vid_v_u8mf2 | ( | ... | ) | __riscv_vid_v_u8mf2(__VA_ARGS__) |
| #define vid_v_u8mf2_m | ( | ... | ) | __riscv_vid_v_u8mf2_tumu(__VA_ARGS__) |
| #define vid_v_u8mf4 | ( | ... | ) | __riscv_vid_v_u8mf4(__VA_ARGS__) |
| #define vid_v_u8mf4_m | ( | ... | ) | __riscv_vid_v_u8mf4_tumu(__VA_ARGS__) |
| #define vid_v_u8mf8 | ( | ... | ) | __riscv_vid_v_u8mf8(__VA_ARGS__) |
| #define vid_v_u8mf8_m | ( | ... | ) | __riscv_vid_v_u8mf8_tumu(__VA_ARGS__) |
| #define viota_m_u16m1 | ( | ... | ) | __riscv_viota_m_u16m1(__VA_ARGS__) |
| #define viota_m_u16m1_m | ( | ... | ) | __riscv_viota_m_u16m1_tumu(__VA_ARGS__) |
| #define viota_m_u16m2 | ( | ... | ) | __riscv_viota_m_u16m2(__VA_ARGS__) |
| #define viota_m_u16m2_m | ( | ... | ) | __riscv_viota_m_u16m2_tumu(__VA_ARGS__) |
| #define viota_m_u16m4 | ( | ... | ) | __riscv_viota_m_u16m4(__VA_ARGS__) |
| #define viota_m_u16m4_m | ( | ... | ) | __riscv_viota_m_u16m4_tumu(__VA_ARGS__) |
| #define viota_m_u16m8 | ( | ... | ) | __riscv_viota_m_u16m8(__VA_ARGS__) |
| #define viota_m_u16m8_m | ( | ... | ) | __riscv_viota_m_u16m8_tumu(__VA_ARGS__) |
| #define viota_m_u16mf2 | ( | ... | ) | __riscv_viota_m_u16mf2(__VA_ARGS__) |
| #define viota_m_u16mf2_m | ( | ... | ) | __riscv_viota_m_u16mf2_tumu(__VA_ARGS__) |
| #define viota_m_u16mf4 | ( | ... | ) | __riscv_viota_m_u16mf4(__VA_ARGS__) |
| #define viota_m_u16mf4_m | ( | ... | ) | __riscv_viota_m_u16mf4_tumu(__VA_ARGS__) |
| #define viota_m_u32m1 | ( | ... | ) | __riscv_viota_m_u32m1(__VA_ARGS__) |
| #define viota_m_u32m1_m | ( | ... | ) | __riscv_viota_m_u32m1_tumu(__VA_ARGS__) |
| #define viota_m_u32m2 | ( | ... | ) | __riscv_viota_m_u32m2(__VA_ARGS__) |
| #define viota_m_u32m2_m | ( | ... | ) | __riscv_viota_m_u32m2_tumu(__VA_ARGS__) |
| #define viota_m_u32m4 | ( | ... | ) | __riscv_viota_m_u32m4(__VA_ARGS__) |
| #define viota_m_u32m4_m | ( | ... | ) | __riscv_viota_m_u32m4_tumu(__VA_ARGS__) |
| #define viota_m_u32m8 | ( | ... | ) | __riscv_viota_m_u32m8(__VA_ARGS__) |
| #define viota_m_u32m8_m | ( | ... | ) | __riscv_viota_m_u32m8_tumu(__VA_ARGS__) |
| #define viota_m_u32mf2 | ( | ... | ) | __riscv_viota_m_u32mf2(__VA_ARGS__) |
| #define viota_m_u32mf2_m | ( | ... | ) | __riscv_viota_m_u32mf2_tumu(__VA_ARGS__) |
| #define viota_m_u64m1 | ( | ... | ) | __riscv_viota_m_u64m1(__VA_ARGS__) |
| #define viota_m_u64m1_m | ( | ... | ) | __riscv_viota_m_u64m1_tumu(__VA_ARGS__) |
| #define viota_m_u64m2 | ( | ... | ) | __riscv_viota_m_u64m2(__VA_ARGS__) |
| #define viota_m_u64m2_m | ( | ... | ) | __riscv_viota_m_u64m2_tumu(__VA_ARGS__) |
| #define viota_m_u64m4 | ( | ... | ) | __riscv_viota_m_u64m4(__VA_ARGS__) |
| #define viota_m_u64m4_m | ( | ... | ) | __riscv_viota_m_u64m4_tumu(__VA_ARGS__) |
| #define viota_m_u64m8 | ( | ... | ) | __riscv_viota_m_u64m8(__VA_ARGS__) |
| #define viota_m_u64m8_m | ( | ... | ) | __riscv_viota_m_u64m8_tumu(__VA_ARGS__) |
| #define viota_m_u8m1 | ( | ... | ) | __riscv_viota_m_u8m1(__VA_ARGS__) |
| #define viota_m_u8m1_m | ( | ... | ) | __riscv_viota_m_u8m1_tumu(__VA_ARGS__) |
| #define viota_m_u8m2 | ( | ... | ) | __riscv_viota_m_u8m2(__VA_ARGS__) |
| #define viota_m_u8m2_m | ( | ... | ) | __riscv_viota_m_u8m2_tumu(__VA_ARGS__) |
| #define viota_m_u8m4 | ( | ... | ) | __riscv_viota_m_u8m4(__VA_ARGS__) |
| #define viota_m_u8m4_m | ( | ... | ) | __riscv_viota_m_u8m4_tumu(__VA_ARGS__) |
| #define viota_m_u8m8 | ( | ... | ) | __riscv_viota_m_u8m8(__VA_ARGS__) |
| #define viota_m_u8m8_m | ( | ... | ) | __riscv_viota_m_u8m8_tumu(__VA_ARGS__) |
| #define viota_m_u8mf2 | ( | ... | ) | __riscv_viota_m_u8mf2(__VA_ARGS__) |
| #define viota_m_u8mf2_m | ( | ... | ) | __riscv_viota_m_u8mf2_tumu(__VA_ARGS__) |
| #define viota_m_u8mf4 | ( | ... | ) | __riscv_viota_m_u8mf4(__VA_ARGS__) |
| #define viota_m_u8mf4_m | ( | ... | ) | __riscv_viota_m_u8mf4_tumu(__VA_ARGS__) |
| #define viota_m_u8mf8 | ( | ... | ) | __riscv_viota_m_u8mf8(__VA_ARGS__) |
| #define viota_m_u8mf8_m | ( | ... | ) | __riscv_viota_m_u8mf8_tumu(__VA_ARGS__) |
| #define vle16_v_f16m1 | ( | ... | ) | __riscv_vle16_v_f16m1(__VA_ARGS__) |
| #define vle16_v_f16m1_m | ( | ... | ) | __riscv_vle16_v_f16m1_tumu(__VA_ARGS__) |
| #define vle16_v_f16m2 | ( | ... | ) | __riscv_vle16_v_f16m2(__VA_ARGS__) |
| #define vle16_v_f16m2_m | ( | ... | ) | __riscv_vle16_v_f16m2_tumu(__VA_ARGS__) |
| #define vle16_v_f16m4 | ( | ... | ) | __riscv_vle16_v_f16m4(__VA_ARGS__) |
| #define vle16_v_f16m4_m | ( | ... | ) | __riscv_vle16_v_f16m4_tumu(__VA_ARGS__) |
| #define vle16_v_f16m8 | ( | ... | ) | __riscv_vle16_v_f16m8(__VA_ARGS__) |
| #define vle16_v_f16m8_m | ( | ... | ) | __riscv_vle16_v_f16m8_tumu(__VA_ARGS__) |
| #define vle16_v_f16mf2 | ( | ... | ) | __riscv_vle16_v_f16mf2(__VA_ARGS__) |
| #define vle16_v_f16mf2_m | ( | ... | ) | __riscv_vle16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vle16_v_f16mf4 | ( | ... | ) | __riscv_vle16_v_f16mf4(__VA_ARGS__) |
| #define vle16_v_f16mf4_m | ( | ... | ) | __riscv_vle16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vle16_v_i16m1 | ( | ... | ) | __riscv_vle16_v_i16m1(__VA_ARGS__) |
| #define vle16_v_i16m1_m | ( | ... | ) | __riscv_vle16_v_i16m1_tumu(__VA_ARGS__) |
| #define vle16_v_i16m2 | ( | ... | ) | __riscv_vle16_v_i16m2(__VA_ARGS__) |
| #define vle16_v_i16m2_m | ( | ... | ) | __riscv_vle16_v_i16m2_tumu(__VA_ARGS__) |
| #define vle16_v_i16m4 | ( | ... | ) | __riscv_vle16_v_i16m4(__VA_ARGS__) |
| #define vle16_v_i16m4_m | ( | ... | ) | __riscv_vle16_v_i16m4_tumu(__VA_ARGS__) |
| #define vle16_v_i16m8 | ( | ... | ) | __riscv_vle16_v_i16m8(__VA_ARGS__) |
| #define vle16_v_i16m8_m | ( | ... | ) | __riscv_vle16_v_i16m8_tumu(__VA_ARGS__) |
| #define vle16_v_i16mf2 | ( | ... | ) | __riscv_vle16_v_i16mf2(__VA_ARGS__) |
| #define vle16_v_i16mf2_m | ( | ... | ) | __riscv_vle16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vle16_v_i16mf4 | ( | ... | ) | __riscv_vle16_v_i16mf4(__VA_ARGS__) |
| #define vle16_v_i16mf4_m | ( | ... | ) | __riscv_vle16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vle16_v_u16m1 | ( | ... | ) | __riscv_vle16_v_u16m1(__VA_ARGS__) |
| #define vle16_v_u16m1_m | ( | ... | ) | __riscv_vle16_v_u16m1_tumu(__VA_ARGS__) |
| #define vle16_v_u16m2 | ( | ... | ) | __riscv_vle16_v_u16m2(__VA_ARGS__) |
| #define vle16_v_u16m2_m | ( | ... | ) | __riscv_vle16_v_u16m2_tumu(__VA_ARGS__) |
| #define vle16_v_u16m4 | ( | ... | ) | __riscv_vle16_v_u16m4(__VA_ARGS__) |
| #define vle16_v_u16m4_m | ( | ... | ) | __riscv_vle16_v_u16m4_tumu(__VA_ARGS__) |
| #define vle16_v_u16m8 | ( | ... | ) | __riscv_vle16_v_u16m8(__VA_ARGS__) |
| #define vle16_v_u16m8_m | ( | ... | ) | __riscv_vle16_v_u16m8_tumu(__VA_ARGS__) |
| #define vle16_v_u16mf2 | ( | ... | ) | __riscv_vle16_v_u16mf2(__VA_ARGS__) |
| #define vle16_v_u16mf2_m | ( | ... | ) | __riscv_vle16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vle16_v_u16mf4 | ( | ... | ) | __riscv_vle16_v_u16mf4(__VA_ARGS__) |
| #define vle16_v_u16mf4_m | ( | ... | ) | __riscv_vle16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vle16ff_v_f16m1 | ( | ... | ) | __riscv_vle16ff_v_f16m1(__VA_ARGS__) |
| #define vle16ff_v_f16m1_m | ( | ... | ) | __riscv_vle16ff_v_f16m1_tumu(__VA_ARGS__) |
| #define vle16ff_v_f16m2 | ( | ... | ) | __riscv_vle16ff_v_f16m2(__VA_ARGS__) |
| #define vle16ff_v_f16m2_m | ( | ... | ) | __riscv_vle16ff_v_f16m2_tumu(__VA_ARGS__) |
| #define vle16ff_v_f16m4 | ( | ... | ) | __riscv_vle16ff_v_f16m4(__VA_ARGS__) |
| #define vle16ff_v_f16m4_m | ( | ... | ) | __riscv_vle16ff_v_f16m4_tumu(__VA_ARGS__) |
| #define vle16ff_v_f16m8 | ( | ... | ) | __riscv_vle16ff_v_f16m8(__VA_ARGS__) |
| #define vle16ff_v_f16m8_m | ( | ... | ) | __riscv_vle16ff_v_f16m8_tumu(__VA_ARGS__) |
| #define vle16ff_v_f16mf2 | ( | ... | ) | __riscv_vle16ff_v_f16mf2(__VA_ARGS__) |
| #define vle16ff_v_f16mf2_m | ( | ... | ) | __riscv_vle16ff_v_f16mf2_tumu(__VA_ARGS__) |
| #define vle16ff_v_f16mf4 | ( | ... | ) | __riscv_vle16ff_v_f16mf4(__VA_ARGS__) |
| #define vle16ff_v_f16mf4_m | ( | ... | ) | __riscv_vle16ff_v_f16mf4_tumu(__VA_ARGS__) |
| #define vle16ff_v_i16m1 | ( | ... | ) | __riscv_vle16ff_v_i16m1(__VA_ARGS__) |
| #define vle16ff_v_i16m1_m | ( | ... | ) | __riscv_vle16ff_v_i16m1_tumu(__VA_ARGS__) |
| #define vle16ff_v_i16m2 | ( | ... | ) | __riscv_vle16ff_v_i16m2(__VA_ARGS__) |
| #define vle16ff_v_i16m2_m | ( | ... | ) | __riscv_vle16ff_v_i16m2_tumu(__VA_ARGS__) |
| #define vle16ff_v_i16m4 | ( | ... | ) | __riscv_vle16ff_v_i16m4(__VA_ARGS__) |
| #define vle16ff_v_i16m4_m | ( | ... | ) | __riscv_vle16ff_v_i16m4_tumu(__VA_ARGS__) |
| #define vle16ff_v_i16m8 | ( | ... | ) | __riscv_vle16ff_v_i16m8(__VA_ARGS__) |
| #define vle16ff_v_i16m8_m | ( | ... | ) | __riscv_vle16ff_v_i16m8_tumu(__VA_ARGS__) |
| #define vle16ff_v_i16mf2 | ( | ... | ) | __riscv_vle16ff_v_i16mf2(__VA_ARGS__) |
| #define vle16ff_v_i16mf2_m | ( | ... | ) | __riscv_vle16ff_v_i16mf2_tumu(__VA_ARGS__) |
| #define vle16ff_v_i16mf4 | ( | ... | ) | __riscv_vle16ff_v_i16mf4(__VA_ARGS__) |
| #define vle16ff_v_i16mf4_m | ( | ... | ) | __riscv_vle16ff_v_i16mf4_tumu(__VA_ARGS__) |
| #define vle16ff_v_u16m1 | ( | ... | ) | __riscv_vle16ff_v_u16m1(__VA_ARGS__) |
| #define vle16ff_v_u16m1_m | ( | ... | ) | __riscv_vle16ff_v_u16m1_tumu(__VA_ARGS__) |
| #define vle16ff_v_u16m2 | ( | ... | ) | __riscv_vle16ff_v_u16m2(__VA_ARGS__) |
| #define vle16ff_v_u16m2_m | ( | ... | ) | __riscv_vle16ff_v_u16m2_tumu(__VA_ARGS__) |
| #define vle16ff_v_u16m4 | ( | ... | ) | __riscv_vle16ff_v_u16m4(__VA_ARGS__) |
| #define vle16ff_v_u16m4_m | ( | ... | ) | __riscv_vle16ff_v_u16m4_tumu(__VA_ARGS__) |
| #define vle16ff_v_u16m8 | ( | ... | ) | __riscv_vle16ff_v_u16m8(__VA_ARGS__) |
| #define vle16ff_v_u16m8_m | ( | ... | ) | __riscv_vle16ff_v_u16m8_tumu(__VA_ARGS__) |
| #define vle16ff_v_u16mf2 | ( | ... | ) | __riscv_vle16ff_v_u16mf2(__VA_ARGS__) |
| #define vle16ff_v_u16mf2_m | ( | ... | ) | __riscv_vle16ff_v_u16mf2_tumu(__VA_ARGS__) |
| #define vle16ff_v_u16mf4 | ( | ... | ) | __riscv_vle16ff_v_u16mf4(__VA_ARGS__) |
| #define vle16ff_v_u16mf4_m | ( | ... | ) | __riscv_vle16ff_v_u16mf4_tumu(__VA_ARGS__) |
| #define vle32_v_f32m1 | ( | ... | ) | __riscv_vle32_v_f32m1(__VA_ARGS__) |
| #define vle32_v_f32m1_m | ( | ... | ) | __riscv_vle32_v_f32m1_tumu(__VA_ARGS__) |
| #define vle32_v_f32m2 | ( | ... | ) | __riscv_vle32_v_f32m2(__VA_ARGS__) |
| #define vle32_v_f32m2_m | ( | ... | ) | __riscv_vle32_v_f32m2_tumu(__VA_ARGS__) |
| #define vle32_v_f32m4 | ( | ... | ) | __riscv_vle32_v_f32m4(__VA_ARGS__) |
| #define vle32_v_f32m4_m | ( | ... | ) | __riscv_vle32_v_f32m4_tumu(__VA_ARGS__) |
| #define vle32_v_f32m8 | ( | ... | ) | __riscv_vle32_v_f32m8(__VA_ARGS__) |
| #define vle32_v_f32m8_m | ( | ... | ) | __riscv_vle32_v_f32m8_tumu(__VA_ARGS__) |
| #define vle32_v_f32mf2 | ( | ... | ) | __riscv_vle32_v_f32mf2(__VA_ARGS__) |
| #define vle32_v_f32mf2_m | ( | ... | ) | __riscv_vle32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vle32_v_i32m1 | ( | ... | ) | __riscv_vle32_v_i32m1(__VA_ARGS__) |
| #define vle32_v_i32m1_m | ( | ... | ) | __riscv_vle32_v_i32m1_tumu(__VA_ARGS__) |
| #define vle32_v_i32m2 | ( | ... | ) | __riscv_vle32_v_i32m2(__VA_ARGS__) |
| #define vle32_v_i32m2_m | ( | ... | ) | __riscv_vle32_v_i32m2_tumu(__VA_ARGS__) |
| #define vle32_v_i32m4 | ( | ... | ) | __riscv_vle32_v_i32m4(__VA_ARGS__) |
| #define vle32_v_i32m4_m | ( | ... | ) | __riscv_vle32_v_i32m4_tumu(__VA_ARGS__) |
| #define vle32_v_i32m8 | ( | ... | ) | __riscv_vle32_v_i32m8(__VA_ARGS__) |
| #define vle32_v_i32m8_m | ( | ... | ) | __riscv_vle32_v_i32m8_tumu(__VA_ARGS__) |
| #define vle32_v_i32mf2 | ( | ... | ) | __riscv_vle32_v_i32mf2(__VA_ARGS__) |
| #define vle32_v_i32mf2_m | ( | ... | ) | __riscv_vle32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vle32_v_u32m1 | ( | ... | ) | __riscv_vle32_v_u32m1(__VA_ARGS__) |
| #define vle32_v_u32m1_m | ( | ... | ) | __riscv_vle32_v_u32m1_tumu(__VA_ARGS__) |
| #define vle32_v_u32m2 | ( | ... | ) | __riscv_vle32_v_u32m2(__VA_ARGS__) |
| #define vle32_v_u32m2_m | ( | ... | ) | __riscv_vle32_v_u32m2_tumu(__VA_ARGS__) |
| #define vle32_v_u32m4 | ( | ... | ) | __riscv_vle32_v_u32m4(__VA_ARGS__) |
| #define vle32_v_u32m4_m | ( | ... | ) | __riscv_vle32_v_u32m4_tumu(__VA_ARGS__) |
| #define vle32_v_u32m8 | ( | ... | ) | __riscv_vle32_v_u32m8(__VA_ARGS__) |
| #define vle32_v_u32m8_m | ( | ... | ) | __riscv_vle32_v_u32m8_tumu(__VA_ARGS__) |
| #define vle32_v_u32mf2 | ( | ... | ) | __riscv_vle32_v_u32mf2(__VA_ARGS__) |
| #define vle32_v_u32mf2_m | ( | ... | ) | __riscv_vle32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vle32ff_v_f32m1 | ( | ... | ) | __riscv_vle32ff_v_f32m1(__VA_ARGS__) |
| #define vle32ff_v_f32m1_m | ( | ... | ) | __riscv_vle32ff_v_f32m1_tumu(__VA_ARGS__) |
| #define vle32ff_v_f32m2 | ( | ... | ) | __riscv_vle32ff_v_f32m2(__VA_ARGS__) |
| #define vle32ff_v_f32m2_m | ( | ... | ) | __riscv_vle32ff_v_f32m2_tumu(__VA_ARGS__) |
| #define vle32ff_v_f32m4 | ( | ... | ) | __riscv_vle32ff_v_f32m4(__VA_ARGS__) |
| #define vle32ff_v_f32m4_m | ( | ... | ) | __riscv_vle32ff_v_f32m4_tumu(__VA_ARGS__) |
| #define vle32ff_v_f32m8 | ( | ... | ) | __riscv_vle32ff_v_f32m8(__VA_ARGS__) |
| #define vle32ff_v_f32m8_m | ( | ... | ) | __riscv_vle32ff_v_f32m8_tumu(__VA_ARGS__) |
| #define vle32ff_v_f32mf2 | ( | ... | ) | __riscv_vle32ff_v_f32mf2(__VA_ARGS__) |
| #define vle32ff_v_f32mf2_m | ( | ... | ) | __riscv_vle32ff_v_f32mf2_tumu(__VA_ARGS__) |
| #define vle32ff_v_i32m1 | ( | ... | ) | __riscv_vle32ff_v_i32m1(__VA_ARGS__) |
| #define vle32ff_v_i32m1_m | ( | ... | ) | __riscv_vle32ff_v_i32m1_tumu(__VA_ARGS__) |
| #define vle32ff_v_i32m2 | ( | ... | ) | __riscv_vle32ff_v_i32m2(__VA_ARGS__) |
| #define vle32ff_v_i32m2_m | ( | ... | ) | __riscv_vle32ff_v_i32m2_tumu(__VA_ARGS__) |
| #define vle32ff_v_i32m4 | ( | ... | ) | __riscv_vle32ff_v_i32m4(__VA_ARGS__) |
| #define vle32ff_v_i32m4_m | ( | ... | ) | __riscv_vle32ff_v_i32m4_tumu(__VA_ARGS__) |
| #define vle32ff_v_i32m8 | ( | ... | ) | __riscv_vle32ff_v_i32m8(__VA_ARGS__) |
| #define vle32ff_v_i32m8_m | ( | ... | ) | __riscv_vle32ff_v_i32m8_tumu(__VA_ARGS__) |
| #define vle32ff_v_i32mf2 | ( | ... | ) | __riscv_vle32ff_v_i32mf2(__VA_ARGS__) |
| #define vle32ff_v_i32mf2_m | ( | ... | ) | __riscv_vle32ff_v_i32mf2_tumu(__VA_ARGS__) |
| #define vle32ff_v_u32m1 | ( | ... | ) | __riscv_vle32ff_v_u32m1(__VA_ARGS__) |
| #define vle32ff_v_u32m1_m | ( | ... | ) | __riscv_vle32ff_v_u32m1_tumu(__VA_ARGS__) |
| #define vle32ff_v_u32m2 | ( | ... | ) | __riscv_vle32ff_v_u32m2(__VA_ARGS__) |
| #define vle32ff_v_u32m2_m | ( | ... | ) | __riscv_vle32ff_v_u32m2_tumu(__VA_ARGS__) |
| #define vle32ff_v_u32m4 | ( | ... | ) | __riscv_vle32ff_v_u32m4(__VA_ARGS__) |
| #define vle32ff_v_u32m4_m | ( | ... | ) | __riscv_vle32ff_v_u32m4_tumu(__VA_ARGS__) |
| #define vle32ff_v_u32m8 | ( | ... | ) | __riscv_vle32ff_v_u32m8(__VA_ARGS__) |
| #define vle32ff_v_u32m8_m | ( | ... | ) | __riscv_vle32ff_v_u32m8_tumu(__VA_ARGS__) |
| #define vle32ff_v_u32mf2 | ( | ... | ) | __riscv_vle32ff_v_u32mf2(__VA_ARGS__) |
| #define vle32ff_v_u32mf2_m | ( | ... | ) | __riscv_vle32ff_v_u32mf2_tumu(__VA_ARGS__) |
| #define vle64_v_f64m1 | ( | ... | ) | __riscv_vle64_v_f64m1(__VA_ARGS__) |
| #define vle64_v_f64m1_m | ( | ... | ) | __riscv_vle64_v_f64m1_tumu(__VA_ARGS__) |
| #define vle64_v_f64m2 | ( | ... | ) | __riscv_vle64_v_f64m2(__VA_ARGS__) |
| #define vle64_v_f64m2_m | ( | ... | ) | __riscv_vle64_v_f64m2_tumu(__VA_ARGS__) |
| #define vle64_v_f64m4 | ( | ... | ) | __riscv_vle64_v_f64m4(__VA_ARGS__) |
| #define vle64_v_f64m4_m | ( | ... | ) | __riscv_vle64_v_f64m4_tumu(__VA_ARGS__) |
| #define vle64_v_f64m8 | ( | ... | ) | __riscv_vle64_v_f64m8(__VA_ARGS__) |
| #define vle64_v_f64m8_m | ( | ... | ) | __riscv_vle64_v_f64m8_tumu(__VA_ARGS__) |
| #define vle64_v_i64m1 | ( | ... | ) | __riscv_vle64_v_i64m1(__VA_ARGS__) |
| #define vle64_v_i64m1_m | ( | ... | ) | __riscv_vle64_v_i64m1_tumu(__VA_ARGS__) |
| #define vle64_v_i64m2 | ( | ... | ) | __riscv_vle64_v_i64m2(__VA_ARGS__) |
| #define vle64_v_i64m2_m | ( | ... | ) | __riscv_vle64_v_i64m2_tumu(__VA_ARGS__) |
| #define vle64_v_i64m4 | ( | ... | ) | __riscv_vle64_v_i64m4(__VA_ARGS__) |
| #define vle64_v_i64m4_m | ( | ... | ) | __riscv_vle64_v_i64m4_tumu(__VA_ARGS__) |
| #define vle64_v_i64m8 | ( | ... | ) | __riscv_vle64_v_i64m8(__VA_ARGS__) |
| #define vle64_v_i64m8_m | ( | ... | ) | __riscv_vle64_v_i64m8_tumu(__VA_ARGS__) |
| #define vle64_v_u64m1 | ( | ... | ) | __riscv_vle64_v_u64m1(__VA_ARGS__) |
| #define vle64_v_u64m1_m | ( | ... | ) | __riscv_vle64_v_u64m1_tumu(__VA_ARGS__) |
| #define vle64_v_u64m2 | ( | ... | ) | __riscv_vle64_v_u64m2(__VA_ARGS__) |
| #define vle64_v_u64m2_m | ( | ... | ) | __riscv_vle64_v_u64m2_tumu(__VA_ARGS__) |
| #define vle64_v_u64m4 | ( | ... | ) | __riscv_vle64_v_u64m4(__VA_ARGS__) |
| #define vle64_v_u64m4_m | ( | ... | ) | __riscv_vle64_v_u64m4_tumu(__VA_ARGS__) |
| #define vle64_v_u64m8 | ( | ... | ) | __riscv_vle64_v_u64m8(__VA_ARGS__) |
| #define vle64_v_u64m8_m | ( | ... | ) | __riscv_vle64_v_u64m8_tumu(__VA_ARGS__) |
| #define vle64ff_v_f64m1 | ( | ... | ) | __riscv_vle64ff_v_f64m1(__VA_ARGS__) |
| #define vle64ff_v_f64m1_m | ( | ... | ) | __riscv_vle64ff_v_f64m1_tumu(__VA_ARGS__) |
| #define vle64ff_v_f64m2 | ( | ... | ) | __riscv_vle64ff_v_f64m2(__VA_ARGS__) |
| #define vle64ff_v_f64m2_m | ( | ... | ) | __riscv_vle64ff_v_f64m2_tumu(__VA_ARGS__) |
| #define vle64ff_v_f64m4 | ( | ... | ) | __riscv_vle64ff_v_f64m4(__VA_ARGS__) |
| #define vle64ff_v_f64m4_m | ( | ... | ) | __riscv_vle64ff_v_f64m4_tumu(__VA_ARGS__) |
| #define vle64ff_v_f64m8 | ( | ... | ) | __riscv_vle64ff_v_f64m8(__VA_ARGS__) |
| #define vle64ff_v_f64m8_m | ( | ... | ) | __riscv_vle64ff_v_f64m8_tumu(__VA_ARGS__) |
| #define vle64ff_v_i64m1 | ( | ... | ) | __riscv_vle64ff_v_i64m1(__VA_ARGS__) |
| #define vle64ff_v_i64m1_m | ( | ... | ) | __riscv_vle64ff_v_i64m1_tumu(__VA_ARGS__) |
| #define vle64ff_v_i64m2 | ( | ... | ) | __riscv_vle64ff_v_i64m2(__VA_ARGS__) |
| #define vle64ff_v_i64m2_m | ( | ... | ) | __riscv_vle64ff_v_i64m2_tumu(__VA_ARGS__) |
| #define vle64ff_v_i64m4 | ( | ... | ) | __riscv_vle64ff_v_i64m4(__VA_ARGS__) |
| #define vle64ff_v_i64m4_m | ( | ... | ) | __riscv_vle64ff_v_i64m4_tumu(__VA_ARGS__) |
| #define vle64ff_v_i64m8 | ( | ... | ) | __riscv_vle64ff_v_i64m8(__VA_ARGS__) |
| #define vle64ff_v_i64m8_m | ( | ... | ) | __riscv_vle64ff_v_i64m8_tumu(__VA_ARGS__) |
| #define vle64ff_v_u64m1 | ( | ... | ) | __riscv_vle64ff_v_u64m1(__VA_ARGS__) |
| #define vle64ff_v_u64m1_m | ( | ... | ) | __riscv_vle64ff_v_u64m1_tumu(__VA_ARGS__) |
| #define vle64ff_v_u64m2 | ( | ... | ) | __riscv_vle64ff_v_u64m2(__VA_ARGS__) |
| #define vle64ff_v_u64m2_m | ( | ... | ) | __riscv_vle64ff_v_u64m2_tumu(__VA_ARGS__) |
| #define vle64ff_v_u64m4 | ( | ... | ) | __riscv_vle64ff_v_u64m4(__VA_ARGS__) |
| #define vle64ff_v_u64m4_m | ( | ... | ) | __riscv_vle64ff_v_u64m4_tumu(__VA_ARGS__) |
| #define vle64ff_v_u64m8 | ( | ... | ) | __riscv_vle64ff_v_u64m8(__VA_ARGS__) |
| #define vle64ff_v_u64m8_m | ( | ... | ) | __riscv_vle64ff_v_u64m8_tumu(__VA_ARGS__) |
| #define vle8_v_i8m1 | ( | ... | ) | __riscv_vle8_v_i8m1(__VA_ARGS__) |
| #define vle8_v_i8m1_m | ( | ... | ) | __riscv_vle8_v_i8m1_tumu(__VA_ARGS__) |
| #define vle8_v_i8m2 | ( | ... | ) | __riscv_vle8_v_i8m2(__VA_ARGS__) |
| #define vle8_v_i8m2_m | ( | ... | ) | __riscv_vle8_v_i8m2_tumu(__VA_ARGS__) |
| #define vle8_v_i8m4 | ( | ... | ) | __riscv_vle8_v_i8m4(__VA_ARGS__) |
| #define vle8_v_i8m4_m | ( | ... | ) | __riscv_vle8_v_i8m4_tumu(__VA_ARGS__) |
| #define vle8_v_i8m8 | ( | ... | ) | __riscv_vle8_v_i8m8(__VA_ARGS__) |
| #define vle8_v_i8m8_m | ( | ... | ) | __riscv_vle8_v_i8m8_tumu(__VA_ARGS__) |
| #define vle8_v_i8mf2 | ( | ... | ) | __riscv_vle8_v_i8mf2(__VA_ARGS__) |
| #define vle8_v_i8mf2_m | ( | ... | ) | __riscv_vle8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vle8_v_i8mf4 | ( | ... | ) | __riscv_vle8_v_i8mf4(__VA_ARGS__) |
| #define vle8_v_i8mf4_m | ( | ... | ) | __riscv_vle8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vle8_v_i8mf8 | ( | ... | ) | __riscv_vle8_v_i8mf8(__VA_ARGS__) |
| #define vle8_v_i8mf8_m | ( | ... | ) | __riscv_vle8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vle8_v_u8m1 | ( | ... | ) | __riscv_vle8_v_u8m1(__VA_ARGS__) |
| #define vle8_v_u8m1_m | ( | ... | ) | __riscv_vle8_v_u8m1_tumu(__VA_ARGS__) |
| #define vle8_v_u8m2 | ( | ... | ) | __riscv_vle8_v_u8m2(__VA_ARGS__) |
| #define vle8_v_u8m2_m | ( | ... | ) | __riscv_vle8_v_u8m2_tumu(__VA_ARGS__) |
| #define vle8_v_u8m4 | ( | ... | ) | __riscv_vle8_v_u8m4(__VA_ARGS__) |
| #define vle8_v_u8m4_m | ( | ... | ) | __riscv_vle8_v_u8m4_tumu(__VA_ARGS__) |
| #define vle8_v_u8m8 | ( | ... | ) | __riscv_vle8_v_u8m8(__VA_ARGS__) |
| #define vle8_v_u8m8_m | ( | ... | ) | __riscv_vle8_v_u8m8_tumu(__VA_ARGS__) |
| #define vle8_v_u8mf2 | ( | ... | ) | __riscv_vle8_v_u8mf2(__VA_ARGS__) |
| #define vle8_v_u8mf2_m | ( | ... | ) | __riscv_vle8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vle8_v_u8mf4 | ( | ... | ) | __riscv_vle8_v_u8mf4(__VA_ARGS__) |
| #define vle8_v_u8mf4_m | ( | ... | ) | __riscv_vle8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vle8_v_u8mf8 | ( | ... | ) | __riscv_vle8_v_u8mf8(__VA_ARGS__) |
| #define vle8_v_u8mf8_m | ( | ... | ) | __riscv_vle8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vle8ff_v_i8m1 | ( | ... | ) | __riscv_vle8ff_v_i8m1(__VA_ARGS__) |
| #define vle8ff_v_i8m1_m | ( | ... | ) | __riscv_vle8ff_v_i8m1_tumu(__VA_ARGS__) |
| #define vle8ff_v_i8m2 | ( | ... | ) | __riscv_vle8ff_v_i8m2(__VA_ARGS__) |
| #define vle8ff_v_i8m2_m | ( | ... | ) | __riscv_vle8ff_v_i8m2_tumu(__VA_ARGS__) |
| #define vle8ff_v_i8m4 | ( | ... | ) | __riscv_vle8ff_v_i8m4(__VA_ARGS__) |
| #define vle8ff_v_i8m4_m | ( | ... | ) | __riscv_vle8ff_v_i8m4_tumu(__VA_ARGS__) |
| #define vle8ff_v_i8m8 | ( | ... | ) | __riscv_vle8ff_v_i8m8(__VA_ARGS__) |
| #define vle8ff_v_i8m8_m | ( | ... | ) | __riscv_vle8ff_v_i8m8_tumu(__VA_ARGS__) |
| #define vle8ff_v_i8mf2 | ( | ... | ) | __riscv_vle8ff_v_i8mf2(__VA_ARGS__) |
| #define vle8ff_v_i8mf2_m | ( | ... | ) | __riscv_vle8ff_v_i8mf2_tumu(__VA_ARGS__) |
| #define vle8ff_v_i8mf4 | ( | ... | ) | __riscv_vle8ff_v_i8mf4(__VA_ARGS__) |
| #define vle8ff_v_i8mf4_m | ( | ... | ) | __riscv_vle8ff_v_i8mf4_tumu(__VA_ARGS__) |
| #define vle8ff_v_i8mf8 | ( | ... | ) | __riscv_vle8ff_v_i8mf8(__VA_ARGS__) |
| #define vle8ff_v_i8mf8_m | ( | ... | ) | __riscv_vle8ff_v_i8mf8_tumu(__VA_ARGS__) |
| #define vle8ff_v_u8m1 | ( | ... | ) | __riscv_vle8ff_v_u8m1(__VA_ARGS__) |
| #define vle8ff_v_u8m1_m | ( | ... | ) | __riscv_vle8ff_v_u8m1_tumu(__VA_ARGS__) |
| #define vle8ff_v_u8m2 | ( | ... | ) | __riscv_vle8ff_v_u8m2(__VA_ARGS__) |
| #define vle8ff_v_u8m2_m | ( | ... | ) | __riscv_vle8ff_v_u8m2_tumu(__VA_ARGS__) |
| #define vle8ff_v_u8m4 | ( | ... | ) | __riscv_vle8ff_v_u8m4(__VA_ARGS__) |
| #define vle8ff_v_u8m4_m | ( | ... | ) | __riscv_vle8ff_v_u8m4_tumu(__VA_ARGS__) |
| #define vle8ff_v_u8m8 | ( | ... | ) | __riscv_vle8ff_v_u8m8(__VA_ARGS__) |
| #define vle8ff_v_u8m8_m | ( | ... | ) | __riscv_vle8ff_v_u8m8_tumu(__VA_ARGS__) |
| #define vle8ff_v_u8mf2 | ( | ... | ) | __riscv_vle8ff_v_u8mf2(__VA_ARGS__) |
| #define vle8ff_v_u8mf2_m | ( | ... | ) | __riscv_vle8ff_v_u8mf2_tumu(__VA_ARGS__) |
| #define vle8ff_v_u8mf4 | ( | ... | ) | __riscv_vle8ff_v_u8mf4(__VA_ARGS__) |
| #define vle8ff_v_u8mf4_m | ( | ... | ) | __riscv_vle8ff_v_u8mf4_tumu(__VA_ARGS__) |
| #define vle8ff_v_u8mf8 | ( | ... | ) | __riscv_vle8ff_v_u8mf8(__VA_ARGS__) |
| #define vle8ff_v_u8mf8_m | ( | ... | ) | __riscv_vle8ff_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlm_v_b1 | ( | ... | ) | __riscv_vlm_v_b1(__VA_ARGS__) |
| #define vlm_v_b16 | ( | ... | ) | __riscv_vlm_v_b16(__VA_ARGS__) |
| #define vlm_v_b2 | ( | ... | ) | __riscv_vlm_v_b2(__VA_ARGS__) |
| #define vlm_v_b32 | ( | ... | ) | __riscv_vlm_v_b32(__VA_ARGS__) |
| #define vlm_v_b4 | ( | ... | ) | __riscv_vlm_v_b4(__VA_ARGS__) |
| #define vlm_v_b64 | ( | ... | ) | __riscv_vlm_v_b64(__VA_ARGS__) |
| #define vlm_v_b8 | ( | ... | ) | __riscv_vlm_v_b8(__VA_ARGS__) |
| #define vlmul_ext_v_f16m1_f16m2 | ( | ... | ) | __riscv_vlmul_ext_v_f16m1_f16m2(__VA_ARGS__) |
| #define vlmul_ext_v_f16m1_f16m4 | ( | ... | ) | __riscv_vlmul_ext_v_f16m1_f16m4(__VA_ARGS__) |
| #define vlmul_ext_v_f16m1_f16m8 | ( | ... | ) | __riscv_vlmul_ext_v_f16m1_f16m8(__VA_ARGS__) |
| #define vlmul_ext_v_f16m2_f16m4 | ( | ... | ) | __riscv_vlmul_ext_v_f16m2_f16m4(__VA_ARGS__) |
| #define vlmul_ext_v_f16m2_f16m8 | ( | ... | ) | __riscv_vlmul_ext_v_f16m2_f16m8(__VA_ARGS__) |
| #define vlmul_ext_v_f16m4_f16m8 | ( | ... | ) | __riscv_vlmul_ext_v_f16m4_f16m8(__VA_ARGS__) |
| #define vlmul_ext_v_f16mf2_f16m1 | ( | ... | ) | __riscv_vlmul_ext_v_f16mf2_f16m1(__VA_ARGS__) |
| #define vlmul_ext_v_f16mf2_f16m2 | ( | ... | ) | __riscv_vlmul_ext_v_f16mf2_f16m2(__VA_ARGS__) |
| #define vlmul_ext_v_f16mf2_f16m4 | ( | ... | ) | __riscv_vlmul_ext_v_f16mf2_f16m4(__VA_ARGS__) |
| #define vlmul_ext_v_f16mf2_f16m8 | ( | ... | ) | __riscv_vlmul_ext_v_f16mf2_f16m8(__VA_ARGS__) |
| #define vlmul_ext_v_f16mf4_f16m1 | ( | ... | ) | __riscv_vlmul_ext_v_f16mf4_f16m1(__VA_ARGS__) |
| #define vlmul_ext_v_f16mf4_f16m2 | ( | ... | ) | __riscv_vlmul_ext_v_f16mf4_f16m2(__VA_ARGS__) |
| #define vlmul_ext_v_f16mf4_f16m4 | ( | ... | ) | __riscv_vlmul_ext_v_f16mf4_f16m4(__VA_ARGS__) |
| #define vlmul_ext_v_f16mf4_f16m8 | ( | ... | ) | __riscv_vlmul_ext_v_f16mf4_f16m8(__VA_ARGS__) |
| #define vlmul_ext_v_f16mf4_f16mf2 | ( | ... | ) | __riscv_vlmul_ext_v_f16mf4_f16mf2(__VA_ARGS__) |
| #define vlmul_ext_v_f32m1_f32m2 | ( | ... | ) | __riscv_vlmul_ext_v_f32m1_f32m2(__VA_ARGS__) |
| #define vlmul_ext_v_f32m1_f32m4 | ( | ... | ) | __riscv_vlmul_ext_v_f32m1_f32m4(__VA_ARGS__) |
| #define vlmul_ext_v_f32m1_f32m8 | ( | ... | ) | __riscv_vlmul_ext_v_f32m1_f32m8(__VA_ARGS__) |
| #define vlmul_ext_v_f32m2_f32m4 | ( | ... | ) | __riscv_vlmul_ext_v_f32m2_f32m4(__VA_ARGS__) |
| #define vlmul_ext_v_f32m2_f32m8 | ( | ... | ) | __riscv_vlmul_ext_v_f32m2_f32m8(__VA_ARGS__) |
| #define vlmul_ext_v_f32m4_f32m8 | ( | ... | ) | __riscv_vlmul_ext_v_f32m4_f32m8(__VA_ARGS__) |
| #define vlmul_ext_v_f32mf2_f32m1 | ( | ... | ) | __riscv_vlmul_ext_v_f32mf2_f32m1(__VA_ARGS__) |
| #define vlmul_ext_v_f32mf2_f32m2 | ( | ... | ) | __riscv_vlmul_ext_v_f32mf2_f32m2(__VA_ARGS__) |
| #define vlmul_ext_v_f32mf2_f32m4 | ( | ... | ) | __riscv_vlmul_ext_v_f32mf2_f32m4(__VA_ARGS__) |
| #define vlmul_ext_v_f32mf2_f32m8 | ( | ... | ) | __riscv_vlmul_ext_v_f32mf2_f32m8(__VA_ARGS__) |
| #define vlmul_ext_v_f64m1_f64m2 | ( | ... | ) | __riscv_vlmul_ext_v_f64m1_f64m2(__VA_ARGS__) |
| #define vlmul_ext_v_f64m1_f64m4 | ( | ... | ) | __riscv_vlmul_ext_v_f64m1_f64m4(__VA_ARGS__) |
| #define vlmul_ext_v_f64m1_f64m8 | ( | ... | ) | __riscv_vlmul_ext_v_f64m1_f64m8(__VA_ARGS__) |
| #define vlmul_ext_v_f64m2_f64m4 | ( | ... | ) | __riscv_vlmul_ext_v_f64m2_f64m4(__VA_ARGS__) |
| #define vlmul_ext_v_f64m2_f64m8 | ( | ... | ) | __riscv_vlmul_ext_v_f64m2_f64m8(__VA_ARGS__) |
| #define vlmul_ext_v_f64m4_f64m8 | ( | ... | ) | __riscv_vlmul_ext_v_f64m4_f64m8(__VA_ARGS__) |
| #define vlmul_ext_v_i16m1_i16m2 | ( | ... | ) | __riscv_vlmul_ext_v_i16m1_i16m2(__VA_ARGS__) |
| #define vlmul_ext_v_i16m1_i16m4 | ( | ... | ) | __riscv_vlmul_ext_v_i16m1_i16m4(__VA_ARGS__) |
| #define vlmul_ext_v_i16m1_i16m8 | ( | ... | ) | __riscv_vlmul_ext_v_i16m1_i16m8(__VA_ARGS__) |
| #define vlmul_ext_v_i16m2_i16m4 | ( | ... | ) | __riscv_vlmul_ext_v_i16m2_i16m4(__VA_ARGS__) |
| #define vlmul_ext_v_i16m2_i16m8 | ( | ... | ) | __riscv_vlmul_ext_v_i16m2_i16m8(__VA_ARGS__) |
| #define vlmul_ext_v_i16m4_i16m8 | ( | ... | ) | __riscv_vlmul_ext_v_i16m4_i16m8(__VA_ARGS__) |
| #define vlmul_ext_v_i16mf2_i16m1 | ( | ... | ) | __riscv_vlmul_ext_v_i16mf2_i16m1(__VA_ARGS__) |
| #define vlmul_ext_v_i16mf2_i16m2 | ( | ... | ) | __riscv_vlmul_ext_v_i16mf2_i16m2(__VA_ARGS__) |
| #define vlmul_ext_v_i16mf2_i16m4 | ( | ... | ) | __riscv_vlmul_ext_v_i16mf2_i16m4(__VA_ARGS__) |
| #define vlmul_ext_v_i16mf2_i16m8 | ( | ... | ) | __riscv_vlmul_ext_v_i16mf2_i16m8(__VA_ARGS__) |
| #define vlmul_ext_v_i16mf4_i16m1 | ( | ... | ) | __riscv_vlmul_ext_v_i16mf4_i16m1(__VA_ARGS__) |
| #define vlmul_ext_v_i16mf4_i16m2 | ( | ... | ) | __riscv_vlmul_ext_v_i16mf4_i16m2(__VA_ARGS__) |
| #define vlmul_ext_v_i16mf4_i16m4 | ( | ... | ) | __riscv_vlmul_ext_v_i16mf4_i16m4(__VA_ARGS__) |
| #define vlmul_ext_v_i16mf4_i16m8 | ( | ... | ) | __riscv_vlmul_ext_v_i16mf4_i16m8(__VA_ARGS__) |
| #define vlmul_ext_v_i16mf4_i16mf2 | ( | ... | ) | __riscv_vlmul_ext_v_i16mf4_i16mf2(__VA_ARGS__) |
| #define vlmul_ext_v_i32m1_i32m2 | ( | ... | ) | __riscv_vlmul_ext_v_i32m1_i32m2(__VA_ARGS__) |
| #define vlmul_ext_v_i32m1_i32m4 | ( | ... | ) | __riscv_vlmul_ext_v_i32m1_i32m4(__VA_ARGS__) |
| #define vlmul_ext_v_i32m1_i32m8 | ( | ... | ) | __riscv_vlmul_ext_v_i32m1_i32m8(__VA_ARGS__) |
| #define vlmul_ext_v_i32m2_i32m4 | ( | ... | ) | __riscv_vlmul_ext_v_i32m2_i32m4(__VA_ARGS__) |
| #define vlmul_ext_v_i32m2_i32m8 | ( | ... | ) | __riscv_vlmul_ext_v_i32m2_i32m8(__VA_ARGS__) |
| #define vlmul_ext_v_i32m4_i32m8 | ( | ... | ) | __riscv_vlmul_ext_v_i32m4_i32m8(__VA_ARGS__) |
| #define vlmul_ext_v_i32mf2_i32m1 | ( | ... | ) | __riscv_vlmul_ext_v_i32mf2_i32m1(__VA_ARGS__) |
| #define vlmul_ext_v_i32mf2_i32m2 | ( | ... | ) | __riscv_vlmul_ext_v_i32mf2_i32m2(__VA_ARGS__) |
| #define vlmul_ext_v_i32mf2_i32m4 | ( | ... | ) | __riscv_vlmul_ext_v_i32mf2_i32m4(__VA_ARGS__) |
| #define vlmul_ext_v_i32mf2_i32m8 | ( | ... | ) | __riscv_vlmul_ext_v_i32mf2_i32m8(__VA_ARGS__) |
| #define vlmul_ext_v_i64m1_i64m2 | ( | ... | ) | __riscv_vlmul_ext_v_i64m1_i64m2(__VA_ARGS__) |
| #define vlmul_ext_v_i64m1_i64m4 | ( | ... | ) | __riscv_vlmul_ext_v_i64m1_i64m4(__VA_ARGS__) |
| #define vlmul_ext_v_i64m1_i64m8 | ( | ... | ) | __riscv_vlmul_ext_v_i64m1_i64m8(__VA_ARGS__) |
| #define vlmul_ext_v_i64m2_i64m4 | ( | ... | ) | __riscv_vlmul_ext_v_i64m2_i64m4(__VA_ARGS__) |
| #define vlmul_ext_v_i64m2_i64m8 | ( | ... | ) | __riscv_vlmul_ext_v_i64m2_i64m8(__VA_ARGS__) |
| #define vlmul_ext_v_i64m4_i64m8 | ( | ... | ) | __riscv_vlmul_ext_v_i64m4_i64m8(__VA_ARGS__) |
| #define vlmul_ext_v_i8m1_i8m2 | ( | ... | ) | __riscv_vlmul_ext_v_i8m1_i8m2(__VA_ARGS__) |
| #define vlmul_ext_v_i8m1_i8m4 | ( | ... | ) | __riscv_vlmul_ext_v_i8m1_i8m4(__VA_ARGS__) |
| #define vlmul_ext_v_i8m1_i8m8 | ( | ... | ) | __riscv_vlmul_ext_v_i8m1_i8m8(__VA_ARGS__) |
| #define vlmul_ext_v_i8m2_i8m4 | ( | ... | ) | __riscv_vlmul_ext_v_i8m2_i8m4(__VA_ARGS__) |
| #define vlmul_ext_v_i8m2_i8m8 | ( | ... | ) | __riscv_vlmul_ext_v_i8m2_i8m8(__VA_ARGS__) |
| #define vlmul_ext_v_i8m4_i8m8 | ( | ... | ) | __riscv_vlmul_ext_v_i8m4_i8m8(__VA_ARGS__) |
| #define vlmul_ext_v_i8mf2_i8m1 | ( | ... | ) | __riscv_vlmul_ext_v_i8mf2_i8m1(__VA_ARGS__) |
| #define vlmul_ext_v_i8mf2_i8m2 | ( | ... | ) | __riscv_vlmul_ext_v_i8mf2_i8m2(__VA_ARGS__) |
| #define vlmul_ext_v_i8mf2_i8m4 | ( | ... | ) | __riscv_vlmul_ext_v_i8mf2_i8m4(__VA_ARGS__) |
| #define vlmul_ext_v_i8mf2_i8m8 | ( | ... | ) | __riscv_vlmul_ext_v_i8mf2_i8m8(__VA_ARGS__) |
| #define vlmul_ext_v_i8mf4_i8m1 | ( | ... | ) | __riscv_vlmul_ext_v_i8mf4_i8m1(__VA_ARGS__) |
| #define vlmul_ext_v_i8mf4_i8m2 | ( | ... | ) | __riscv_vlmul_ext_v_i8mf4_i8m2(__VA_ARGS__) |
| #define vlmul_ext_v_i8mf4_i8m4 | ( | ... | ) | __riscv_vlmul_ext_v_i8mf4_i8m4(__VA_ARGS__) |
| #define vlmul_ext_v_i8mf4_i8m8 | ( | ... | ) | __riscv_vlmul_ext_v_i8mf4_i8m8(__VA_ARGS__) |
| #define vlmul_ext_v_i8mf4_i8mf2 | ( | ... | ) | __riscv_vlmul_ext_v_i8mf4_i8mf2(__VA_ARGS__) |
| #define vlmul_ext_v_i8mf8_i8m1 | ( | ... | ) | __riscv_vlmul_ext_v_i8mf8_i8m1(__VA_ARGS__) |
| #define vlmul_ext_v_i8mf8_i8m2 | ( | ... | ) | __riscv_vlmul_ext_v_i8mf8_i8m2(__VA_ARGS__) |
| #define vlmul_ext_v_i8mf8_i8m4 | ( | ... | ) | __riscv_vlmul_ext_v_i8mf8_i8m4(__VA_ARGS__) |
| #define vlmul_ext_v_i8mf8_i8m8 | ( | ... | ) | __riscv_vlmul_ext_v_i8mf8_i8m8(__VA_ARGS__) |
| #define vlmul_ext_v_i8mf8_i8mf2 | ( | ... | ) | __riscv_vlmul_ext_v_i8mf8_i8mf2(__VA_ARGS__) |
| #define vlmul_ext_v_i8mf8_i8mf4 | ( | ... | ) | __riscv_vlmul_ext_v_i8mf8_i8mf4(__VA_ARGS__) |
| #define vlmul_ext_v_u16m1_u16m2 | ( | ... | ) | __riscv_vlmul_ext_v_u16m1_u16m2(__VA_ARGS__) |
| #define vlmul_ext_v_u16m1_u16m4 | ( | ... | ) | __riscv_vlmul_ext_v_u16m1_u16m4(__VA_ARGS__) |
| #define vlmul_ext_v_u16m1_u16m8 | ( | ... | ) | __riscv_vlmul_ext_v_u16m1_u16m8(__VA_ARGS__) |
| #define vlmul_ext_v_u16m2_u16m4 | ( | ... | ) | __riscv_vlmul_ext_v_u16m2_u16m4(__VA_ARGS__) |
| #define vlmul_ext_v_u16m2_u16m8 | ( | ... | ) | __riscv_vlmul_ext_v_u16m2_u16m8(__VA_ARGS__) |
| #define vlmul_ext_v_u16m4_u16m8 | ( | ... | ) | __riscv_vlmul_ext_v_u16m4_u16m8(__VA_ARGS__) |
| #define vlmul_ext_v_u16mf2_u16m1 | ( | ... | ) | __riscv_vlmul_ext_v_u16mf2_u16m1(__VA_ARGS__) |
| #define vlmul_ext_v_u16mf2_u16m2 | ( | ... | ) | __riscv_vlmul_ext_v_u16mf2_u16m2(__VA_ARGS__) |
| #define vlmul_ext_v_u16mf2_u16m4 | ( | ... | ) | __riscv_vlmul_ext_v_u16mf2_u16m4(__VA_ARGS__) |
| #define vlmul_ext_v_u16mf2_u16m8 | ( | ... | ) | __riscv_vlmul_ext_v_u16mf2_u16m8(__VA_ARGS__) |
| #define vlmul_ext_v_u16mf4_u16m1 | ( | ... | ) | __riscv_vlmul_ext_v_u16mf4_u16m1(__VA_ARGS__) |
| #define vlmul_ext_v_u16mf4_u16m2 | ( | ... | ) | __riscv_vlmul_ext_v_u16mf4_u16m2(__VA_ARGS__) |
| #define vlmul_ext_v_u16mf4_u16m4 | ( | ... | ) | __riscv_vlmul_ext_v_u16mf4_u16m4(__VA_ARGS__) |
| #define vlmul_ext_v_u16mf4_u16m8 | ( | ... | ) | __riscv_vlmul_ext_v_u16mf4_u16m8(__VA_ARGS__) |
| #define vlmul_ext_v_u16mf4_u16mf2 | ( | ... | ) | __riscv_vlmul_ext_v_u16mf4_u16mf2(__VA_ARGS__) |
| #define vlmul_ext_v_u32m1_u32m2 | ( | ... | ) | __riscv_vlmul_ext_v_u32m1_u32m2(__VA_ARGS__) |
| #define vlmul_ext_v_u32m1_u32m4 | ( | ... | ) | __riscv_vlmul_ext_v_u32m1_u32m4(__VA_ARGS__) |
| #define vlmul_ext_v_u32m1_u32m8 | ( | ... | ) | __riscv_vlmul_ext_v_u32m1_u32m8(__VA_ARGS__) |
| #define vlmul_ext_v_u32m2_u32m4 | ( | ... | ) | __riscv_vlmul_ext_v_u32m2_u32m4(__VA_ARGS__) |
| #define vlmul_ext_v_u32m2_u32m8 | ( | ... | ) | __riscv_vlmul_ext_v_u32m2_u32m8(__VA_ARGS__) |
| #define vlmul_ext_v_u32m4_u32m8 | ( | ... | ) | __riscv_vlmul_ext_v_u32m4_u32m8(__VA_ARGS__) |
| #define vlmul_ext_v_u32mf2_u32m1 | ( | ... | ) | __riscv_vlmul_ext_v_u32mf2_u32m1(__VA_ARGS__) |
| #define vlmul_ext_v_u32mf2_u32m2 | ( | ... | ) | __riscv_vlmul_ext_v_u32mf2_u32m2(__VA_ARGS__) |
| #define vlmul_ext_v_u32mf2_u32m4 | ( | ... | ) | __riscv_vlmul_ext_v_u32mf2_u32m4(__VA_ARGS__) |
| #define vlmul_ext_v_u32mf2_u32m8 | ( | ... | ) | __riscv_vlmul_ext_v_u32mf2_u32m8(__VA_ARGS__) |
| #define vlmul_ext_v_u64m1_u64m2 | ( | ... | ) | __riscv_vlmul_ext_v_u64m1_u64m2(__VA_ARGS__) |
| #define vlmul_ext_v_u64m1_u64m4 | ( | ... | ) | __riscv_vlmul_ext_v_u64m1_u64m4(__VA_ARGS__) |
| #define vlmul_ext_v_u64m1_u64m8 | ( | ... | ) | __riscv_vlmul_ext_v_u64m1_u64m8(__VA_ARGS__) |
| #define vlmul_ext_v_u64m2_u64m4 | ( | ... | ) | __riscv_vlmul_ext_v_u64m2_u64m4(__VA_ARGS__) |
| #define vlmul_ext_v_u64m2_u64m8 | ( | ... | ) | __riscv_vlmul_ext_v_u64m2_u64m8(__VA_ARGS__) |
| #define vlmul_ext_v_u64m4_u64m8 | ( | ... | ) | __riscv_vlmul_ext_v_u64m4_u64m8(__VA_ARGS__) |
| #define vlmul_ext_v_u8m1_u8m2 | ( | ... | ) | __riscv_vlmul_ext_v_u8m1_u8m2(__VA_ARGS__) |
| #define vlmul_ext_v_u8m1_u8m4 | ( | ... | ) | __riscv_vlmul_ext_v_u8m1_u8m4(__VA_ARGS__) |
| #define vlmul_ext_v_u8m1_u8m8 | ( | ... | ) | __riscv_vlmul_ext_v_u8m1_u8m8(__VA_ARGS__) |
| #define vlmul_ext_v_u8m2_u8m4 | ( | ... | ) | __riscv_vlmul_ext_v_u8m2_u8m4(__VA_ARGS__) |
| #define vlmul_ext_v_u8m2_u8m8 | ( | ... | ) | __riscv_vlmul_ext_v_u8m2_u8m8(__VA_ARGS__) |
| #define vlmul_ext_v_u8m4_u8m8 | ( | ... | ) | __riscv_vlmul_ext_v_u8m4_u8m8(__VA_ARGS__) |
| #define vlmul_ext_v_u8mf2_u8m1 | ( | ... | ) | __riscv_vlmul_ext_v_u8mf2_u8m1(__VA_ARGS__) |
| #define vlmul_ext_v_u8mf2_u8m2 | ( | ... | ) | __riscv_vlmul_ext_v_u8mf2_u8m2(__VA_ARGS__) |
| #define vlmul_ext_v_u8mf2_u8m4 | ( | ... | ) | __riscv_vlmul_ext_v_u8mf2_u8m4(__VA_ARGS__) |
| #define vlmul_ext_v_u8mf2_u8m8 | ( | ... | ) | __riscv_vlmul_ext_v_u8mf2_u8m8(__VA_ARGS__) |
| #define vlmul_ext_v_u8mf4_u8m1 | ( | ... | ) | __riscv_vlmul_ext_v_u8mf4_u8m1(__VA_ARGS__) |
| #define vlmul_ext_v_u8mf4_u8m2 | ( | ... | ) | __riscv_vlmul_ext_v_u8mf4_u8m2(__VA_ARGS__) |
| #define vlmul_ext_v_u8mf4_u8m4 | ( | ... | ) | __riscv_vlmul_ext_v_u8mf4_u8m4(__VA_ARGS__) |
| #define vlmul_ext_v_u8mf4_u8m8 | ( | ... | ) | __riscv_vlmul_ext_v_u8mf4_u8m8(__VA_ARGS__) |
| #define vlmul_ext_v_u8mf4_u8mf2 | ( | ... | ) | __riscv_vlmul_ext_v_u8mf4_u8mf2(__VA_ARGS__) |
| #define vlmul_ext_v_u8mf8_u8m1 | ( | ... | ) | __riscv_vlmul_ext_v_u8mf8_u8m1(__VA_ARGS__) |
| #define vlmul_ext_v_u8mf8_u8m2 | ( | ... | ) | __riscv_vlmul_ext_v_u8mf8_u8m2(__VA_ARGS__) |
| #define vlmul_ext_v_u8mf8_u8m4 | ( | ... | ) | __riscv_vlmul_ext_v_u8mf8_u8m4(__VA_ARGS__) |
| #define vlmul_ext_v_u8mf8_u8m8 | ( | ... | ) | __riscv_vlmul_ext_v_u8mf8_u8m8(__VA_ARGS__) |
| #define vlmul_ext_v_u8mf8_u8mf2 | ( | ... | ) | __riscv_vlmul_ext_v_u8mf8_u8mf2(__VA_ARGS__) |
| #define vlmul_ext_v_u8mf8_u8mf4 | ( | ... | ) | __riscv_vlmul_ext_v_u8mf8_u8mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_f16m1_f16mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_f16m1_f16mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_f16m1_f16mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_f16m1_f16mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_f16m2_f16m1 | ( | ... | ) | __riscv_vlmul_trunc_v_f16m2_f16m1(__VA_ARGS__) |
| #define vlmul_trunc_v_f16m2_f16mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_f16m2_f16mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_f16m2_f16mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_f16m2_f16mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_f16m4_f16m1 | ( | ... | ) | __riscv_vlmul_trunc_v_f16m4_f16m1(__VA_ARGS__) |
| #define vlmul_trunc_v_f16m4_f16m2 | ( | ... | ) | __riscv_vlmul_trunc_v_f16m4_f16m2(__VA_ARGS__) |
| #define vlmul_trunc_v_f16m4_f16mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_f16m4_f16mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_f16m4_f16mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_f16m4_f16mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_f16m8_f16m1 | ( | ... | ) | __riscv_vlmul_trunc_v_f16m8_f16m1(__VA_ARGS__) |
| #define vlmul_trunc_v_f16m8_f16m2 | ( | ... | ) | __riscv_vlmul_trunc_v_f16m8_f16m2(__VA_ARGS__) |
| #define vlmul_trunc_v_f16m8_f16m4 | ( | ... | ) | __riscv_vlmul_trunc_v_f16m8_f16m4(__VA_ARGS__) |
| #define vlmul_trunc_v_f16m8_f16mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_f16m8_f16mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_f16m8_f16mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_f16m8_f16mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_f16mf2_f16mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_f16mf2_f16mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_f32m1_f32mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_f32m1_f32mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_f32m2_f32m1 | ( | ... | ) | __riscv_vlmul_trunc_v_f32m2_f32m1(__VA_ARGS__) |
| #define vlmul_trunc_v_f32m2_f32mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_f32m2_f32mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_f32m4_f32m1 | ( | ... | ) | __riscv_vlmul_trunc_v_f32m4_f32m1(__VA_ARGS__) |
| #define vlmul_trunc_v_f32m4_f32m2 | ( | ... | ) | __riscv_vlmul_trunc_v_f32m4_f32m2(__VA_ARGS__) |
| #define vlmul_trunc_v_f32m4_f32mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_f32m4_f32mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_f32m8_f32m1 | ( | ... | ) | __riscv_vlmul_trunc_v_f32m8_f32m1(__VA_ARGS__) |
| #define vlmul_trunc_v_f32m8_f32m2 | ( | ... | ) | __riscv_vlmul_trunc_v_f32m8_f32m2(__VA_ARGS__) |
| #define vlmul_trunc_v_f32m8_f32m4 | ( | ... | ) | __riscv_vlmul_trunc_v_f32m8_f32m4(__VA_ARGS__) |
| #define vlmul_trunc_v_f32m8_f32mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_f32m8_f32mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_f64m2_f64m1 | ( | ... | ) | __riscv_vlmul_trunc_v_f64m2_f64m1(__VA_ARGS__) |
| #define vlmul_trunc_v_f64m4_f64m1 | ( | ... | ) | __riscv_vlmul_trunc_v_f64m4_f64m1(__VA_ARGS__) |
| #define vlmul_trunc_v_f64m4_f64m2 | ( | ... | ) | __riscv_vlmul_trunc_v_f64m4_f64m2(__VA_ARGS__) |
| #define vlmul_trunc_v_f64m8_f64m1 | ( | ... | ) | __riscv_vlmul_trunc_v_f64m8_f64m1(__VA_ARGS__) |
| #define vlmul_trunc_v_f64m8_f64m2 | ( | ... | ) | __riscv_vlmul_trunc_v_f64m8_f64m2(__VA_ARGS__) |
| #define vlmul_trunc_v_f64m8_f64m4 | ( | ... | ) | __riscv_vlmul_trunc_v_f64m8_f64m4(__VA_ARGS__) |
| #define vlmul_trunc_v_i16m1_i16mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_i16m1_i16mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_i16m1_i16mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_i16m1_i16mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_i16m2_i16m1 | ( | ... | ) | __riscv_vlmul_trunc_v_i16m2_i16m1(__VA_ARGS__) |
| #define vlmul_trunc_v_i16m2_i16mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_i16m2_i16mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_i16m2_i16mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_i16m2_i16mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_i16m4_i16m1 | ( | ... | ) | __riscv_vlmul_trunc_v_i16m4_i16m1(__VA_ARGS__) |
| #define vlmul_trunc_v_i16m4_i16m2 | ( | ... | ) | __riscv_vlmul_trunc_v_i16m4_i16m2(__VA_ARGS__) |
| #define vlmul_trunc_v_i16m4_i16mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_i16m4_i16mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_i16m4_i16mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_i16m4_i16mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_i16m8_i16m1 | ( | ... | ) | __riscv_vlmul_trunc_v_i16m8_i16m1(__VA_ARGS__) |
| #define vlmul_trunc_v_i16m8_i16m2 | ( | ... | ) | __riscv_vlmul_trunc_v_i16m8_i16m2(__VA_ARGS__) |
| #define vlmul_trunc_v_i16m8_i16m4 | ( | ... | ) | __riscv_vlmul_trunc_v_i16m8_i16m4(__VA_ARGS__) |
| #define vlmul_trunc_v_i16m8_i16mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_i16m8_i16mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_i16m8_i16mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_i16m8_i16mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_i16mf2_i16mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_i16mf2_i16mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_i32m1_i32mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_i32m1_i32mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_i32m2_i32m1 | ( | ... | ) | __riscv_vlmul_trunc_v_i32m2_i32m1(__VA_ARGS__) |
| #define vlmul_trunc_v_i32m2_i32mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_i32m2_i32mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_i32m4_i32m1 | ( | ... | ) | __riscv_vlmul_trunc_v_i32m4_i32m1(__VA_ARGS__) |
| #define vlmul_trunc_v_i32m4_i32m2 | ( | ... | ) | __riscv_vlmul_trunc_v_i32m4_i32m2(__VA_ARGS__) |
| #define vlmul_trunc_v_i32m4_i32mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_i32m4_i32mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_i32m8_i32m1 | ( | ... | ) | __riscv_vlmul_trunc_v_i32m8_i32m1(__VA_ARGS__) |
| #define vlmul_trunc_v_i32m8_i32m2 | ( | ... | ) | __riscv_vlmul_trunc_v_i32m8_i32m2(__VA_ARGS__) |
| #define vlmul_trunc_v_i32m8_i32m4 | ( | ... | ) | __riscv_vlmul_trunc_v_i32m8_i32m4(__VA_ARGS__) |
| #define vlmul_trunc_v_i32m8_i32mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_i32m8_i32mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_i64m2_i64m1 | ( | ... | ) | __riscv_vlmul_trunc_v_i64m2_i64m1(__VA_ARGS__) |
| #define vlmul_trunc_v_i64m4_i64m1 | ( | ... | ) | __riscv_vlmul_trunc_v_i64m4_i64m1(__VA_ARGS__) |
| #define vlmul_trunc_v_i64m4_i64m2 | ( | ... | ) | __riscv_vlmul_trunc_v_i64m4_i64m2(__VA_ARGS__) |
| #define vlmul_trunc_v_i64m8_i64m1 | ( | ... | ) | __riscv_vlmul_trunc_v_i64m8_i64m1(__VA_ARGS__) |
| #define vlmul_trunc_v_i64m8_i64m2 | ( | ... | ) | __riscv_vlmul_trunc_v_i64m8_i64m2(__VA_ARGS__) |
| #define vlmul_trunc_v_i64m8_i64m4 | ( | ... | ) | __riscv_vlmul_trunc_v_i64m8_i64m4(__VA_ARGS__) |
| #define vlmul_trunc_v_i8m1_i8mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_i8m1_i8mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_i8m1_i8mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_i8m1_i8mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_i8m1_i8mf8 | ( | ... | ) | __riscv_vlmul_trunc_v_i8m1_i8mf8(__VA_ARGS__) |
| #define vlmul_trunc_v_i8m2_i8m1 | ( | ... | ) | __riscv_vlmul_trunc_v_i8m2_i8m1(__VA_ARGS__) |
| #define vlmul_trunc_v_i8m2_i8mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_i8m2_i8mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_i8m2_i8mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_i8m2_i8mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_i8m2_i8mf8 | ( | ... | ) | __riscv_vlmul_trunc_v_i8m2_i8mf8(__VA_ARGS__) |
| #define vlmul_trunc_v_i8m4_i8m1 | ( | ... | ) | __riscv_vlmul_trunc_v_i8m4_i8m1(__VA_ARGS__) |
| #define vlmul_trunc_v_i8m4_i8m2 | ( | ... | ) | __riscv_vlmul_trunc_v_i8m4_i8m2(__VA_ARGS__) |
| #define vlmul_trunc_v_i8m4_i8mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_i8m4_i8mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_i8m4_i8mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_i8m4_i8mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_i8m4_i8mf8 | ( | ... | ) | __riscv_vlmul_trunc_v_i8m4_i8mf8(__VA_ARGS__) |
| #define vlmul_trunc_v_i8m8_i8m1 | ( | ... | ) | __riscv_vlmul_trunc_v_i8m8_i8m1(__VA_ARGS__) |
| #define vlmul_trunc_v_i8m8_i8m2 | ( | ... | ) | __riscv_vlmul_trunc_v_i8m8_i8m2(__VA_ARGS__) |
| #define vlmul_trunc_v_i8m8_i8m4 | ( | ... | ) | __riscv_vlmul_trunc_v_i8m8_i8m4(__VA_ARGS__) |
| #define vlmul_trunc_v_i8m8_i8mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_i8m8_i8mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_i8m8_i8mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_i8m8_i8mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_i8m8_i8mf8 | ( | ... | ) | __riscv_vlmul_trunc_v_i8m8_i8mf8(__VA_ARGS__) |
| #define vlmul_trunc_v_i8mf2_i8mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_i8mf2_i8mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_i8mf2_i8mf8 | ( | ... | ) | __riscv_vlmul_trunc_v_i8mf2_i8mf8(__VA_ARGS__) |
| #define vlmul_trunc_v_i8mf4_i8mf8 | ( | ... | ) | __riscv_vlmul_trunc_v_i8mf4_i8mf8(__VA_ARGS__) |
| #define vlmul_trunc_v_u16m1_u16mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_u16m1_u16mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_u16m1_u16mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_u16m1_u16mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_u16m2_u16m1 | ( | ... | ) | __riscv_vlmul_trunc_v_u16m2_u16m1(__VA_ARGS__) |
| #define vlmul_trunc_v_u16m2_u16mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_u16m2_u16mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_u16m2_u16mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_u16m2_u16mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_u16m4_u16m1 | ( | ... | ) | __riscv_vlmul_trunc_v_u16m4_u16m1(__VA_ARGS__) |
| #define vlmul_trunc_v_u16m4_u16m2 | ( | ... | ) | __riscv_vlmul_trunc_v_u16m4_u16m2(__VA_ARGS__) |
| #define vlmul_trunc_v_u16m4_u16mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_u16m4_u16mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_u16m4_u16mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_u16m4_u16mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_u16m8_u16m1 | ( | ... | ) | __riscv_vlmul_trunc_v_u16m8_u16m1(__VA_ARGS__) |
| #define vlmul_trunc_v_u16m8_u16m2 | ( | ... | ) | __riscv_vlmul_trunc_v_u16m8_u16m2(__VA_ARGS__) |
| #define vlmul_trunc_v_u16m8_u16m4 | ( | ... | ) | __riscv_vlmul_trunc_v_u16m8_u16m4(__VA_ARGS__) |
| #define vlmul_trunc_v_u16m8_u16mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_u16m8_u16mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_u16m8_u16mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_u16m8_u16mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_u16mf2_u16mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_u16mf2_u16mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_u32m1_u32mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_u32m1_u32mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_u32m2_u32m1 | ( | ... | ) | __riscv_vlmul_trunc_v_u32m2_u32m1(__VA_ARGS__) |
| #define vlmul_trunc_v_u32m2_u32mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_u32m2_u32mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_u32m4_u32m1 | ( | ... | ) | __riscv_vlmul_trunc_v_u32m4_u32m1(__VA_ARGS__) |
| #define vlmul_trunc_v_u32m4_u32m2 | ( | ... | ) | __riscv_vlmul_trunc_v_u32m4_u32m2(__VA_ARGS__) |
| #define vlmul_trunc_v_u32m4_u32mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_u32m4_u32mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_u32m8_u32m1 | ( | ... | ) | __riscv_vlmul_trunc_v_u32m8_u32m1(__VA_ARGS__) |
| #define vlmul_trunc_v_u32m8_u32m2 | ( | ... | ) | __riscv_vlmul_trunc_v_u32m8_u32m2(__VA_ARGS__) |
| #define vlmul_trunc_v_u32m8_u32m4 | ( | ... | ) | __riscv_vlmul_trunc_v_u32m8_u32m4(__VA_ARGS__) |
| #define vlmul_trunc_v_u32m8_u32mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_u32m8_u32mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_u64m2_u64m1 | ( | ... | ) | __riscv_vlmul_trunc_v_u64m2_u64m1(__VA_ARGS__) |
| #define vlmul_trunc_v_u64m4_u64m1 | ( | ... | ) | __riscv_vlmul_trunc_v_u64m4_u64m1(__VA_ARGS__) |
| #define vlmul_trunc_v_u64m4_u64m2 | ( | ... | ) | __riscv_vlmul_trunc_v_u64m4_u64m2(__VA_ARGS__) |
| #define vlmul_trunc_v_u64m8_u64m1 | ( | ... | ) | __riscv_vlmul_trunc_v_u64m8_u64m1(__VA_ARGS__) |
| #define vlmul_trunc_v_u64m8_u64m2 | ( | ... | ) | __riscv_vlmul_trunc_v_u64m8_u64m2(__VA_ARGS__) |
| #define vlmul_trunc_v_u64m8_u64m4 | ( | ... | ) | __riscv_vlmul_trunc_v_u64m8_u64m4(__VA_ARGS__) |
| #define vlmul_trunc_v_u8m1_u8mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_u8m1_u8mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_u8m1_u8mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_u8m1_u8mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_u8m1_u8mf8 | ( | ... | ) | __riscv_vlmul_trunc_v_u8m1_u8mf8(__VA_ARGS__) |
| #define vlmul_trunc_v_u8m2_u8m1 | ( | ... | ) | __riscv_vlmul_trunc_v_u8m2_u8m1(__VA_ARGS__) |
| #define vlmul_trunc_v_u8m2_u8mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_u8m2_u8mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_u8m2_u8mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_u8m2_u8mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_u8m2_u8mf8 | ( | ... | ) | __riscv_vlmul_trunc_v_u8m2_u8mf8(__VA_ARGS__) |
| #define vlmul_trunc_v_u8m4_u8m1 | ( | ... | ) | __riscv_vlmul_trunc_v_u8m4_u8m1(__VA_ARGS__) |
| #define vlmul_trunc_v_u8m4_u8m2 | ( | ... | ) | __riscv_vlmul_trunc_v_u8m4_u8m2(__VA_ARGS__) |
| #define vlmul_trunc_v_u8m4_u8mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_u8m4_u8mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_u8m4_u8mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_u8m4_u8mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_u8m4_u8mf8 | ( | ... | ) | __riscv_vlmul_trunc_v_u8m4_u8mf8(__VA_ARGS__) |
| #define vlmul_trunc_v_u8m8_u8m1 | ( | ... | ) | __riscv_vlmul_trunc_v_u8m8_u8m1(__VA_ARGS__) |
| #define vlmul_trunc_v_u8m8_u8m2 | ( | ... | ) | __riscv_vlmul_trunc_v_u8m8_u8m2(__VA_ARGS__) |
| #define vlmul_trunc_v_u8m8_u8m4 | ( | ... | ) | __riscv_vlmul_trunc_v_u8m8_u8m4(__VA_ARGS__) |
| #define vlmul_trunc_v_u8m8_u8mf2 | ( | ... | ) | __riscv_vlmul_trunc_v_u8m8_u8mf2(__VA_ARGS__) |
| #define vlmul_trunc_v_u8m8_u8mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_u8m8_u8mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_u8m8_u8mf8 | ( | ... | ) | __riscv_vlmul_trunc_v_u8m8_u8mf8(__VA_ARGS__) |
| #define vlmul_trunc_v_u8mf2_u8mf4 | ( | ... | ) | __riscv_vlmul_trunc_v_u8mf2_u8mf4(__VA_ARGS__) |
| #define vlmul_trunc_v_u8mf2_u8mf8 | ( | ... | ) | __riscv_vlmul_trunc_v_u8mf2_u8mf8(__VA_ARGS__) |
| #define vlmul_trunc_v_u8mf4_u8mf8 | ( | ... | ) | __riscv_vlmul_trunc_v_u8mf4_u8mf8(__VA_ARGS__) |
| #define vloxei16_v_f16m1 | ( | ... | ) | __riscv_vloxei16_v_f16m1(__VA_ARGS__) |
| #define vloxei16_v_f16m1_m | ( | ... | ) | __riscv_vloxei16_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxei16_v_f16m2 | ( | ... | ) | __riscv_vloxei16_v_f16m2(__VA_ARGS__) |
| #define vloxei16_v_f16m2_m | ( | ... | ) | __riscv_vloxei16_v_f16m2_tumu(__VA_ARGS__) |
| #define vloxei16_v_f16m4 | ( | ... | ) | __riscv_vloxei16_v_f16m4(__VA_ARGS__) |
| #define vloxei16_v_f16m4_m | ( | ... | ) | __riscv_vloxei16_v_f16m4_tumu(__VA_ARGS__) |
| #define vloxei16_v_f16m8 | ( | ... | ) | __riscv_vloxei16_v_f16m8(__VA_ARGS__) |
| #define vloxei16_v_f16m8_m | ( | ... | ) | __riscv_vloxei16_v_f16m8_tumu(__VA_ARGS__) |
| #define vloxei16_v_f16mf2 | ( | ... | ) | __riscv_vloxei16_v_f16mf2(__VA_ARGS__) |
| #define vloxei16_v_f16mf2_m | ( | ... | ) | __riscv_vloxei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxei16_v_f16mf4 | ( | ... | ) | __riscv_vloxei16_v_f16mf4(__VA_ARGS__) |
| #define vloxei16_v_f16mf4_m | ( | ... | ) | __riscv_vloxei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxei16_v_f32m1 | ( | ... | ) | __riscv_vloxei16_v_f32m1(__VA_ARGS__) |
| #define vloxei16_v_f32m1_m | ( | ... | ) | __riscv_vloxei16_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxei16_v_f32m2 | ( | ... | ) | __riscv_vloxei16_v_f32m2(__VA_ARGS__) |
| #define vloxei16_v_f32m2_m | ( | ... | ) | __riscv_vloxei16_v_f32m2_tumu(__VA_ARGS__) |
| #define vloxei16_v_f32m4 | ( | ... | ) | __riscv_vloxei16_v_f32m4(__VA_ARGS__) |
| #define vloxei16_v_f32m4_m | ( | ... | ) | __riscv_vloxei16_v_f32m4_tumu(__VA_ARGS__) |
| #define vloxei16_v_f32m8 | ( | ... | ) | __riscv_vloxei16_v_f32m8(__VA_ARGS__) |
| #define vloxei16_v_f32m8_m | ( | ... | ) | __riscv_vloxei16_v_f32m8_tumu(__VA_ARGS__) |
| #define vloxei16_v_f32mf2 | ( | ... | ) | __riscv_vloxei16_v_f32mf2(__VA_ARGS__) |
| #define vloxei16_v_f32mf2_m | ( | ... | ) | __riscv_vloxei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxei16_v_f64m1 | ( | ... | ) | __riscv_vloxei16_v_f64m1(__VA_ARGS__) |
| #define vloxei16_v_f64m1_m | ( | ... | ) | __riscv_vloxei16_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxei16_v_f64m2 | ( | ... | ) | __riscv_vloxei16_v_f64m2(__VA_ARGS__) |
| #define vloxei16_v_f64m2_m | ( | ... | ) | __riscv_vloxei16_v_f64m2_tumu(__VA_ARGS__) |
| #define vloxei16_v_f64m4 | ( | ... | ) | __riscv_vloxei16_v_f64m4(__VA_ARGS__) |
| #define vloxei16_v_f64m4_m | ( | ... | ) | __riscv_vloxei16_v_f64m4_tumu(__VA_ARGS__) |
| #define vloxei16_v_f64m8 | ( | ... | ) | __riscv_vloxei16_v_f64m8(__VA_ARGS__) |
| #define vloxei16_v_f64m8_m | ( | ... | ) | __riscv_vloxei16_v_f64m8_tumu(__VA_ARGS__) |
| #define vloxei16_v_i16m1 | ( | ... | ) | __riscv_vloxei16_v_i16m1(__VA_ARGS__) |
| #define vloxei16_v_i16m1_m | ( | ... | ) | __riscv_vloxei16_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxei16_v_i16m2 | ( | ... | ) | __riscv_vloxei16_v_i16m2(__VA_ARGS__) |
| #define vloxei16_v_i16m2_m | ( | ... | ) | __riscv_vloxei16_v_i16m2_tumu(__VA_ARGS__) |
| #define vloxei16_v_i16m4 | ( | ... | ) | __riscv_vloxei16_v_i16m4(__VA_ARGS__) |
| #define vloxei16_v_i16m4_m | ( | ... | ) | __riscv_vloxei16_v_i16m4_tumu(__VA_ARGS__) |
| #define vloxei16_v_i16m8 | ( | ... | ) | __riscv_vloxei16_v_i16m8(__VA_ARGS__) |
| #define vloxei16_v_i16m8_m | ( | ... | ) | __riscv_vloxei16_v_i16m8_tumu(__VA_ARGS__) |
| #define vloxei16_v_i16mf2 | ( | ... | ) | __riscv_vloxei16_v_i16mf2(__VA_ARGS__) |
| #define vloxei16_v_i16mf2_m | ( | ... | ) | __riscv_vloxei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxei16_v_i16mf4 | ( | ... | ) | __riscv_vloxei16_v_i16mf4(__VA_ARGS__) |
| #define vloxei16_v_i16mf4_m | ( | ... | ) | __riscv_vloxei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxei16_v_i32m1 | ( | ... | ) | __riscv_vloxei16_v_i32m1(__VA_ARGS__) |
| #define vloxei16_v_i32m1_m | ( | ... | ) | __riscv_vloxei16_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxei16_v_i32m2 | ( | ... | ) | __riscv_vloxei16_v_i32m2(__VA_ARGS__) |
| #define vloxei16_v_i32m2_m | ( | ... | ) | __riscv_vloxei16_v_i32m2_tumu(__VA_ARGS__) |
| #define vloxei16_v_i32m4 | ( | ... | ) | __riscv_vloxei16_v_i32m4(__VA_ARGS__) |
| #define vloxei16_v_i32m4_m | ( | ... | ) | __riscv_vloxei16_v_i32m4_tumu(__VA_ARGS__) |
| #define vloxei16_v_i32m8 | ( | ... | ) | __riscv_vloxei16_v_i32m8(__VA_ARGS__) |
| #define vloxei16_v_i32m8_m | ( | ... | ) | __riscv_vloxei16_v_i32m8_tumu(__VA_ARGS__) |
| #define vloxei16_v_i32mf2 | ( | ... | ) | __riscv_vloxei16_v_i32mf2(__VA_ARGS__) |
| #define vloxei16_v_i32mf2_m | ( | ... | ) | __riscv_vloxei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxei16_v_i64m1 | ( | ... | ) | __riscv_vloxei16_v_i64m1(__VA_ARGS__) |
| #define vloxei16_v_i64m1_m | ( | ... | ) | __riscv_vloxei16_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxei16_v_i64m2 | ( | ... | ) | __riscv_vloxei16_v_i64m2(__VA_ARGS__) |
| #define vloxei16_v_i64m2_m | ( | ... | ) | __riscv_vloxei16_v_i64m2_tumu(__VA_ARGS__) |
| #define vloxei16_v_i64m4 | ( | ... | ) | __riscv_vloxei16_v_i64m4(__VA_ARGS__) |
| #define vloxei16_v_i64m4_m | ( | ... | ) | __riscv_vloxei16_v_i64m4_tumu(__VA_ARGS__) |
| #define vloxei16_v_i64m8 | ( | ... | ) | __riscv_vloxei16_v_i64m8(__VA_ARGS__) |
| #define vloxei16_v_i64m8_m | ( | ... | ) | __riscv_vloxei16_v_i64m8_tumu(__VA_ARGS__) |
| #define vloxei16_v_i8m1 | ( | ... | ) | __riscv_vloxei16_v_i8m1(__VA_ARGS__) |
| #define vloxei16_v_i8m1_m | ( | ... | ) | __riscv_vloxei16_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxei16_v_i8m2 | ( | ... | ) | __riscv_vloxei16_v_i8m2(__VA_ARGS__) |
| #define vloxei16_v_i8m2_m | ( | ... | ) | __riscv_vloxei16_v_i8m2_tumu(__VA_ARGS__) |
| #define vloxei16_v_i8m4 | ( | ... | ) | __riscv_vloxei16_v_i8m4(__VA_ARGS__) |
| #define vloxei16_v_i8m4_m | ( | ... | ) | __riscv_vloxei16_v_i8m4_tumu(__VA_ARGS__) |
| #define vloxei16_v_i8mf2 | ( | ... | ) | __riscv_vloxei16_v_i8mf2(__VA_ARGS__) |
| #define vloxei16_v_i8mf2_m | ( | ... | ) | __riscv_vloxei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxei16_v_i8mf4 | ( | ... | ) | __riscv_vloxei16_v_i8mf4(__VA_ARGS__) |
| #define vloxei16_v_i8mf4_m | ( | ... | ) | __riscv_vloxei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxei16_v_i8mf8 | ( | ... | ) | __riscv_vloxei16_v_i8mf8(__VA_ARGS__) |
| #define vloxei16_v_i8mf8_m | ( | ... | ) | __riscv_vloxei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxei16_v_u16m1 | ( | ... | ) | __riscv_vloxei16_v_u16m1(__VA_ARGS__) |
| #define vloxei16_v_u16m1_m | ( | ... | ) | __riscv_vloxei16_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxei16_v_u16m2 | ( | ... | ) | __riscv_vloxei16_v_u16m2(__VA_ARGS__) |
| #define vloxei16_v_u16m2_m | ( | ... | ) | __riscv_vloxei16_v_u16m2_tumu(__VA_ARGS__) |
| #define vloxei16_v_u16m4 | ( | ... | ) | __riscv_vloxei16_v_u16m4(__VA_ARGS__) |
| #define vloxei16_v_u16m4_m | ( | ... | ) | __riscv_vloxei16_v_u16m4_tumu(__VA_ARGS__) |
| #define vloxei16_v_u16m8 | ( | ... | ) | __riscv_vloxei16_v_u16m8(__VA_ARGS__) |
| #define vloxei16_v_u16m8_m | ( | ... | ) | __riscv_vloxei16_v_u16m8_tumu(__VA_ARGS__) |
| #define vloxei16_v_u16mf2 | ( | ... | ) | __riscv_vloxei16_v_u16mf2(__VA_ARGS__) |
| #define vloxei16_v_u16mf2_m | ( | ... | ) | __riscv_vloxei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxei16_v_u16mf4 | ( | ... | ) | __riscv_vloxei16_v_u16mf4(__VA_ARGS__) |
| #define vloxei16_v_u16mf4_m | ( | ... | ) | __riscv_vloxei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxei16_v_u32m1 | ( | ... | ) | __riscv_vloxei16_v_u32m1(__VA_ARGS__) |
| #define vloxei16_v_u32m1_m | ( | ... | ) | __riscv_vloxei16_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxei16_v_u32m2 | ( | ... | ) | __riscv_vloxei16_v_u32m2(__VA_ARGS__) |
| #define vloxei16_v_u32m2_m | ( | ... | ) | __riscv_vloxei16_v_u32m2_tumu(__VA_ARGS__) |
| #define vloxei16_v_u32m4 | ( | ... | ) | __riscv_vloxei16_v_u32m4(__VA_ARGS__) |
| #define vloxei16_v_u32m4_m | ( | ... | ) | __riscv_vloxei16_v_u32m4_tumu(__VA_ARGS__) |
| #define vloxei16_v_u32m8 | ( | ... | ) | __riscv_vloxei16_v_u32m8(__VA_ARGS__) |
| #define vloxei16_v_u32m8_m | ( | ... | ) | __riscv_vloxei16_v_u32m8_tumu(__VA_ARGS__) |
| #define vloxei16_v_u32mf2 | ( | ... | ) | __riscv_vloxei16_v_u32mf2(__VA_ARGS__) |
| #define vloxei16_v_u32mf2_m | ( | ... | ) | __riscv_vloxei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxei16_v_u64m1 | ( | ... | ) | __riscv_vloxei16_v_u64m1(__VA_ARGS__) |
| #define vloxei16_v_u64m1_m | ( | ... | ) | __riscv_vloxei16_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxei16_v_u64m2 | ( | ... | ) | __riscv_vloxei16_v_u64m2(__VA_ARGS__) |
| #define vloxei16_v_u64m2_m | ( | ... | ) | __riscv_vloxei16_v_u64m2_tumu(__VA_ARGS__) |
| #define vloxei16_v_u64m4 | ( | ... | ) | __riscv_vloxei16_v_u64m4(__VA_ARGS__) |
| #define vloxei16_v_u64m4_m | ( | ... | ) | __riscv_vloxei16_v_u64m4_tumu(__VA_ARGS__) |
| #define vloxei16_v_u64m8 | ( | ... | ) | __riscv_vloxei16_v_u64m8(__VA_ARGS__) |
| #define vloxei16_v_u64m8_m | ( | ... | ) | __riscv_vloxei16_v_u64m8_tumu(__VA_ARGS__) |
| #define vloxei16_v_u8m1 | ( | ... | ) | __riscv_vloxei16_v_u8m1(__VA_ARGS__) |
| #define vloxei16_v_u8m1_m | ( | ... | ) | __riscv_vloxei16_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxei16_v_u8m2 | ( | ... | ) | __riscv_vloxei16_v_u8m2(__VA_ARGS__) |
| #define vloxei16_v_u8m2_m | ( | ... | ) | __riscv_vloxei16_v_u8m2_tumu(__VA_ARGS__) |
| #define vloxei16_v_u8m4 | ( | ... | ) | __riscv_vloxei16_v_u8m4(__VA_ARGS__) |
| #define vloxei16_v_u8m4_m | ( | ... | ) | __riscv_vloxei16_v_u8m4_tumu(__VA_ARGS__) |
| #define vloxei16_v_u8mf2 | ( | ... | ) | __riscv_vloxei16_v_u8mf2(__VA_ARGS__) |
| #define vloxei16_v_u8mf2_m | ( | ... | ) | __riscv_vloxei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxei16_v_u8mf4 | ( | ... | ) | __riscv_vloxei16_v_u8mf4(__VA_ARGS__) |
| #define vloxei16_v_u8mf4_m | ( | ... | ) | __riscv_vloxei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxei16_v_u8mf8 | ( | ... | ) | __riscv_vloxei16_v_u8mf8(__VA_ARGS__) |
| #define vloxei16_v_u8mf8_m | ( | ... | ) | __riscv_vloxei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxei32_v_f16m1 | ( | ... | ) | __riscv_vloxei32_v_f16m1(__VA_ARGS__) |
| #define vloxei32_v_f16m1_m | ( | ... | ) | __riscv_vloxei32_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxei32_v_f16m2 | ( | ... | ) | __riscv_vloxei32_v_f16m2(__VA_ARGS__) |
| #define vloxei32_v_f16m2_m | ( | ... | ) | __riscv_vloxei32_v_f16m2_tumu(__VA_ARGS__) |
| #define vloxei32_v_f16m4 | ( | ... | ) | __riscv_vloxei32_v_f16m4(__VA_ARGS__) |
| #define vloxei32_v_f16m4_m | ( | ... | ) | __riscv_vloxei32_v_f16m4_tumu(__VA_ARGS__) |
| #define vloxei32_v_f16mf2 | ( | ... | ) | __riscv_vloxei32_v_f16mf2(__VA_ARGS__) |
| #define vloxei32_v_f16mf2_m | ( | ... | ) | __riscv_vloxei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxei32_v_f16mf4 | ( | ... | ) | __riscv_vloxei32_v_f16mf4(__VA_ARGS__) |
| #define vloxei32_v_f16mf4_m | ( | ... | ) | __riscv_vloxei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxei32_v_f32m1 | ( | ... | ) | __riscv_vloxei32_v_f32m1(__VA_ARGS__) |
| #define vloxei32_v_f32m1_m | ( | ... | ) | __riscv_vloxei32_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxei32_v_f32m2 | ( | ... | ) | __riscv_vloxei32_v_f32m2(__VA_ARGS__) |
| #define vloxei32_v_f32m2_m | ( | ... | ) | __riscv_vloxei32_v_f32m2_tumu(__VA_ARGS__) |
| #define vloxei32_v_f32m4 | ( | ... | ) | __riscv_vloxei32_v_f32m4(__VA_ARGS__) |
| #define vloxei32_v_f32m4_m | ( | ... | ) | __riscv_vloxei32_v_f32m4_tumu(__VA_ARGS__) |
| #define vloxei32_v_f32m8 | ( | ... | ) | __riscv_vloxei32_v_f32m8(__VA_ARGS__) |
| #define vloxei32_v_f32m8_m | ( | ... | ) | __riscv_vloxei32_v_f32m8_tumu(__VA_ARGS__) |
| #define vloxei32_v_f32mf2 | ( | ... | ) | __riscv_vloxei32_v_f32mf2(__VA_ARGS__) |
| #define vloxei32_v_f32mf2_m | ( | ... | ) | __riscv_vloxei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxei32_v_f64m1 | ( | ... | ) | __riscv_vloxei32_v_f64m1(__VA_ARGS__) |
| #define vloxei32_v_f64m1_m | ( | ... | ) | __riscv_vloxei32_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxei32_v_f64m2 | ( | ... | ) | __riscv_vloxei32_v_f64m2(__VA_ARGS__) |
| #define vloxei32_v_f64m2_m | ( | ... | ) | __riscv_vloxei32_v_f64m2_tumu(__VA_ARGS__) |
| #define vloxei32_v_f64m4 | ( | ... | ) | __riscv_vloxei32_v_f64m4(__VA_ARGS__) |
| #define vloxei32_v_f64m4_m | ( | ... | ) | __riscv_vloxei32_v_f64m4_tumu(__VA_ARGS__) |
| #define vloxei32_v_f64m8 | ( | ... | ) | __riscv_vloxei32_v_f64m8(__VA_ARGS__) |
| #define vloxei32_v_f64m8_m | ( | ... | ) | __riscv_vloxei32_v_f64m8_tumu(__VA_ARGS__) |
| #define vloxei32_v_i16m1 | ( | ... | ) | __riscv_vloxei32_v_i16m1(__VA_ARGS__) |
| #define vloxei32_v_i16m1_m | ( | ... | ) | __riscv_vloxei32_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxei32_v_i16m2 | ( | ... | ) | __riscv_vloxei32_v_i16m2(__VA_ARGS__) |
| #define vloxei32_v_i16m2_m | ( | ... | ) | __riscv_vloxei32_v_i16m2_tumu(__VA_ARGS__) |
| #define vloxei32_v_i16m4 | ( | ... | ) | __riscv_vloxei32_v_i16m4(__VA_ARGS__) |
| #define vloxei32_v_i16m4_m | ( | ... | ) | __riscv_vloxei32_v_i16m4_tumu(__VA_ARGS__) |
| #define vloxei32_v_i16mf2 | ( | ... | ) | __riscv_vloxei32_v_i16mf2(__VA_ARGS__) |
| #define vloxei32_v_i16mf2_m | ( | ... | ) | __riscv_vloxei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxei32_v_i16mf4 | ( | ... | ) | __riscv_vloxei32_v_i16mf4(__VA_ARGS__) |
| #define vloxei32_v_i16mf4_m | ( | ... | ) | __riscv_vloxei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxei32_v_i32m1 | ( | ... | ) | __riscv_vloxei32_v_i32m1(__VA_ARGS__) |
| #define vloxei32_v_i32m1_m | ( | ... | ) | __riscv_vloxei32_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxei32_v_i32m2 | ( | ... | ) | __riscv_vloxei32_v_i32m2(__VA_ARGS__) |
| #define vloxei32_v_i32m2_m | ( | ... | ) | __riscv_vloxei32_v_i32m2_tumu(__VA_ARGS__) |
| #define vloxei32_v_i32m4 | ( | ... | ) | __riscv_vloxei32_v_i32m4(__VA_ARGS__) |
| #define vloxei32_v_i32m4_m | ( | ... | ) | __riscv_vloxei32_v_i32m4_tumu(__VA_ARGS__) |
| #define vloxei32_v_i32m8 | ( | ... | ) | __riscv_vloxei32_v_i32m8(__VA_ARGS__) |
| #define vloxei32_v_i32m8_m | ( | ... | ) | __riscv_vloxei32_v_i32m8_tumu(__VA_ARGS__) |
| #define vloxei32_v_i32mf2 | ( | ... | ) | __riscv_vloxei32_v_i32mf2(__VA_ARGS__) |
| #define vloxei32_v_i32mf2_m | ( | ... | ) | __riscv_vloxei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxei32_v_i64m1 | ( | ... | ) | __riscv_vloxei32_v_i64m1(__VA_ARGS__) |
| #define vloxei32_v_i64m1_m | ( | ... | ) | __riscv_vloxei32_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxei32_v_i64m2 | ( | ... | ) | __riscv_vloxei32_v_i64m2(__VA_ARGS__) |
| #define vloxei32_v_i64m2_m | ( | ... | ) | __riscv_vloxei32_v_i64m2_tumu(__VA_ARGS__) |
| #define vloxei32_v_i64m4 | ( | ... | ) | __riscv_vloxei32_v_i64m4(__VA_ARGS__) |
| #define vloxei32_v_i64m4_m | ( | ... | ) | __riscv_vloxei32_v_i64m4_tumu(__VA_ARGS__) |
| #define vloxei32_v_i64m8 | ( | ... | ) | __riscv_vloxei32_v_i64m8(__VA_ARGS__) |
| #define vloxei32_v_i64m8_m | ( | ... | ) | __riscv_vloxei32_v_i64m8_tumu(__VA_ARGS__) |
| #define vloxei32_v_i8m1 | ( | ... | ) | __riscv_vloxei32_v_i8m1(__VA_ARGS__) |
| #define vloxei32_v_i8m1_m | ( | ... | ) | __riscv_vloxei32_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxei32_v_i8m2 | ( | ... | ) | __riscv_vloxei32_v_i8m2(__VA_ARGS__) |
| #define vloxei32_v_i8m2_m | ( | ... | ) | __riscv_vloxei32_v_i8m2_tumu(__VA_ARGS__) |
| #define vloxei32_v_i8mf2 | ( | ... | ) | __riscv_vloxei32_v_i8mf2(__VA_ARGS__) |
| #define vloxei32_v_i8mf2_m | ( | ... | ) | __riscv_vloxei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxei32_v_i8mf4 | ( | ... | ) | __riscv_vloxei32_v_i8mf4(__VA_ARGS__) |
| #define vloxei32_v_i8mf4_m | ( | ... | ) | __riscv_vloxei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxei32_v_i8mf8 | ( | ... | ) | __riscv_vloxei32_v_i8mf8(__VA_ARGS__) |
| #define vloxei32_v_i8mf8_m | ( | ... | ) | __riscv_vloxei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxei32_v_u16m1 | ( | ... | ) | __riscv_vloxei32_v_u16m1(__VA_ARGS__) |
| #define vloxei32_v_u16m1_m | ( | ... | ) | __riscv_vloxei32_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxei32_v_u16m2 | ( | ... | ) | __riscv_vloxei32_v_u16m2(__VA_ARGS__) |
| #define vloxei32_v_u16m2_m | ( | ... | ) | __riscv_vloxei32_v_u16m2_tumu(__VA_ARGS__) |
| #define vloxei32_v_u16m4 | ( | ... | ) | __riscv_vloxei32_v_u16m4(__VA_ARGS__) |
| #define vloxei32_v_u16m4_m | ( | ... | ) | __riscv_vloxei32_v_u16m4_tumu(__VA_ARGS__) |
| #define vloxei32_v_u16mf2 | ( | ... | ) | __riscv_vloxei32_v_u16mf2(__VA_ARGS__) |
| #define vloxei32_v_u16mf2_m | ( | ... | ) | __riscv_vloxei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxei32_v_u16mf4 | ( | ... | ) | __riscv_vloxei32_v_u16mf4(__VA_ARGS__) |
| #define vloxei32_v_u16mf4_m | ( | ... | ) | __riscv_vloxei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxei32_v_u32m1 | ( | ... | ) | __riscv_vloxei32_v_u32m1(__VA_ARGS__) |
| #define vloxei32_v_u32m1_m | ( | ... | ) | __riscv_vloxei32_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxei32_v_u32m2 | ( | ... | ) | __riscv_vloxei32_v_u32m2(__VA_ARGS__) |
| #define vloxei32_v_u32m2_m | ( | ... | ) | __riscv_vloxei32_v_u32m2_tumu(__VA_ARGS__) |
| #define vloxei32_v_u32m4 | ( | ... | ) | __riscv_vloxei32_v_u32m4(__VA_ARGS__) |
| #define vloxei32_v_u32m4_m | ( | ... | ) | __riscv_vloxei32_v_u32m4_tumu(__VA_ARGS__) |
| #define vloxei32_v_u32m8 | ( | ... | ) | __riscv_vloxei32_v_u32m8(__VA_ARGS__) |
| #define vloxei32_v_u32m8_m | ( | ... | ) | __riscv_vloxei32_v_u32m8_tumu(__VA_ARGS__) |
| #define vloxei32_v_u32mf2 | ( | ... | ) | __riscv_vloxei32_v_u32mf2(__VA_ARGS__) |
| #define vloxei32_v_u32mf2_m | ( | ... | ) | __riscv_vloxei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxei32_v_u64m1 | ( | ... | ) | __riscv_vloxei32_v_u64m1(__VA_ARGS__) |
| #define vloxei32_v_u64m1_m | ( | ... | ) | __riscv_vloxei32_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxei32_v_u64m2 | ( | ... | ) | __riscv_vloxei32_v_u64m2(__VA_ARGS__) |
| #define vloxei32_v_u64m2_m | ( | ... | ) | __riscv_vloxei32_v_u64m2_tumu(__VA_ARGS__) |
| #define vloxei32_v_u64m4 | ( | ... | ) | __riscv_vloxei32_v_u64m4(__VA_ARGS__) |
| #define vloxei32_v_u64m4_m | ( | ... | ) | __riscv_vloxei32_v_u64m4_tumu(__VA_ARGS__) |
| #define vloxei32_v_u64m8 | ( | ... | ) | __riscv_vloxei32_v_u64m8(__VA_ARGS__) |
| #define vloxei32_v_u64m8_m | ( | ... | ) | __riscv_vloxei32_v_u64m8_tumu(__VA_ARGS__) |
| #define vloxei32_v_u8m1 | ( | ... | ) | __riscv_vloxei32_v_u8m1(__VA_ARGS__) |
| #define vloxei32_v_u8m1_m | ( | ... | ) | __riscv_vloxei32_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxei32_v_u8m2 | ( | ... | ) | __riscv_vloxei32_v_u8m2(__VA_ARGS__) |
| #define vloxei32_v_u8m2_m | ( | ... | ) | __riscv_vloxei32_v_u8m2_tumu(__VA_ARGS__) |
| #define vloxei32_v_u8mf2 | ( | ... | ) | __riscv_vloxei32_v_u8mf2(__VA_ARGS__) |
| #define vloxei32_v_u8mf2_m | ( | ... | ) | __riscv_vloxei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxei32_v_u8mf4 | ( | ... | ) | __riscv_vloxei32_v_u8mf4(__VA_ARGS__) |
| #define vloxei32_v_u8mf4_m | ( | ... | ) | __riscv_vloxei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxei32_v_u8mf8 | ( | ... | ) | __riscv_vloxei32_v_u8mf8(__VA_ARGS__) |
| #define vloxei32_v_u8mf8_m | ( | ... | ) | __riscv_vloxei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxei64_v_f16m1 | ( | ... | ) | __riscv_vloxei64_v_f16m1(__VA_ARGS__) |
| #define vloxei64_v_f16m1_m | ( | ... | ) | __riscv_vloxei64_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxei64_v_f16m2 | ( | ... | ) | __riscv_vloxei64_v_f16m2(__VA_ARGS__) |
| #define vloxei64_v_f16m2_m | ( | ... | ) | __riscv_vloxei64_v_f16m2_tumu(__VA_ARGS__) |
| #define vloxei64_v_f16mf2 | ( | ... | ) | __riscv_vloxei64_v_f16mf2(__VA_ARGS__) |
| #define vloxei64_v_f16mf2_m | ( | ... | ) | __riscv_vloxei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxei64_v_f16mf4 | ( | ... | ) | __riscv_vloxei64_v_f16mf4(__VA_ARGS__) |
| #define vloxei64_v_f16mf4_m | ( | ... | ) | __riscv_vloxei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxei64_v_f32m1 | ( | ... | ) | __riscv_vloxei64_v_f32m1(__VA_ARGS__) |
| #define vloxei64_v_f32m1_m | ( | ... | ) | __riscv_vloxei64_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxei64_v_f32m2 | ( | ... | ) | __riscv_vloxei64_v_f32m2(__VA_ARGS__) |
| #define vloxei64_v_f32m2_m | ( | ... | ) | __riscv_vloxei64_v_f32m2_tumu(__VA_ARGS__) |
| #define vloxei64_v_f32m4 | ( | ... | ) | __riscv_vloxei64_v_f32m4(__VA_ARGS__) |
| #define vloxei64_v_f32m4_m | ( | ... | ) | __riscv_vloxei64_v_f32m4_tumu(__VA_ARGS__) |
| #define vloxei64_v_f32mf2 | ( | ... | ) | __riscv_vloxei64_v_f32mf2(__VA_ARGS__) |
| #define vloxei64_v_f32mf2_m | ( | ... | ) | __riscv_vloxei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxei64_v_f64m1 | ( | ... | ) | __riscv_vloxei64_v_f64m1(__VA_ARGS__) |
| #define vloxei64_v_f64m1_m | ( | ... | ) | __riscv_vloxei64_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxei64_v_f64m2 | ( | ... | ) | __riscv_vloxei64_v_f64m2(__VA_ARGS__) |
| #define vloxei64_v_f64m2_m | ( | ... | ) | __riscv_vloxei64_v_f64m2_tumu(__VA_ARGS__) |
| #define vloxei64_v_f64m4 | ( | ... | ) | __riscv_vloxei64_v_f64m4(__VA_ARGS__) |
| #define vloxei64_v_f64m4_m | ( | ... | ) | __riscv_vloxei64_v_f64m4_tumu(__VA_ARGS__) |
| #define vloxei64_v_f64m8 | ( | ... | ) | __riscv_vloxei64_v_f64m8(__VA_ARGS__) |
| #define vloxei64_v_f64m8_m | ( | ... | ) | __riscv_vloxei64_v_f64m8_tumu(__VA_ARGS__) |
| #define vloxei64_v_i16m1 | ( | ... | ) | __riscv_vloxei64_v_i16m1(__VA_ARGS__) |
| #define vloxei64_v_i16m1_m | ( | ... | ) | __riscv_vloxei64_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxei64_v_i16m2 | ( | ... | ) | __riscv_vloxei64_v_i16m2(__VA_ARGS__) |
| #define vloxei64_v_i16m2_m | ( | ... | ) | __riscv_vloxei64_v_i16m2_tumu(__VA_ARGS__) |
| #define vloxei64_v_i16mf2 | ( | ... | ) | __riscv_vloxei64_v_i16mf2(__VA_ARGS__) |
| #define vloxei64_v_i16mf2_m | ( | ... | ) | __riscv_vloxei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxei64_v_i16mf4 | ( | ... | ) | __riscv_vloxei64_v_i16mf4(__VA_ARGS__) |
| #define vloxei64_v_i16mf4_m | ( | ... | ) | __riscv_vloxei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxei64_v_i32m1 | ( | ... | ) | __riscv_vloxei64_v_i32m1(__VA_ARGS__) |
| #define vloxei64_v_i32m1_m | ( | ... | ) | __riscv_vloxei64_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxei64_v_i32m2 | ( | ... | ) | __riscv_vloxei64_v_i32m2(__VA_ARGS__) |
| #define vloxei64_v_i32m2_m | ( | ... | ) | __riscv_vloxei64_v_i32m2_tumu(__VA_ARGS__) |
| #define vloxei64_v_i32m4 | ( | ... | ) | __riscv_vloxei64_v_i32m4(__VA_ARGS__) |
| #define vloxei64_v_i32m4_m | ( | ... | ) | __riscv_vloxei64_v_i32m4_tumu(__VA_ARGS__) |
| #define vloxei64_v_i32mf2 | ( | ... | ) | __riscv_vloxei64_v_i32mf2(__VA_ARGS__) |
| #define vloxei64_v_i32mf2_m | ( | ... | ) | __riscv_vloxei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxei64_v_i64m1 | ( | ... | ) | __riscv_vloxei64_v_i64m1(__VA_ARGS__) |
| #define vloxei64_v_i64m1_m | ( | ... | ) | __riscv_vloxei64_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxei64_v_i64m2 | ( | ... | ) | __riscv_vloxei64_v_i64m2(__VA_ARGS__) |
| #define vloxei64_v_i64m2_m | ( | ... | ) | __riscv_vloxei64_v_i64m2_tumu(__VA_ARGS__) |
| #define vloxei64_v_i64m4 | ( | ... | ) | __riscv_vloxei64_v_i64m4(__VA_ARGS__) |
| #define vloxei64_v_i64m4_m | ( | ... | ) | __riscv_vloxei64_v_i64m4_tumu(__VA_ARGS__) |
| #define vloxei64_v_i64m8 | ( | ... | ) | __riscv_vloxei64_v_i64m8(__VA_ARGS__) |
| #define vloxei64_v_i64m8_m | ( | ... | ) | __riscv_vloxei64_v_i64m8_tumu(__VA_ARGS__) |
| #define vloxei64_v_i8m1 | ( | ... | ) | __riscv_vloxei64_v_i8m1(__VA_ARGS__) |
| #define vloxei64_v_i8m1_m | ( | ... | ) | __riscv_vloxei64_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxei64_v_i8mf2 | ( | ... | ) | __riscv_vloxei64_v_i8mf2(__VA_ARGS__) |
| #define vloxei64_v_i8mf2_m | ( | ... | ) | __riscv_vloxei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxei64_v_i8mf4 | ( | ... | ) | __riscv_vloxei64_v_i8mf4(__VA_ARGS__) |
| #define vloxei64_v_i8mf4_m | ( | ... | ) | __riscv_vloxei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxei64_v_i8mf8 | ( | ... | ) | __riscv_vloxei64_v_i8mf8(__VA_ARGS__) |
| #define vloxei64_v_i8mf8_m | ( | ... | ) | __riscv_vloxei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxei64_v_u16m1 | ( | ... | ) | __riscv_vloxei64_v_u16m1(__VA_ARGS__) |
| #define vloxei64_v_u16m1_m | ( | ... | ) | __riscv_vloxei64_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxei64_v_u16m2 | ( | ... | ) | __riscv_vloxei64_v_u16m2(__VA_ARGS__) |
| #define vloxei64_v_u16m2_m | ( | ... | ) | __riscv_vloxei64_v_u16m2_tumu(__VA_ARGS__) |
| #define vloxei64_v_u16mf2 | ( | ... | ) | __riscv_vloxei64_v_u16mf2(__VA_ARGS__) |
| #define vloxei64_v_u16mf2_m | ( | ... | ) | __riscv_vloxei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxei64_v_u16mf4 | ( | ... | ) | __riscv_vloxei64_v_u16mf4(__VA_ARGS__) |
| #define vloxei64_v_u16mf4_m | ( | ... | ) | __riscv_vloxei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxei64_v_u32m1 | ( | ... | ) | __riscv_vloxei64_v_u32m1(__VA_ARGS__) |
| #define vloxei64_v_u32m1_m | ( | ... | ) | __riscv_vloxei64_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxei64_v_u32m2 | ( | ... | ) | __riscv_vloxei64_v_u32m2(__VA_ARGS__) |
| #define vloxei64_v_u32m2_m | ( | ... | ) | __riscv_vloxei64_v_u32m2_tumu(__VA_ARGS__) |
| #define vloxei64_v_u32m4 | ( | ... | ) | __riscv_vloxei64_v_u32m4(__VA_ARGS__) |
| #define vloxei64_v_u32m4_m | ( | ... | ) | __riscv_vloxei64_v_u32m4_tumu(__VA_ARGS__) |
| #define vloxei64_v_u32mf2 | ( | ... | ) | __riscv_vloxei64_v_u32mf2(__VA_ARGS__) |
| #define vloxei64_v_u32mf2_m | ( | ... | ) | __riscv_vloxei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxei64_v_u64m1 | ( | ... | ) | __riscv_vloxei64_v_u64m1(__VA_ARGS__) |
| #define vloxei64_v_u64m1_m | ( | ... | ) | __riscv_vloxei64_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxei64_v_u64m2 | ( | ... | ) | __riscv_vloxei64_v_u64m2(__VA_ARGS__) |
| #define vloxei64_v_u64m2_m | ( | ... | ) | __riscv_vloxei64_v_u64m2_tumu(__VA_ARGS__) |
| #define vloxei64_v_u64m4 | ( | ... | ) | __riscv_vloxei64_v_u64m4(__VA_ARGS__) |
| #define vloxei64_v_u64m4_m | ( | ... | ) | __riscv_vloxei64_v_u64m4_tumu(__VA_ARGS__) |
| #define vloxei64_v_u64m8 | ( | ... | ) | __riscv_vloxei64_v_u64m8(__VA_ARGS__) |
| #define vloxei64_v_u64m8_m | ( | ... | ) | __riscv_vloxei64_v_u64m8_tumu(__VA_ARGS__) |
| #define vloxei64_v_u8m1 | ( | ... | ) | __riscv_vloxei64_v_u8m1(__VA_ARGS__) |
| #define vloxei64_v_u8m1_m | ( | ... | ) | __riscv_vloxei64_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxei64_v_u8mf2 | ( | ... | ) | __riscv_vloxei64_v_u8mf2(__VA_ARGS__) |
| #define vloxei64_v_u8mf2_m | ( | ... | ) | __riscv_vloxei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxei64_v_u8mf4 | ( | ... | ) | __riscv_vloxei64_v_u8mf4(__VA_ARGS__) |
| #define vloxei64_v_u8mf4_m | ( | ... | ) | __riscv_vloxei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxei64_v_u8mf8 | ( | ... | ) | __riscv_vloxei64_v_u8mf8(__VA_ARGS__) |
| #define vloxei64_v_u8mf8_m | ( | ... | ) | __riscv_vloxei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxei8_v_f16m1 | ( | ... | ) | __riscv_vloxei8_v_f16m1(__VA_ARGS__) |
| #define vloxei8_v_f16m1_m | ( | ... | ) | __riscv_vloxei8_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxei8_v_f16m2 | ( | ... | ) | __riscv_vloxei8_v_f16m2(__VA_ARGS__) |
| #define vloxei8_v_f16m2_m | ( | ... | ) | __riscv_vloxei8_v_f16m2_tumu(__VA_ARGS__) |
| #define vloxei8_v_f16m4 | ( | ... | ) | __riscv_vloxei8_v_f16m4(__VA_ARGS__) |
| #define vloxei8_v_f16m4_m | ( | ... | ) | __riscv_vloxei8_v_f16m4_tumu(__VA_ARGS__) |
| #define vloxei8_v_f16m8 | ( | ... | ) | __riscv_vloxei8_v_f16m8(__VA_ARGS__) |
| #define vloxei8_v_f16m8_m | ( | ... | ) | __riscv_vloxei8_v_f16m8_tumu(__VA_ARGS__) |
| #define vloxei8_v_f16mf2 | ( | ... | ) | __riscv_vloxei8_v_f16mf2(__VA_ARGS__) |
| #define vloxei8_v_f16mf2_m | ( | ... | ) | __riscv_vloxei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxei8_v_f16mf4 | ( | ... | ) | __riscv_vloxei8_v_f16mf4(__VA_ARGS__) |
| #define vloxei8_v_f16mf4_m | ( | ... | ) | __riscv_vloxei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxei8_v_f32m1 | ( | ... | ) | __riscv_vloxei8_v_f32m1(__VA_ARGS__) |
| #define vloxei8_v_f32m1_m | ( | ... | ) | __riscv_vloxei8_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxei8_v_f32m2 | ( | ... | ) | __riscv_vloxei8_v_f32m2(__VA_ARGS__) |
| #define vloxei8_v_f32m2_m | ( | ... | ) | __riscv_vloxei8_v_f32m2_tumu(__VA_ARGS__) |
| #define vloxei8_v_f32m4 | ( | ... | ) | __riscv_vloxei8_v_f32m4(__VA_ARGS__) |
| #define vloxei8_v_f32m4_m | ( | ... | ) | __riscv_vloxei8_v_f32m4_tumu(__VA_ARGS__) |
| #define vloxei8_v_f32m8 | ( | ... | ) | __riscv_vloxei8_v_f32m8(__VA_ARGS__) |
| #define vloxei8_v_f32m8_m | ( | ... | ) | __riscv_vloxei8_v_f32m8_tumu(__VA_ARGS__) |
| #define vloxei8_v_f32mf2 | ( | ... | ) | __riscv_vloxei8_v_f32mf2(__VA_ARGS__) |
| #define vloxei8_v_f32mf2_m | ( | ... | ) | __riscv_vloxei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxei8_v_f64m1 | ( | ... | ) | __riscv_vloxei8_v_f64m1(__VA_ARGS__) |
| #define vloxei8_v_f64m1_m | ( | ... | ) | __riscv_vloxei8_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxei8_v_f64m2 | ( | ... | ) | __riscv_vloxei8_v_f64m2(__VA_ARGS__) |
| #define vloxei8_v_f64m2_m | ( | ... | ) | __riscv_vloxei8_v_f64m2_tumu(__VA_ARGS__) |
| #define vloxei8_v_f64m4 | ( | ... | ) | __riscv_vloxei8_v_f64m4(__VA_ARGS__) |
| #define vloxei8_v_f64m4_m | ( | ... | ) | __riscv_vloxei8_v_f64m4_tumu(__VA_ARGS__) |
| #define vloxei8_v_f64m8 | ( | ... | ) | __riscv_vloxei8_v_f64m8(__VA_ARGS__) |
| #define vloxei8_v_f64m8_m | ( | ... | ) | __riscv_vloxei8_v_f64m8_tumu(__VA_ARGS__) |
| #define vloxei8_v_i16m1 | ( | ... | ) | __riscv_vloxei8_v_i16m1(__VA_ARGS__) |
| #define vloxei8_v_i16m1_m | ( | ... | ) | __riscv_vloxei8_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxei8_v_i16m2 | ( | ... | ) | __riscv_vloxei8_v_i16m2(__VA_ARGS__) |
| #define vloxei8_v_i16m2_m | ( | ... | ) | __riscv_vloxei8_v_i16m2_tumu(__VA_ARGS__) |
| #define vloxei8_v_i16m4 | ( | ... | ) | __riscv_vloxei8_v_i16m4(__VA_ARGS__) |
| #define vloxei8_v_i16m4_m | ( | ... | ) | __riscv_vloxei8_v_i16m4_tumu(__VA_ARGS__) |
| #define vloxei8_v_i16m8 | ( | ... | ) | __riscv_vloxei8_v_i16m8(__VA_ARGS__) |
| #define vloxei8_v_i16m8_m | ( | ... | ) | __riscv_vloxei8_v_i16m8_tumu(__VA_ARGS__) |
| #define vloxei8_v_i16mf2 | ( | ... | ) | __riscv_vloxei8_v_i16mf2(__VA_ARGS__) |
| #define vloxei8_v_i16mf2_m | ( | ... | ) | __riscv_vloxei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxei8_v_i16mf4 | ( | ... | ) | __riscv_vloxei8_v_i16mf4(__VA_ARGS__) |
| #define vloxei8_v_i16mf4_m | ( | ... | ) | __riscv_vloxei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxei8_v_i32m1 | ( | ... | ) | __riscv_vloxei8_v_i32m1(__VA_ARGS__) |
| #define vloxei8_v_i32m1_m | ( | ... | ) | __riscv_vloxei8_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxei8_v_i32m2 | ( | ... | ) | __riscv_vloxei8_v_i32m2(__VA_ARGS__) |
| #define vloxei8_v_i32m2_m | ( | ... | ) | __riscv_vloxei8_v_i32m2_tumu(__VA_ARGS__) |
| #define vloxei8_v_i32m4 | ( | ... | ) | __riscv_vloxei8_v_i32m4(__VA_ARGS__) |
| #define vloxei8_v_i32m4_m | ( | ... | ) | __riscv_vloxei8_v_i32m4_tumu(__VA_ARGS__) |
| #define vloxei8_v_i32m8 | ( | ... | ) | __riscv_vloxei8_v_i32m8(__VA_ARGS__) |
| #define vloxei8_v_i32m8_m | ( | ... | ) | __riscv_vloxei8_v_i32m8_tumu(__VA_ARGS__) |
| #define vloxei8_v_i32mf2 | ( | ... | ) | __riscv_vloxei8_v_i32mf2(__VA_ARGS__) |
| #define vloxei8_v_i32mf2_m | ( | ... | ) | __riscv_vloxei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxei8_v_i64m1 | ( | ... | ) | __riscv_vloxei8_v_i64m1(__VA_ARGS__) |
| #define vloxei8_v_i64m1_m | ( | ... | ) | __riscv_vloxei8_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxei8_v_i64m2 | ( | ... | ) | __riscv_vloxei8_v_i64m2(__VA_ARGS__) |
| #define vloxei8_v_i64m2_m | ( | ... | ) | __riscv_vloxei8_v_i64m2_tumu(__VA_ARGS__) |
| #define vloxei8_v_i64m4 | ( | ... | ) | __riscv_vloxei8_v_i64m4(__VA_ARGS__) |
| #define vloxei8_v_i64m4_m | ( | ... | ) | __riscv_vloxei8_v_i64m4_tumu(__VA_ARGS__) |
| #define vloxei8_v_i64m8 | ( | ... | ) | __riscv_vloxei8_v_i64m8(__VA_ARGS__) |
| #define vloxei8_v_i64m8_m | ( | ... | ) | __riscv_vloxei8_v_i64m8_tumu(__VA_ARGS__) |
| #define vloxei8_v_i8m1 | ( | ... | ) | __riscv_vloxei8_v_i8m1(__VA_ARGS__) |
| #define vloxei8_v_i8m1_m | ( | ... | ) | __riscv_vloxei8_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxei8_v_i8m2 | ( | ... | ) | __riscv_vloxei8_v_i8m2(__VA_ARGS__) |
| #define vloxei8_v_i8m2_m | ( | ... | ) | __riscv_vloxei8_v_i8m2_tumu(__VA_ARGS__) |
| #define vloxei8_v_i8m4 | ( | ... | ) | __riscv_vloxei8_v_i8m4(__VA_ARGS__) |
| #define vloxei8_v_i8m4_m | ( | ... | ) | __riscv_vloxei8_v_i8m4_tumu(__VA_ARGS__) |
| #define vloxei8_v_i8m8 | ( | ... | ) | __riscv_vloxei8_v_i8m8(__VA_ARGS__) |
| #define vloxei8_v_i8m8_m | ( | ... | ) | __riscv_vloxei8_v_i8m8_tumu(__VA_ARGS__) |
| #define vloxei8_v_i8mf2 | ( | ... | ) | __riscv_vloxei8_v_i8mf2(__VA_ARGS__) |
| #define vloxei8_v_i8mf2_m | ( | ... | ) | __riscv_vloxei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxei8_v_i8mf4 | ( | ... | ) | __riscv_vloxei8_v_i8mf4(__VA_ARGS__) |
| #define vloxei8_v_i8mf4_m | ( | ... | ) | __riscv_vloxei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxei8_v_i8mf8 | ( | ... | ) | __riscv_vloxei8_v_i8mf8(__VA_ARGS__) |
| #define vloxei8_v_i8mf8_m | ( | ... | ) | __riscv_vloxei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxei8_v_u16m1 | ( | ... | ) | __riscv_vloxei8_v_u16m1(__VA_ARGS__) |
| #define vloxei8_v_u16m1_m | ( | ... | ) | __riscv_vloxei8_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxei8_v_u16m2 | ( | ... | ) | __riscv_vloxei8_v_u16m2(__VA_ARGS__) |
| #define vloxei8_v_u16m2_m | ( | ... | ) | __riscv_vloxei8_v_u16m2_tumu(__VA_ARGS__) |
| #define vloxei8_v_u16m4 | ( | ... | ) | __riscv_vloxei8_v_u16m4(__VA_ARGS__) |
| #define vloxei8_v_u16m4_m | ( | ... | ) | __riscv_vloxei8_v_u16m4_tumu(__VA_ARGS__) |
| #define vloxei8_v_u16m8 | ( | ... | ) | __riscv_vloxei8_v_u16m8(__VA_ARGS__) |
| #define vloxei8_v_u16m8_m | ( | ... | ) | __riscv_vloxei8_v_u16m8_tumu(__VA_ARGS__) |
| #define vloxei8_v_u16mf2 | ( | ... | ) | __riscv_vloxei8_v_u16mf2(__VA_ARGS__) |
| #define vloxei8_v_u16mf2_m | ( | ... | ) | __riscv_vloxei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxei8_v_u16mf4 | ( | ... | ) | __riscv_vloxei8_v_u16mf4(__VA_ARGS__) |
| #define vloxei8_v_u16mf4_m | ( | ... | ) | __riscv_vloxei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxei8_v_u32m1 | ( | ... | ) | __riscv_vloxei8_v_u32m1(__VA_ARGS__) |
| #define vloxei8_v_u32m1_m | ( | ... | ) | __riscv_vloxei8_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxei8_v_u32m2 | ( | ... | ) | __riscv_vloxei8_v_u32m2(__VA_ARGS__) |
| #define vloxei8_v_u32m2_m | ( | ... | ) | __riscv_vloxei8_v_u32m2_tumu(__VA_ARGS__) |
| #define vloxei8_v_u32m4 | ( | ... | ) | __riscv_vloxei8_v_u32m4(__VA_ARGS__) |
| #define vloxei8_v_u32m4_m | ( | ... | ) | __riscv_vloxei8_v_u32m4_tumu(__VA_ARGS__) |
| #define vloxei8_v_u32m8 | ( | ... | ) | __riscv_vloxei8_v_u32m8(__VA_ARGS__) |
| #define vloxei8_v_u32m8_m | ( | ... | ) | __riscv_vloxei8_v_u32m8_tumu(__VA_ARGS__) |
| #define vloxei8_v_u32mf2 | ( | ... | ) | __riscv_vloxei8_v_u32mf2(__VA_ARGS__) |
| #define vloxei8_v_u32mf2_m | ( | ... | ) | __riscv_vloxei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxei8_v_u64m1 | ( | ... | ) | __riscv_vloxei8_v_u64m1(__VA_ARGS__) |
| #define vloxei8_v_u64m1_m | ( | ... | ) | __riscv_vloxei8_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxei8_v_u64m2 | ( | ... | ) | __riscv_vloxei8_v_u64m2(__VA_ARGS__) |
| #define vloxei8_v_u64m2_m | ( | ... | ) | __riscv_vloxei8_v_u64m2_tumu(__VA_ARGS__) |
| #define vloxei8_v_u64m4 | ( | ... | ) | __riscv_vloxei8_v_u64m4(__VA_ARGS__) |
| #define vloxei8_v_u64m4_m | ( | ... | ) | __riscv_vloxei8_v_u64m4_tumu(__VA_ARGS__) |
| #define vloxei8_v_u64m8 | ( | ... | ) | __riscv_vloxei8_v_u64m8(__VA_ARGS__) |
| #define vloxei8_v_u64m8_m | ( | ... | ) | __riscv_vloxei8_v_u64m8_tumu(__VA_ARGS__) |
| #define vloxei8_v_u8m1 | ( | ... | ) | __riscv_vloxei8_v_u8m1(__VA_ARGS__) |
| #define vloxei8_v_u8m1_m | ( | ... | ) | __riscv_vloxei8_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxei8_v_u8m2 | ( | ... | ) | __riscv_vloxei8_v_u8m2(__VA_ARGS__) |
| #define vloxei8_v_u8m2_m | ( | ... | ) | __riscv_vloxei8_v_u8m2_tumu(__VA_ARGS__) |
| #define vloxei8_v_u8m4 | ( | ... | ) | __riscv_vloxei8_v_u8m4(__VA_ARGS__) |
| #define vloxei8_v_u8m4_m | ( | ... | ) | __riscv_vloxei8_v_u8m4_tumu(__VA_ARGS__) |
| #define vloxei8_v_u8m8 | ( | ... | ) | __riscv_vloxei8_v_u8m8(__VA_ARGS__) |
| #define vloxei8_v_u8m8_m | ( | ... | ) | __riscv_vloxei8_v_u8m8_tumu(__VA_ARGS__) |
| #define vloxei8_v_u8mf2 | ( | ... | ) | __riscv_vloxei8_v_u8mf2(__VA_ARGS__) |
| #define vloxei8_v_u8mf2_m | ( | ... | ) | __riscv_vloxei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxei8_v_u8mf4 | ( | ... | ) | __riscv_vloxei8_v_u8mf4(__VA_ARGS__) |
| #define vloxei8_v_u8mf4_m | ( | ... | ) | __riscv_vloxei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxei8_v_u8mf8 | ( | ... | ) | __riscv_vloxei8_v_u8mf8(__VA_ARGS__) |
| #define vloxei8_v_u8mf8_m | ( | ... | ) | __riscv_vloxei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_f16m1 | ( | ... | ) | __riscv_vloxseg2ei16_v_f16m1(__VA_ARGS__) |
| #define vloxseg2ei16_v_f16m1_m | ( | ... | ) | __riscv_vloxseg2ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_f16m2 | ( | ... | ) | __riscv_vloxseg2ei16_v_f16m2(__VA_ARGS__) |
| #define vloxseg2ei16_v_f16m2_m | ( | ... | ) | __riscv_vloxseg2ei16_v_f16m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_f16m4 | ( | ... | ) | __riscv_vloxseg2ei16_v_f16m4(__VA_ARGS__) |
| #define vloxseg2ei16_v_f16m4_m | ( | ... | ) | __riscv_vloxseg2ei16_v_f16m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_f16mf2 | ( | ... | ) | __riscv_vloxseg2ei16_v_f16mf2(__VA_ARGS__) |
| #define vloxseg2ei16_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg2ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_f16mf4 | ( | ... | ) | __riscv_vloxseg2ei16_v_f16mf4(__VA_ARGS__) |
| #define vloxseg2ei16_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg2ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_f32m1 | ( | ... | ) | __riscv_vloxseg2ei16_v_f32m1(__VA_ARGS__) |
| #define vloxseg2ei16_v_f32m1_m | ( | ... | ) | __riscv_vloxseg2ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_f32m2 | ( | ... | ) | __riscv_vloxseg2ei16_v_f32m2(__VA_ARGS__) |
| #define vloxseg2ei16_v_f32m2_m | ( | ... | ) | __riscv_vloxseg2ei16_v_f32m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_f32m4 | ( | ... | ) | __riscv_vloxseg2ei16_v_f32m4(__VA_ARGS__) |
| #define vloxseg2ei16_v_f32m4_m | ( | ... | ) | __riscv_vloxseg2ei16_v_f32m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_f32mf2 | ( | ... | ) | __riscv_vloxseg2ei16_v_f32mf2(__VA_ARGS__) |
| #define vloxseg2ei16_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg2ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_f64m1 | ( | ... | ) | __riscv_vloxseg2ei16_v_f64m1(__VA_ARGS__) |
| #define vloxseg2ei16_v_f64m1_m | ( | ... | ) | __riscv_vloxseg2ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_f64m2 | ( | ... | ) | __riscv_vloxseg2ei16_v_f64m2(__VA_ARGS__) |
| #define vloxseg2ei16_v_f64m2_m | ( | ... | ) | __riscv_vloxseg2ei16_v_f64m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_f64m4 | ( | ... | ) | __riscv_vloxseg2ei16_v_f64m4(__VA_ARGS__) |
| #define vloxseg2ei16_v_f64m4_m | ( | ... | ) | __riscv_vloxseg2ei16_v_f64m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_i16m1 | ( | ... | ) | __riscv_vloxseg2ei16_v_i16m1(__VA_ARGS__) |
| #define vloxseg2ei16_v_i16m1_m | ( | ... | ) | __riscv_vloxseg2ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_i16m2 | ( | ... | ) | __riscv_vloxseg2ei16_v_i16m2(__VA_ARGS__) |
| #define vloxseg2ei16_v_i16m2_m | ( | ... | ) | __riscv_vloxseg2ei16_v_i16m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_i16m4 | ( | ... | ) | __riscv_vloxseg2ei16_v_i16m4(__VA_ARGS__) |
| #define vloxseg2ei16_v_i16m4_m | ( | ... | ) | __riscv_vloxseg2ei16_v_i16m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_i16mf2 | ( | ... | ) | __riscv_vloxseg2ei16_v_i16mf2(__VA_ARGS__) |
| #define vloxseg2ei16_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg2ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_i16mf4 | ( | ... | ) | __riscv_vloxseg2ei16_v_i16mf4(__VA_ARGS__) |
| #define vloxseg2ei16_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg2ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_i32m1 | ( | ... | ) | __riscv_vloxseg2ei16_v_i32m1(__VA_ARGS__) |
| #define vloxseg2ei16_v_i32m1_m | ( | ... | ) | __riscv_vloxseg2ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_i32m2 | ( | ... | ) | __riscv_vloxseg2ei16_v_i32m2(__VA_ARGS__) |
| #define vloxseg2ei16_v_i32m2_m | ( | ... | ) | __riscv_vloxseg2ei16_v_i32m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_i32m4 | ( | ... | ) | __riscv_vloxseg2ei16_v_i32m4(__VA_ARGS__) |
| #define vloxseg2ei16_v_i32m4_m | ( | ... | ) | __riscv_vloxseg2ei16_v_i32m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_i32mf2 | ( | ... | ) | __riscv_vloxseg2ei16_v_i32mf2(__VA_ARGS__) |
| #define vloxseg2ei16_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg2ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_i64m1 | ( | ... | ) | __riscv_vloxseg2ei16_v_i64m1(__VA_ARGS__) |
| #define vloxseg2ei16_v_i64m1_m | ( | ... | ) | __riscv_vloxseg2ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_i64m2 | ( | ... | ) | __riscv_vloxseg2ei16_v_i64m2(__VA_ARGS__) |
| #define vloxseg2ei16_v_i64m2_m | ( | ... | ) | __riscv_vloxseg2ei16_v_i64m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_i64m4 | ( | ... | ) | __riscv_vloxseg2ei16_v_i64m4(__VA_ARGS__) |
| #define vloxseg2ei16_v_i64m4_m | ( | ... | ) | __riscv_vloxseg2ei16_v_i64m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_i8m1 | ( | ... | ) | __riscv_vloxseg2ei16_v_i8m1(__VA_ARGS__) |
| #define vloxseg2ei16_v_i8m1_m | ( | ... | ) | __riscv_vloxseg2ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_i8m2 | ( | ... | ) | __riscv_vloxseg2ei16_v_i8m2(__VA_ARGS__) |
| #define vloxseg2ei16_v_i8m2_m | ( | ... | ) | __riscv_vloxseg2ei16_v_i8m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_i8m4 | ( | ... | ) | __riscv_vloxseg2ei16_v_i8m4(__VA_ARGS__) |
| #define vloxseg2ei16_v_i8m4_m | ( | ... | ) | __riscv_vloxseg2ei16_v_i8m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_i8mf2 | ( | ... | ) | __riscv_vloxseg2ei16_v_i8mf2(__VA_ARGS__) |
| #define vloxseg2ei16_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg2ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_i8mf4 | ( | ... | ) | __riscv_vloxseg2ei16_v_i8mf4(__VA_ARGS__) |
| #define vloxseg2ei16_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg2ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_i8mf8 | ( | ... | ) | __riscv_vloxseg2ei16_v_i8mf8(__VA_ARGS__) |
| #define vloxseg2ei16_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg2ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_u16m1 | ( | ... | ) | __riscv_vloxseg2ei16_v_u16m1(__VA_ARGS__) |
| #define vloxseg2ei16_v_u16m1_m | ( | ... | ) | __riscv_vloxseg2ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_u16m2 | ( | ... | ) | __riscv_vloxseg2ei16_v_u16m2(__VA_ARGS__) |
| #define vloxseg2ei16_v_u16m2_m | ( | ... | ) | __riscv_vloxseg2ei16_v_u16m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_u16m4 | ( | ... | ) | __riscv_vloxseg2ei16_v_u16m4(__VA_ARGS__) |
| #define vloxseg2ei16_v_u16m4_m | ( | ... | ) | __riscv_vloxseg2ei16_v_u16m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_u16mf2 | ( | ... | ) | __riscv_vloxseg2ei16_v_u16mf2(__VA_ARGS__) |
| #define vloxseg2ei16_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg2ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_u16mf4 | ( | ... | ) | __riscv_vloxseg2ei16_v_u16mf4(__VA_ARGS__) |
| #define vloxseg2ei16_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg2ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_u32m1 | ( | ... | ) | __riscv_vloxseg2ei16_v_u32m1(__VA_ARGS__) |
| #define vloxseg2ei16_v_u32m1_m | ( | ... | ) | __riscv_vloxseg2ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_u32m2 | ( | ... | ) | __riscv_vloxseg2ei16_v_u32m2(__VA_ARGS__) |
| #define vloxseg2ei16_v_u32m2_m | ( | ... | ) | __riscv_vloxseg2ei16_v_u32m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_u32m4 | ( | ... | ) | __riscv_vloxseg2ei16_v_u32m4(__VA_ARGS__) |
| #define vloxseg2ei16_v_u32m4_m | ( | ... | ) | __riscv_vloxseg2ei16_v_u32m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_u32mf2 | ( | ... | ) | __riscv_vloxseg2ei16_v_u32mf2(__VA_ARGS__) |
| #define vloxseg2ei16_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg2ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_u64m1 | ( | ... | ) | __riscv_vloxseg2ei16_v_u64m1(__VA_ARGS__) |
| #define vloxseg2ei16_v_u64m1_m | ( | ... | ) | __riscv_vloxseg2ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_u64m2 | ( | ... | ) | __riscv_vloxseg2ei16_v_u64m2(__VA_ARGS__) |
| #define vloxseg2ei16_v_u64m2_m | ( | ... | ) | __riscv_vloxseg2ei16_v_u64m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_u64m4 | ( | ... | ) | __riscv_vloxseg2ei16_v_u64m4(__VA_ARGS__) |
| #define vloxseg2ei16_v_u64m4_m | ( | ... | ) | __riscv_vloxseg2ei16_v_u64m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_u8m1 | ( | ... | ) | __riscv_vloxseg2ei16_v_u8m1(__VA_ARGS__) |
| #define vloxseg2ei16_v_u8m1_m | ( | ... | ) | __riscv_vloxseg2ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_u8m2 | ( | ... | ) | __riscv_vloxseg2ei16_v_u8m2(__VA_ARGS__) |
| #define vloxseg2ei16_v_u8m2_m | ( | ... | ) | __riscv_vloxseg2ei16_v_u8m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_u8m4 | ( | ... | ) | __riscv_vloxseg2ei16_v_u8m4(__VA_ARGS__) |
| #define vloxseg2ei16_v_u8m4_m | ( | ... | ) | __riscv_vloxseg2ei16_v_u8m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_u8mf2 | ( | ... | ) | __riscv_vloxseg2ei16_v_u8mf2(__VA_ARGS__) |
| #define vloxseg2ei16_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg2ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_u8mf4 | ( | ... | ) | __riscv_vloxseg2ei16_v_u8mf4(__VA_ARGS__) |
| #define vloxseg2ei16_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg2ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg2ei16_v_u8mf8 | ( | ... | ) | __riscv_vloxseg2ei16_v_u8mf8(__VA_ARGS__) |
| #define vloxseg2ei16_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg2ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_f16m1 | ( | ... | ) | __riscv_vloxseg2ei32_v_f16m1(__VA_ARGS__) |
| #define vloxseg2ei32_v_f16m1_m | ( | ... | ) | __riscv_vloxseg2ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_f16m2 | ( | ... | ) | __riscv_vloxseg2ei32_v_f16m2(__VA_ARGS__) |
| #define vloxseg2ei32_v_f16m2_m | ( | ... | ) | __riscv_vloxseg2ei32_v_f16m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_f16m4 | ( | ... | ) | __riscv_vloxseg2ei32_v_f16m4(__VA_ARGS__) |
| #define vloxseg2ei32_v_f16m4_m | ( | ... | ) | __riscv_vloxseg2ei32_v_f16m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_f16mf2 | ( | ... | ) | __riscv_vloxseg2ei32_v_f16mf2(__VA_ARGS__) |
| #define vloxseg2ei32_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg2ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_f16mf4 | ( | ... | ) | __riscv_vloxseg2ei32_v_f16mf4(__VA_ARGS__) |
| #define vloxseg2ei32_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg2ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_f32m1 | ( | ... | ) | __riscv_vloxseg2ei32_v_f32m1(__VA_ARGS__) |
| #define vloxseg2ei32_v_f32m1_m | ( | ... | ) | __riscv_vloxseg2ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_f32m2 | ( | ... | ) | __riscv_vloxseg2ei32_v_f32m2(__VA_ARGS__) |
| #define vloxseg2ei32_v_f32m2_m | ( | ... | ) | __riscv_vloxseg2ei32_v_f32m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_f32m4 | ( | ... | ) | __riscv_vloxseg2ei32_v_f32m4(__VA_ARGS__) |
| #define vloxseg2ei32_v_f32m4_m | ( | ... | ) | __riscv_vloxseg2ei32_v_f32m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_f32mf2 | ( | ... | ) | __riscv_vloxseg2ei32_v_f32mf2(__VA_ARGS__) |
| #define vloxseg2ei32_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg2ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_f64m1 | ( | ... | ) | __riscv_vloxseg2ei32_v_f64m1(__VA_ARGS__) |
| #define vloxseg2ei32_v_f64m1_m | ( | ... | ) | __riscv_vloxseg2ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_f64m2 | ( | ... | ) | __riscv_vloxseg2ei32_v_f64m2(__VA_ARGS__) |
| #define vloxseg2ei32_v_f64m2_m | ( | ... | ) | __riscv_vloxseg2ei32_v_f64m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_f64m4 | ( | ... | ) | __riscv_vloxseg2ei32_v_f64m4(__VA_ARGS__) |
| #define vloxseg2ei32_v_f64m4_m | ( | ... | ) | __riscv_vloxseg2ei32_v_f64m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_i16m1 | ( | ... | ) | __riscv_vloxseg2ei32_v_i16m1(__VA_ARGS__) |
| #define vloxseg2ei32_v_i16m1_m | ( | ... | ) | __riscv_vloxseg2ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_i16m2 | ( | ... | ) | __riscv_vloxseg2ei32_v_i16m2(__VA_ARGS__) |
| #define vloxseg2ei32_v_i16m2_m | ( | ... | ) | __riscv_vloxseg2ei32_v_i16m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_i16m4 | ( | ... | ) | __riscv_vloxseg2ei32_v_i16m4(__VA_ARGS__) |
| #define vloxseg2ei32_v_i16m4_m | ( | ... | ) | __riscv_vloxseg2ei32_v_i16m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_i16mf2 | ( | ... | ) | __riscv_vloxseg2ei32_v_i16mf2(__VA_ARGS__) |
| #define vloxseg2ei32_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg2ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_i16mf4 | ( | ... | ) | __riscv_vloxseg2ei32_v_i16mf4(__VA_ARGS__) |
| #define vloxseg2ei32_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg2ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_i32m1 | ( | ... | ) | __riscv_vloxseg2ei32_v_i32m1(__VA_ARGS__) |
| #define vloxseg2ei32_v_i32m1_m | ( | ... | ) | __riscv_vloxseg2ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_i32m2 | ( | ... | ) | __riscv_vloxseg2ei32_v_i32m2(__VA_ARGS__) |
| #define vloxseg2ei32_v_i32m2_m | ( | ... | ) | __riscv_vloxseg2ei32_v_i32m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_i32m4 | ( | ... | ) | __riscv_vloxseg2ei32_v_i32m4(__VA_ARGS__) |
| #define vloxseg2ei32_v_i32m4_m | ( | ... | ) | __riscv_vloxseg2ei32_v_i32m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_i32mf2 | ( | ... | ) | __riscv_vloxseg2ei32_v_i32mf2(__VA_ARGS__) |
| #define vloxseg2ei32_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg2ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_i64m1 | ( | ... | ) | __riscv_vloxseg2ei32_v_i64m1(__VA_ARGS__) |
| #define vloxseg2ei32_v_i64m1_m | ( | ... | ) | __riscv_vloxseg2ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_i64m2 | ( | ... | ) | __riscv_vloxseg2ei32_v_i64m2(__VA_ARGS__) |
| #define vloxseg2ei32_v_i64m2_m | ( | ... | ) | __riscv_vloxseg2ei32_v_i64m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_i64m4 | ( | ... | ) | __riscv_vloxseg2ei32_v_i64m4(__VA_ARGS__) |
| #define vloxseg2ei32_v_i64m4_m | ( | ... | ) | __riscv_vloxseg2ei32_v_i64m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_i8m1 | ( | ... | ) | __riscv_vloxseg2ei32_v_i8m1(__VA_ARGS__) |
| #define vloxseg2ei32_v_i8m1_m | ( | ... | ) | __riscv_vloxseg2ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_i8m2 | ( | ... | ) | __riscv_vloxseg2ei32_v_i8m2(__VA_ARGS__) |
| #define vloxseg2ei32_v_i8m2_m | ( | ... | ) | __riscv_vloxseg2ei32_v_i8m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_i8mf2 | ( | ... | ) | __riscv_vloxseg2ei32_v_i8mf2(__VA_ARGS__) |
| #define vloxseg2ei32_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg2ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_i8mf4 | ( | ... | ) | __riscv_vloxseg2ei32_v_i8mf4(__VA_ARGS__) |
| #define vloxseg2ei32_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg2ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_i8mf8 | ( | ... | ) | __riscv_vloxseg2ei32_v_i8mf8(__VA_ARGS__) |
| #define vloxseg2ei32_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg2ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_u16m1 | ( | ... | ) | __riscv_vloxseg2ei32_v_u16m1(__VA_ARGS__) |
| #define vloxseg2ei32_v_u16m1_m | ( | ... | ) | __riscv_vloxseg2ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_u16m2 | ( | ... | ) | __riscv_vloxseg2ei32_v_u16m2(__VA_ARGS__) |
| #define vloxseg2ei32_v_u16m2_m | ( | ... | ) | __riscv_vloxseg2ei32_v_u16m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_u16m4 | ( | ... | ) | __riscv_vloxseg2ei32_v_u16m4(__VA_ARGS__) |
| #define vloxseg2ei32_v_u16m4_m | ( | ... | ) | __riscv_vloxseg2ei32_v_u16m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_u16mf2 | ( | ... | ) | __riscv_vloxseg2ei32_v_u16mf2(__VA_ARGS__) |
| #define vloxseg2ei32_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg2ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_u16mf4 | ( | ... | ) | __riscv_vloxseg2ei32_v_u16mf4(__VA_ARGS__) |
| #define vloxseg2ei32_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg2ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_u32m1 | ( | ... | ) | __riscv_vloxseg2ei32_v_u32m1(__VA_ARGS__) |
| #define vloxseg2ei32_v_u32m1_m | ( | ... | ) | __riscv_vloxseg2ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_u32m2 | ( | ... | ) | __riscv_vloxseg2ei32_v_u32m2(__VA_ARGS__) |
| #define vloxseg2ei32_v_u32m2_m | ( | ... | ) | __riscv_vloxseg2ei32_v_u32m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_u32m4 | ( | ... | ) | __riscv_vloxseg2ei32_v_u32m4(__VA_ARGS__) |
| #define vloxseg2ei32_v_u32m4_m | ( | ... | ) | __riscv_vloxseg2ei32_v_u32m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_u32mf2 | ( | ... | ) | __riscv_vloxseg2ei32_v_u32mf2(__VA_ARGS__) |
| #define vloxseg2ei32_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg2ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_u64m1 | ( | ... | ) | __riscv_vloxseg2ei32_v_u64m1(__VA_ARGS__) |
| #define vloxseg2ei32_v_u64m1_m | ( | ... | ) | __riscv_vloxseg2ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_u64m2 | ( | ... | ) | __riscv_vloxseg2ei32_v_u64m2(__VA_ARGS__) |
| #define vloxseg2ei32_v_u64m2_m | ( | ... | ) | __riscv_vloxseg2ei32_v_u64m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_u64m4 | ( | ... | ) | __riscv_vloxseg2ei32_v_u64m4(__VA_ARGS__) |
| #define vloxseg2ei32_v_u64m4_m | ( | ... | ) | __riscv_vloxseg2ei32_v_u64m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_u8m1 | ( | ... | ) | __riscv_vloxseg2ei32_v_u8m1(__VA_ARGS__) |
| #define vloxseg2ei32_v_u8m1_m | ( | ... | ) | __riscv_vloxseg2ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_u8m2 | ( | ... | ) | __riscv_vloxseg2ei32_v_u8m2(__VA_ARGS__) |
| #define vloxseg2ei32_v_u8m2_m | ( | ... | ) | __riscv_vloxseg2ei32_v_u8m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_u8mf2 | ( | ... | ) | __riscv_vloxseg2ei32_v_u8mf2(__VA_ARGS__) |
| #define vloxseg2ei32_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg2ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_u8mf4 | ( | ... | ) | __riscv_vloxseg2ei32_v_u8mf4(__VA_ARGS__) |
| #define vloxseg2ei32_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg2ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg2ei32_v_u8mf8 | ( | ... | ) | __riscv_vloxseg2ei32_v_u8mf8(__VA_ARGS__) |
| #define vloxseg2ei32_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg2ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_f16m1 | ( | ... | ) | __riscv_vloxseg2ei64_v_f16m1(__VA_ARGS__) |
| #define vloxseg2ei64_v_f16m1_m | ( | ... | ) | __riscv_vloxseg2ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_f16m2 | ( | ... | ) | __riscv_vloxseg2ei64_v_f16m2(__VA_ARGS__) |
| #define vloxseg2ei64_v_f16m2_m | ( | ... | ) | __riscv_vloxseg2ei64_v_f16m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_f16mf2 | ( | ... | ) | __riscv_vloxseg2ei64_v_f16mf2(__VA_ARGS__) |
| #define vloxseg2ei64_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg2ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_f16mf4 | ( | ... | ) | __riscv_vloxseg2ei64_v_f16mf4(__VA_ARGS__) |
| #define vloxseg2ei64_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg2ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_f32m1 | ( | ... | ) | __riscv_vloxseg2ei64_v_f32m1(__VA_ARGS__) |
| #define vloxseg2ei64_v_f32m1_m | ( | ... | ) | __riscv_vloxseg2ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_f32m2 | ( | ... | ) | __riscv_vloxseg2ei64_v_f32m2(__VA_ARGS__) |
| #define vloxseg2ei64_v_f32m2_m | ( | ... | ) | __riscv_vloxseg2ei64_v_f32m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_f32m4 | ( | ... | ) | __riscv_vloxseg2ei64_v_f32m4(__VA_ARGS__) |
| #define vloxseg2ei64_v_f32m4_m | ( | ... | ) | __riscv_vloxseg2ei64_v_f32m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_f32mf2 | ( | ... | ) | __riscv_vloxseg2ei64_v_f32mf2(__VA_ARGS__) |
| #define vloxseg2ei64_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg2ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_f64m1 | ( | ... | ) | __riscv_vloxseg2ei64_v_f64m1(__VA_ARGS__) |
| #define vloxseg2ei64_v_f64m1_m | ( | ... | ) | __riscv_vloxseg2ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_f64m2 | ( | ... | ) | __riscv_vloxseg2ei64_v_f64m2(__VA_ARGS__) |
| #define vloxseg2ei64_v_f64m2_m | ( | ... | ) | __riscv_vloxseg2ei64_v_f64m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_f64m4 | ( | ... | ) | __riscv_vloxseg2ei64_v_f64m4(__VA_ARGS__) |
| #define vloxseg2ei64_v_f64m4_m | ( | ... | ) | __riscv_vloxseg2ei64_v_f64m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_i16m1 | ( | ... | ) | __riscv_vloxseg2ei64_v_i16m1(__VA_ARGS__) |
| #define vloxseg2ei64_v_i16m1_m | ( | ... | ) | __riscv_vloxseg2ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_i16m2 | ( | ... | ) | __riscv_vloxseg2ei64_v_i16m2(__VA_ARGS__) |
| #define vloxseg2ei64_v_i16m2_m | ( | ... | ) | __riscv_vloxseg2ei64_v_i16m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_i16mf2 | ( | ... | ) | __riscv_vloxseg2ei64_v_i16mf2(__VA_ARGS__) |
| #define vloxseg2ei64_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg2ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_i16mf4 | ( | ... | ) | __riscv_vloxseg2ei64_v_i16mf4(__VA_ARGS__) |
| #define vloxseg2ei64_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg2ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_i32m1 | ( | ... | ) | __riscv_vloxseg2ei64_v_i32m1(__VA_ARGS__) |
| #define vloxseg2ei64_v_i32m1_m | ( | ... | ) | __riscv_vloxseg2ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_i32m2 | ( | ... | ) | __riscv_vloxseg2ei64_v_i32m2(__VA_ARGS__) |
| #define vloxseg2ei64_v_i32m2_m | ( | ... | ) | __riscv_vloxseg2ei64_v_i32m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_i32m4 | ( | ... | ) | __riscv_vloxseg2ei64_v_i32m4(__VA_ARGS__) |
| #define vloxseg2ei64_v_i32m4_m | ( | ... | ) | __riscv_vloxseg2ei64_v_i32m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_i32mf2 | ( | ... | ) | __riscv_vloxseg2ei64_v_i32mf2(__VA_ARGS__) |
| #define vloxseg2ei64_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg2ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_i64m1 | ( | ... | ) | __riscv_vloxseg2ei64_v_i64m1(__VA_ARGS__) |
| #define vloxseg2ei64_v_i64m1_m | ( | ... | ) | __riscv_vloxseg2ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_i64m2 | ( | ... | ) | __riscv_vloxseg2ei64_v_i64m2(__VA_ARGS__) |
| #define vloxseg2ei64_v_i64m2_m | ( | ... | ) | __riscv_vloxseg2ei64_v_i64m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_i64m4 | ( | ... | ) | __riscv_vloxseg2ei64_v_i64m4(__VA_ARGS__) |
| #define vloxseg2ei64_v_i64m4_m | ( | ... | ) | __riscv_vloxseg2ei64_v_i64m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_i8m1 | ( | ... | ) | __riscv_vloxseg2ei64_v_i8m1(__VA_ARGS__) |
| #define vloxseg2ei64_v_i8m1_m | ( | ... | ) | __riscv_vloxseg2ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_i8mf2 | ( | ... | ) | __riscv_vloxseg2ei64_v_i8mf2(__VA_ARGS__) |
| #define vloxseg2ei64_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg2ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_i8mf4 | ( | ... | ) | __riscv_vloxseg2ei64_v_i8mf4(__VA_ARGS__) |
| #define vloxseg2ei64_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg2ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_i8mf8 | ( | ... | ) | __riscv_vloxseg2ei64_v_i8mf8(__VA_ARGS__) |
| #define vloxseg2ei64_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg2ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_u16m1 | ( | ... | ) | __riscv_vloxseg2ei64_v_u16m1(__VA_ARGS__) |
| #define vloxseg2ei64_v_u16m1_m | ( | ... | ) | __riscv_vloxseg2ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_u16m2 | ( | ... | ) | __riscv_vloxseg2ei64_v_u16m2(__VA_ARGS__) |
| #define vloxseg2ei64_v_u16m2_m | ( | ... | ) | __riscv_vloxseg2ei64_v_u16m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_u16mf2 | ( | ... | ) | __riscv_vloxseg2ei64_v_u16mf2(__VA_ARGS__) |
| #define vloxseg2ei64_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg2ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_u16mf4 | ( | ... | ) | __riscv_vloxseg2ei64_v_u16mf4(__VA_ARGS__) |
| #define vloxseg2ei64_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg2ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_u32m1 | ( | ... | ) | __riscv_vloxseg2ei64_v_u32m1(__VA_ARGS__) |
| #define vloxseg2ei64_v_u32m1_m | ( | ... | ) | __riscv_vloxseg2ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_u32m2 | ( | ... | ) | __riscv_vloxseg2ei64_v_u32m2(__VA_ARGS__) |
| #define vloxseg2ei64_v_u32m2_m | ( | ... | ) | __riscv_vloxseg2ei64_v_u32m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_u32m4 | ( | ... | ) | __riscv_vloxseg2ei64_v_u32m4(__VA_ARGS__) |
| #define vloxseg2ei64_v_u32m4_m | ( | ... | ) | __riscv_vloxseg2ei64_v_u32m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_u32mf2 | ( | ... | ) | __riscv_vloxseg2ei64_v_u32mf2(__VA_ARGS__) |
| #define vloxseg2ei64_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg2ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_u64m1 | ( | ... | ) | __riscv_vloxseg2ei64_v_u64m1(__VA_ARGS__) |
| #define vloxseg2ei64_v_u64m1_m | ( | ... | ) | __riscv_vloxseg2ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_u64m2 | ( | ... | ) | __riscv_vloxseg2ei64_v_u64m2(__VA_ARGS__) |
| #define vloxseg2ei64_v_u64m2_m | ( | ... | ) | __riscv_vloxseg2ei64_v_u64m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_u64m4 | ( | ... | ) | __riscv_vloxseg2ei64_v_u64m4(__VA_ARGS__) |
| #define vloxseg2ei64_v_u64m4_m | ( | ... | ) | __riscv_vloxseg2ei64_v_u64m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_u8m1 | ( | ... | ) | __riscv_vloxseg2ei64_v_u8m1(__VA_ARGS__) |
| #define vloxseg2ei64_v_u8m1_m | ( | ... | ) | __riscv_vloxseg2ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_u8mf2 | ( | ... | ) | __riscv_vloxseg2ei64_v_u8mf2(__VA_ARGS__) |
| #define vloxseg2ei64_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg2ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_u8mf4 | ( | ... | ) | __riscv_vloxseg2ei64_v_u8mf4(__VA_ARGS__) |
| #define vloxseg2ei64_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg2ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg2ei64_v_u8mf8 | ( | ... | ) | __riscv_vloxseg2ei64_v_u8mf8(__VA_ARGS__) |
| #define vloxseg2ei64_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg2ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_f16m1 | ( | ... | ) | __riscv_vloxseg2ei8_v_f16m1(__VA_ARGS__) |
| #define vloxseg2ei8_v_f16m1_m | ( | ... | ) | __riscv_vloxseg2ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_f16m2 | ( | ... | ) | __riscv_vloxseg2ei8_v_f16m2(__VA_ARGS__) |
| #define vloxseg2ei8_v_f16m2_m | ( | ... | ) | __riscv_vloxseg2ei8_v_f16m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_f16m4 | ( | ... | ) | __riscv_vloxseg2ei8_v_f16m4(__VA_ARGS__) |
| #define vloxseg2ei8_v_f16m4_m | ( | ... | ) | __riscv_vloxseg2ei8_v_f16m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_f16mf2 | ( | ... | ) | __riscv_vloxseg2ei8_v_f16mf2(__VA_ARGS__) |
| #define vloxseg2ei8_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg2ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_f16mf4 | ( | ... | ) | __riscv_vloxseg2ei8_v_f16mf4(__VA_ARGS__) |
| #define vloxseg2ei8_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg2ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_f32m1 | ( | ... | ) | __riscv_vloxseg2ei8_v_f32m1(__VA_ARGS__) |
| #define vloxseg2ei8_v_f32m1_m | ( | ... | ) | __riscv_vloxseg2ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_f32m2 | ( | ... | ) | __riscv_vloxseg2ei8_v_f32m2(__VA_ARGS__) |
| #define vloxseg2ei8_v_f32m2_m | ( | ... | ) | __riscv_vloxseg2ei8_v_f32m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_f32m4 | ( | ... | ) | __riscv_vloxseg2ei8_v_f32m4(__VA_ARGS__) |
| #define vloxseg2ei8_v_f32m4_m | ( | ... | ) | __riscv_vloxseg2ei8_v_f32m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_f32mf2 | ( | ... | ) | __riscv_vloxseg2ei8_v_f32mf2(__VA_ARGS__) |
| #define vloxseg2ei8_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg2ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_f64m1 | ( | ... | ) | __riscv_vloxseg2ei8_v_f64m1(__VA_ARGS__) |
| #define vloxseg2ei8_v_f64m1_m | ( | ... | ) | __riscv_vloxseg2ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_f64m2 | ( | ... | ) | __riscv_vloxseg2ei8_v_f64m2(__VA_ARGS__) |
| #define vloxseg2ei8_v_f64m2_m | ( | ... | ) | __riscv_vloxseg2ei8_v_f64m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_f64m4 | ( | ... | ) | __riscv_vloxseg2ei8_v_f64m4(__VA_ARGS__) |
| #define vloxseg2ei8_v_f64m4_m | ( | ... | ) | __riscv_vloxseg2ei8_v_f64m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_i16m1 | ( | ... | ) | __riscv_vloxseg2ei8_v_i16m1(__VA_ARGS__) |
| #define vloxseg2ei8_v_i16m1_m | ( | ... | ) | __riscv_vloxseg2ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_i16m2 | ( | ... | ) | __riscv_vloxseg2ei8_v_i16m2(__VA_ARGS__) |
| #define vloxseg2ei8_v_i16m2_m | ( | ... | ) | __riscv_vloxseg2ei8_v_i16m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_i16m4 | ( | ... | ) | __riscv_vloxseg2ei8_v_i16m4(__VA_ARGS__) |
| #define vloxseg2ei8_v_i16m4_m | ( | ... | ) | __riscv_vloxseg2ei8_v_i16m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_i16mf2 | ( | ... | ) | __riscv_vloxseg2ei8_v_i16mf2(__VA_ARGS__) |
| #define vloxseg2ei8_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg2ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_i16mf4 | ( | ... | ) | __riscv_vloxseg2ei8_v_i16mf4(__VA_ARGS__) |
| #define vloxseg2ei8_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg2ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_i32m1 | ( | ... | ) | __riscv_vloxseg2ei8_v_i32m1(__VA_ARGS__) |
| #define vloxseg2ei8_v_i32m1_m | ( | ... | ) | __riscv_vloxseg2ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_i32m2 | ( | ... | ) | __riscv_vloxseg2ei8_v_i32m2(__VA_ARGS__) |
| #define vloxseg2ei8_v_i32m2_m | ( | ... | ) | __riscv_vloxseg2ei8_v_i32m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_i32m4 | ( | ... | ) | __riscv_vloxseg2ei8_v_i32m4(__VA_ARGS__) |
| #define vloxseg2ei8_v_i32m4_m | ( | ... | ) | __riscv_vloxseg2ei8_v_i32m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_i32mf2 | ( | ... | ) | __riscv_vloxseg2ei8_v_i32mf2(__VA_ARGS__) |
| #define vloxseg2ei8_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg2ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_i64m1 | ( | ... | ) | __riscv_vloxseg2ei8_v_i64m1(__VA_ARGS__) |
| #define vloxseg2ei8_v_i64m1_m | ( | ... | ) | __riscv_vloxseg2ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_i64m2 | ( | ... | ) | __riscv_vloxseg2ei8_v_i64m2(__VA_ARGS__) |
| #define vloxseg2ei8_v_i64m2_m | ( | ... | ) | __riscv_vloxseg2ei8_v_i64m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_i64m4 | ( | ... | ) | __riscv_vloxseg2ei8_v_i64m4(__VA_ARGS__) |
| #define vloxseg2ei8_v_i64m4_m | ( | ... | ) | __riscv_vloxseg2ei8_v_i64m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_i8m1 | ( | ... | ) | __riscv_vloxseg2ei8_v_i8m1(__VA_ARGS__) |
| #define vloxseg2ei8_v_i8m1_m | ( | ... | ) | __riscv_vloxseg2ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_i8m2 | ( | ... | ) | __riscv_vloxseg2ei8_v_i8m2(__VA_ARGS__) |
| #define vloxseg2ei8_v_i8m2_m | ( | ... | ) | __riscv_vloxseg2ei8_v_i8m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_i8m4 | ( | ... | ) | __riscv_vloxseg2ei8_v_i8m4(__VA_ARGS__) |
| #define vloxseg2ei8_v_i8m4_m | ( | ... | ) | __riscv_vloxseg2ei8_v_i8m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_i8mf2 | ( | ... | ) | __riscv_vloxseg2ei8_v_i8mf2(__VA_ARGS__) |
| #define vloxseg2ei8_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg2ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_i8mf4 | ( | ... | ) | __riscv_vloxseg2ei8_v_i8mf4(__VA_ARGS__) |
| #define vloxseg2ei8_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg2ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_i8mf8 | ( | ... | ) | __riscv_vloxseg2ei8_v_i8mf8(__VA_ARGS__) |
| #define vloxseg2ei8_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg2ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_u16m1 | ( | ... | ) | __riscv_vloxseg2ei8_v_u16m1(__VA_ARGS__) |
| #define vloxseg2ei8_v_u16m1_m | ( | ... | ) | __riscv_vloxseg2ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_u16m2 | ( | ... | ) | __riscv_vloxseg2ei8_v_u16m2(__VA_ARGS__) |
| #define vloxseg2ei8_v_u16m2_m | ( | ... | ) | __riscv_vloxseg2ei8_v_u16m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_u16m4 | ( | ... | ) | __riscv_vloxseg2ei8_v_u16m4(__VA_ARGS__) |
| #define vloxseg2ei8_v_u16m4_m | ( | ... | ) | __riscv_vloxseg2ei8_v_u16m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_u16mf2 | ( | ... | ) | __riscv_vloxseg2ei8_v_u16mf2(__VA_ARGS__) |
| #define vloxseg2ei8_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg2ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_u16mf4 | ( | ... | ) | __riscv_vloxseg2ei8_v_u16mf4(__VA_ARGS__) |
| #define vloxseg2ei8_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg2ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_u32m1 | ( | ... | ) | __riscv_vloxseg2ei8_v_u32m1(__VA_ARGS__) |
| #define vloxseg2ei8_v_u32m1_m | ( | ... | ) | __riscv_vloxseg2ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_u32m2 | ( | ... | ) | __riscv_vloxseg2ei8_v_u32m2(__VA_ARGS__) |
| #define vloxseg2ei8_v_u32m2_m | ( | ... | ) | __riscv_vloxseg2ei8_v_u32m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_u32m4 | ( | ... | ) | __riscv_vloxseg2ei8_v_u32m4(__VA_ARGS__) |
| #define vloxseg2ei8_v_u32m4_m | ( | ... | ) | __riscv_vloxseg2ei8_v_u32m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_u32mf2 | ( | ... | ) | __riscv_vloxseg2ei8_v_u32mf2(__VA_ARGS__) |
| #define vloxseg2ei8_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg2ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_u64m1 | ( | ... | ) | __riscv_vloxseg2ei8_v_u64m1(__VA_ARGS__) |
| #define vloxseg2ei8_v_u64m1_m | ( | ... | ) | __riscv_vloxseg2ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_u64m2 | ( | ... | ) | __riscv_vloxseg2ei8_v_u64m2(__VA_ARGS__) |
| #define vloxseg2ei8_v_u64m2_m | ( | ... | ) | __riscv_vloxseg2ei8_v_u64m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_u64m4 | ( | ... | ) | __riscv_vloxseg2ei8_v_u64m4(__VA_ARGS__) |
| #define vloxseg2ei8_v_u64m4_m | ( | ... | ) | __riscv_vloxseg2ei8_v_u64m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_u8m1 | ( | ... | ) | __riscv_vloxseg2ei8_v_u8m1(__VA_ARGS__) |
| #define vloxseg2ei8_v_u8m1_m | ( | ... | ) | __riscv_vloxseg2ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_u8m2 | ( | ... | ) | __riscv_vloxseg2ei8_v_u8m2(__VA_ARGS__) |
| #define vloxseg2ei8_v_u8m2_m | ( | ... | ) | __riscv_vloxseg2ei8_v_u8m2_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_u8m4 | ( | ... | ) | __riscv_vloxseg2ei8_v_u8m4(__VA_ARGS__) |
| #define vloxseg2ei8_v_u8m4_m | ( | ... | ) | __riscv_vloxseg2ei8_v_u8m4_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_u8mf2 | ( | ... | ) | __riscv_vloxseg2ei8_v_u8mf2(__VA_ARGS__) |
| #define vloxseg2ei8_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg2ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_u8mf4 | ( | ... | ) | __riscv_vloxseg2ei8_v_u8mf4(__VA_ARGS__) |
| #define vloxseg2ei8_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg2ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg2ei8_v_u8mf8 | ( | ... | ) | __riscv_vloxseg2ei8_v_u8mf8(__VA_ARGS__) |
| #define vloxseg2ei8_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg2ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_f16m1 | ( | ... | ) | __riscv_vloxseg3ei16_v_f16m1(__VA_ARGS__) |
| #define vloxseg3ei16_v_f16m1_m | ( | ... | ) | __riscv_vloxseg3ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_f16m2 | ( | ... | ) | __riscv_vloxseg3ei16_v_f16m2(__VA_ARGS__) |
| #define vloxseg3ei16_v_f16m2_m | ( | ... | ) | __riscv_vloxseg3ei16_v_f16m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_f16mf2 | ( | ... | ) | __riscv_vloxseg3ei16_v_f16mf2(__VA_ARGS__) |
| #define vloxseg3ei16_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg3ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_f16mf4 | ( | ... | ) | __riscv_vloxseg3ei16_v_f16mf4(__VA_ARGS__) |
| #define vloxseg3ei16_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg3ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_f32m1 | ( | ... | ) | __riscv_vloxseg3ei16_v_f32m1(__VA_ARGS__) |
| #define vloxseg3ei16_v_f32m1_m | ( | ... | ) | __riscv_vloxseg3ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_f32m2 | ( | ... | ) | __riscv_vloxseg3ei16_v_f32m2(__VA_ARGS__) |
| #define vloxseg3ei16_v_f32m2_m | ( | ... | ) | __riscv_vloxseg3ei16_v_f32m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_f32mf2 | ( | ... | ) | __riscv_vloxseg3ei16_v_f32mf2(__VA_ARGS__) |
| #define vloxseg3ei16_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg3ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_f64m1 | ( | ... | ) | __riscv_vloxseg3ei16_v_f64m1(__VA_ARGS__) |
| #define vloxseg3ei16_v_f64m1_m | ( | ... | ) | __riscv_vloxseg3ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_f64m2 | ( | ... | ) | __riscv_vloxseg3ei16_v_f64m2(__VA_ARGS__) |
| #define vloxseg3ei16_v_f64m2_m | ( | ... | ) | __riscv_vloxseg3ei16_v_f64m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_i16m1 | ( | ... | ) | __riscv_vloxseg3ei16_v_i16m1(__VA_ARGS__) |
| #define vloxseg3ei16_v_i16m1_m | ( | ... | ) | __riscv_vloxseg3ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_i16m2 | ( | ... | ) | __riscv_vloxseg3ei16_v_i16m2(__VA_ARGS__) |
| #define vloxseg3ei16_v_i16m2_m | ( | ... | ) | __riscv_vloxseg3ei16_v_i16m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_i16mf2 | ( | ... | ) | __riscv_vloxseg3ei16_v_i16mf2(__VA_ARGS__) |
| #define vloxseg3ei16_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg3ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_i16mf4 | ( | ... | ) | __riscv_vloxseg3ei16_v_i16mf4(__VA_ARGS__) |
| #define vloxseg3ei16_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg3ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_i32m1 | ( | ... | ) | __riscv_vloxseg3ei16_v_i32m1(__VA_ARGS__) |
| #define vloxseg3ei16_v_i32m1_m | ( | ... | ) | __riscv_vloxseg3ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_i32m2 | ( | ... | ) | __riscv_vloxseg3ei16_v_i32m2(__VA_ARGS__) |
| #define vloxseg3ei16_v_i32m2_m | ( | ... | ) | __riscv_vloxseg3ei16_v_i32m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_i32mf2 | ( | ... | ) | __riscv_vloxseg3ei16_v_i32mf2(__VA_ARGS__) |
| #define vloxseg3ei16_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg3ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_i64m1 | ( | ... | ) | __riscv_vloxseg3ei16_v_i64m1(__VA_ARGS__) |
| #define vloxseg3ei16_v_i64m1_m | ( | ... | ) | __riscv_vloxseg3ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_i64m2 | ( | ... | ) | __riscv_vloxseg3ei16_v_i64m2(__VA_ARGS__) |
| #define vloxseg3ei16_v_i64m2_m | ( | ... | ) | __riscv_vloxseg3ei16_v_i64m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_i8m1 | ( | ... | ) | __riscv_vloxseg3ei16_v_i8m1(__VA_ARGS__) |
| #define vloxseg3ei16_v_i8m1_m | ( | ... | ) | __riscv_vloxseg3ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_i8m2 | ( | ... | ) | __riscv_vloxseg3ei16_v_i8m2(__VA_ARGS__) |
| #define vloxseg3ei16_v_i8m2_m | ( | ... | ) | __riscv_vloxseg3ei16_v_i8m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_i8mf2 | ( | ... | ) | __riscv_vloxseg3ei16_v_i8mf2(__VA_ARGS__) |
| #define vloxseg3ei16_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg3ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_i8mf4 | ( | ... | ) | __riscv_vloxseg3ei16_v_i8mf4(__VA_ARGS__) |
| #define vloxseg3ei16_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg3ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_i8mf8 | ( | ... | ) | __riscv_vloxseg3ei16_v_i8mf8(__VA_ARGS__) |
| #define vloxseg3ei16_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg3ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_u16m1 | ( | ... | ) | __riscv_vloxseg3ei16_v_u16m1(__VA_ARGS__) |
| #define vloxseg3ei16_v_u16m1_m | ( | ... | ) | __riscv_vloxseg3ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_u16m2 | ( | ... | ) | __riscv_vloxseg3ei16_v_u16m2(__VA_ARGS__) |
| #define vloxseg3ei16_v_u16m2_m | ( | ... | ) | __riscv_vloxseg3ei16_v_u16m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_u16mf2 | ( | ... | ) | __riscv_vloxseg3ei16_v_u16mf2(__VA_ARGS__) |
| #define vloxseg3ei16_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg3ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_u16mf4 | ( | ... | ) | __riscv_vloxseg3ei16_v_u16mf4(__VA_ARGS__) |
| #define vloxseg3ei16_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg3ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_u32m1 | ( | ... | ) | __riscv_vloxseg3ei16_v_u32m1(__VA_ARGS__) |
| #define vloxseg3ei16_v_u32m1_m | ( | ... | ) | __riscv_vloxseg3ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_u32m2 | ( | ... | ) | __riscv_vloxseg3ei16_v_u32m2(__VA_ARGS__) |
| #define vloxseg3ei16_v_u32m2_m | ( | ... | ) | __riscv_vloxseg3ei16_v_u32m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_u32mf2 | ( | ... | ) | __riscv_vloxseg3ei16_v_u32mf2(__VA_ARGS__) |
| #define vloxseg3ei16_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg3ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_u64m1 | ( | ... | ) | __riscv_vloxseg3ei16_v_u64m1(__VA_ARGS__) |
| #define vloxseg3ei16_v_u64m1_m | ( | ... | ) | __riscv_vloxseg3ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_u64m2 | ( | ... | ) | __riscv_vloxseg3ei16_v_u64m2(__VA_ARGS__) |
| #define vloxseg3ei16_v_u64m2_m | ( | ... | ) | __riscv_vloxseg3ei16_v_u64m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_u8m1 | ( | ... | ) | __riscv_vloxseg3ei16_v_u8m1(__VA_ARGS__) |
| #define vloxseg3ei16_v_u8m1_m | ( | ... | ) | __riscv_vloxseg3ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_u8m2 | ( | ... | ) | __riscv_vloxseg3ei16_v_u8m2(__VA_ARGS__) |
| #define vloxseg3ei16_v_u8m2_m | ( | ... | ) | __riscv_vloxseg3ei16_v_u8m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_u8mf2 | ( | ... | ) | __riscv_vloxseg3ei16_v_u8mf2(__VA_ARGS__) |
| #define vloxseg3ei16_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg3ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_u8mf4 | ( | ... | ) | __riscv_vloxseg3ei16_v_u8mf4(__VA_ARGS__) |
| #define vloxseg3ei16_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg3ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg3ei16_v_u8mf8 | ( | ... | ) | __riscv_vloxseg3ei16_v_u8mf8(__VA_ARGS__) |
| #define vloxseg3ei16_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg3ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_f16m1 | ( | ... | ) | __riscv_vloxseg3ei32_v_f16m1(__VA_ARGS__) |
| #define vloxseg3ei32_v_f16m1_m | ( | ... | ) | __riscv_vloxseg3ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_f16m2 | ( | ... | ) | __riscv_vloxseg3ei32_v_f16m2(__VA_ARGS__) |
| #define vloxseg3ei32_v_f16m2_m | ( | ... | ) | __riscv_vloxseg3ei32_v_f16m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_f16mf2 | ( | ... | ) | __riscv_vloxseg3ei32_v_f16mf2(__VA_ARGS__) |
| #define vloxseg3ei32_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg3ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_f16mf4 | ( | ... | ) | __riscv_vloxseg3ei32_v_f16mf4(__VA_ARGS__) |
| #define vloxseg3ei32_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg3ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_f32m1 | ( | ... | ) | __riscv_vloxseg3ei32_v_f32m1(__VA_ARGS__) |
| #define vloxseg3ei32_v_f32m1_m | ( | ... | ) | __riscv_vloxseg3ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_f32m2 | ( | ... | ) | __riscv_vloxseg3ei32_v_f32m2(__VA_ARGS__) |
| #define vloxseg3ei32_v_f32m2_m | ( | ... | ) | __riscv_vloxseg3ei32_v_f32m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_f32mf2 | ( | ... | ) | __riscv_vloxseg3ei32_v_f32mf2(__VA_ARGS__) |
| #define vloxseg3ei32_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg3ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_f64m1 | ( | ... | ) | __riscv_vloxseg3ei32_v_f64m1(__VA_ARGS__) |
| #define vloxseg3ei32_v_f64m1_m | ( | ... | ) | __riscv_vloxseg3ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_f64m2 | ( | ... | ) | __riscv_vloxseg3ei32_v_f64m2(__VA_ARGS__) |
| #define vloxseg3ei32_v_f64m2_m | ( | ... | ) | __riscv_vloxseg3ei32_v_f64m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_i16m1 | ( | ... | ) | __riscv_vloxseg3ei32_v_i16m1(__VA_ARGS__) |
| #define vloxseg3ei32_v_i16m1_m | ( | ... | ) | __riscv_vloxseg3ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_i16m2 | ( | ... | ) | __riscv_vloxseg3ei32_v_i16m2(__VA_ARGS__) |
| #define vloxseg3ei32_v_i16m2_m | ( | ... | ) | __riscv_vloxseg3ei32_v_i16m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_i16mf2 | ( | ... | ) | __riscv_vloxseg3ei32_v_i16mf2(__VA_ARGS__) |
| #define vloxseg3ei32_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg3ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_i16mf4 | ( | ... | ) | __riscv_vloxseg3ei32_v_i16mf4(__VA_ARGS__) |
| #define vloxseg3ei32_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg3ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_i32m1 | ( | ... | ) | __riscv_vloxseg3ei32_v_i32m1(__VA_ARGS__) |
| #define vloxseg3ei32_v_i32m1_m | ( | ... | ) | __riscv_vloxseg3ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_i32m2 | ( | ... | ) | __riscv_vloxseg3ei32_v_i32m2(__VA_ARGS__) |
| #define vloxseg3ei32_v_i32m2_m | ( | ... | ) | __riscv_vloxseg3ei32_v_i32m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_i32mf2 | ( | ... | ) | __riscv_vloxseg3ei32_v_i32mf2(__VA_ARGS__) |
| #define vloxseg3ei32_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg3ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_i64m1 | ( | ... | ) | __riscv_vloxseg3ei32_v_i64m1(__VA_ARGS__) |
| #define vloxseg3ei32_v_i64m1_m | ( | ... | ) | __riscv_vloxseg3ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_i64m2 | ( | ... | ) | __riscv_vloxseg3ei32_v_i64m2(__VA_ARGS__) |
| #define vloxseg3ei32_v_i64m2_m | ( | ... | ) | __riscv_vloxseg3ei32_v_i64m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_i8m1 | ( | ... | ) | __riscv_vloxseg3ei32_v_i8m1(__VA_ARGS__) |
| #define vloxseg3ei32_v_i8m1_m | ( | ... | ) | __riscv_vloxseg3ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_i8m2 | ( | ... | ) | __riscv_vloxseg3ei32_v_i8m2(__VA_ARGS__) |
| #define vloxseg3ei32_v_i8m2_m | ( | ... | ) | __riscv_vloxseg3ei32_v_i8m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_i8mf2 | ( | ... | ) | __riscv_vloxseg3ei32_v_i8mf2(__VA_ARGS__) |
| #define vloxseg3ei32_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg3ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_i8mf4 | ( | ... | ) | __riscv_vloxseg3ei32_v_i8mf4(__VA_ARGS__) |
| #define vloxseg3ei32_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg3ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_i8mf8 | ( | ... | ) | __riscv_vloxseg3ei32_v_i8mf8(__VA_ARGS__) |
| #define vloxseg3ei32_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg3ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_u16m1 | ( | ... | ) | __riscv_vloxseg3ei32_v_u16m1(__VA_ARGS__) |
| #define vloxseg3ei32_v_u16m1_m | ( | ... | ) | __riscv_vloxseg3ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_u16m2 | ( | ... | ) | __riscv_vloxseg3ei32_v_u16m2(__VA_ARGS__) |
| #define vloxseg3ei32_v_u16m2_m | ( | ... | ) | __riscv_vloxseg3ei32_v_u16m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_u16mf2 | ( | ... | ) | __riscv_vloxseg3ei32_v_u16mf2(__VA_ARGS__) |
| #define vloxseg3ei32_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg3ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_u16mf4 | ( | ... | ) | __riscv_vloxseg3ei32_v_u16mf4(__VA_ARGS__) |
| #define vloxseg3ei32_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg3ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_u32m1 | ( | ... | ) | __riscv_vloxseg3ei32_v_u32m1(__VA_ARGS__) |
| #define vloxseg3ei32_v_u32m1_m | ( | ... | ) | __riscv_vloxseg3ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_u32m2 | ( | ... | ) | __riscv_vloxseg3ei32_v_u32m2(__VA_ARGS__) |
| #define vloxseg3ei32_v_u32m2_m | ( | ... | ) | __riscv_vloxseg3ei32_v_u32m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_u32mf2 | ( | ... | ) | __riscv_vloxseg3ei32_v_u32mf2(__VA_ARGS__) |
| #define vloxseg3ei32_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg3ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_u64m1 | ( | ... | ) | __riscv_vloxseg3ei32_v_u64m1(__VA_ARGS__) |
| #define vloxseg3ei32_v_u64m1_m | ( | ... | ) | __riscv_vloxseg3ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_u64m2 | ( | ... | ) | __riscv_vloxseg3ei32_v_u64m2(__VA_ARGS__) |
| #define vloxseg3ei32_v_u64m2_m | ( | ... | ) | __riscv_vloxseg3ei32_v_u64m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_u8m1 | ( | ... | ) | __riscv_vloxseg3ei32_v_u8m1(__VA_ARGS__) |
| #define vloxseg3ei32_v_u8m1_m | ( | ... | ) | __riscv_vloxseg3ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_u8m2 | ( | ... | ) | __riscv_vloxseg3ei32_v_u8m2(__VA_ARGS__) |
| #define vloxseg3ei32_v_u8m2_m | ( | ... | ) | __riscv_vloxseg3ei32_v_u8m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_u8mf2 | ( | ... | ) | __riscv_vloxseg3ei32_v_u8mf2(__VA_ARGS__) |
| #define vloxseg3ei32_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg3ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_u8mf4 | ( | ... | ) | __riscv_vloxseg3ei32_v_u8mf4(__VA_ARGS__) |
| #define vloxseg3ei32_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg3ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg3ei32_v_u8mf8 | ( | ... | ) | __riscv_vloxseg3ei32_v_u8mf8(__VA_ARGS__) |
| #define vloxseg3ei32_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg3ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_f16m1 | ( | ... | ) | __riscv_vloxseg3ei64_v_f16m1(__VA_ARGS__) |
| #define vloxseg3ei64_v_f16m1_m | ( | ... | ) | __riscv_vloxseg3ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_f16m2 | ( | ... | ) | __riscv_vloxseg3ei64_v_f16m2(__VA_ARGS__) |
| #define vloxseg3ei64_v_f16m2_m | ( | ... | ) | __riscv_vloxseg3ei64_v_f16m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_f16mf2 | ( | ... | ) | __riscv_vloxseg3ei64_v_f16mf2(__VA_ARGS__) |
| #define vloxseg3ei64_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg3ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_f16mf4 | ( | ... | ) | __riscv_vloxseg3ei64_v_f16mf4(__VA_ARGS__) |
| #define vloxseg3ei64_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg3ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_f32m1 | ( | ... | ) | __riscv_vloxseg3ei64_v_f32m1(__VA_ARGS__) |
| #define vloxseg3ei64_v_f32m1_m | ( | ... | ) | __riscv_vloxseg3ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_f32m2 | ( | ... | ) | __riscv_vloxseg3ei64_v_f32m2(__VA_ARGS__) |
| #define vloxseg3ei64_v_f32m2_m | ( | ... | ) | __riscv_vloxseg3ei64_v_f32m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_f32mf2 | ( | ... | ) | __riscv_vloxseg3ei64_v_f32mf2(__VA_ARGS__) |
| #define vloxseg3ei64_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg3ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_f64m1 | ( | ... | ) | __riscv_vloxseg3ei64_v_f64m1(__VA_ARGS__) |
| #define vloxseg3ei64_v_f64m1_m | ( | ... | ) | __riscv_vloxseg3ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_f64m2 | ( | ... | ) | __riscv_vloxseg3ei64_v_f64m2(__VA_ARGS__) |
| #define vloxseg3ei64_v_f64m2_m | ( | ... | ) | __riscv_vloxseg3ei64_v_f64m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_i16m1 | ( | ... | ) | __riscv_vloxseg3ei64_v_i16m1(__VA_ARGS__) |
| #define vloxseg3ei64_v_i16m1_m | ( | ... | ) | __riscv_vloxseg3ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_i16m2 | ( | ... | ) | __riscv_vloxseg3ei64_v_i16m2(__VA_ARGS__) |
| #define vloxseg3ei64_v_i16m2_m | ( | ... | ) | __riscv_vloxseg3ei64_v_i16m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_i16mf2 | ( | ... | ) | __riscv_vloxseg3ei64_v_i16mf2(__VA_ARGS__) |
| #define vloxseg3ei64_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg3ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_i16mf4 | ( | ... | ) | __riscv_vloxseg3ei64_v_i16mf4(__VA_ARGS__) |
| #define vloxseg3ei64_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg3ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_i32m1 | ( | ... | ) | __riscv_vloxseg3ei64_v_i32m1(__VA_ARGS__) |
| #define vloxseg3ei64_v_i32m1_m | ( | ... | ) | __riscv_vloxseg3ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_i32m2 | ( | ... | ) | __riscv_vloxseg3ei64_v_i32m2(__VA_ARGS__) |
| #define vloxseg3ei64_v_i32m2_m | ( | ... | ) | __riscv_vloxseg3ei64_v_i32m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_i32mf2 | ( | ... | ) | __riscv_vloxseg3ei64_v_i32mf2(__VA_ARGS__) |
| #define vloxseg3ei64_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg3ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_i64m1 | ( | ... | ) | __riscv_vloxseg3ei64_v_i64m1(__VA_ARGS__) |
| #define vloxseg3ei64_v_i64m1_m | ( | ... | ) | __riscv_vloxseg3ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_i64m2 | ( | ... | ) | __riscv_vloxseg3ei64_v_i64m2(__VA_ARGS__) |
| #define vloxseg3ei64_v_i64m2_m | ( | ... | ) | __riscv_vloxseg3ei64_v_i64m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_i8m1 | ( | ... | ) | __riscv_vloxseg3ei64_v_i8m1(__VA_ARGS__) |
| #define vloxseg3ei64_v_i8m1_m | ( | ... | ) | __riscv_vloxseg3ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_i8mf2 | ( | ... | ) | __riscv_vloxseg3ei64_v_i8mf2(__VA_ARGS__) |
| #define vloxseg3ei64_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg3ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_i8mf4 | ( | ... | ) | __riscv_vloxseg3ei64_v_i8mf4(__VA_ARGS__) |
| #define vloxseg3ei64_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg3ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_i8mf8 | ( | ... | ) | __riscv_vloxseg3ei64_v_i8mf8(__VA_ARGS__) |
| #define vloxseg3ei64_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg3ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_u16m1 | ( | ... | ) | __riscv_vloxseg3ei64_v_u16m1(__VA_ARGS__) |
| #define vloxseg3ei64_v_u16m1_m | ( | ... | ) | __riscv_vloxseg3ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_u16m2 | ( | ... | ) | __riscv_vloxseg3ei64_v_u16m2(__VA_ARGS__) |
| #define vloxseg3ei64_v_u16m2_m | ( | ... | ) | __riscv_vloxseg3ei64_v_u16m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_u16mf2 | ( | ... | ) | __riscv_vloxseg3ei64_v_u16mf2(__VA_ARGS__) |
| #define vloxseg3ei64_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg3ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_u16mf4 | ( | ... | ) | __riscv_vloxseg3ei64_v_u16mf4(__VA_ARGS__) |
| #define vloxseg3ei64_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg3ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_u32m1 | ( | ... | ) | __riscv_vloxseg3ei64_v_u32m1(__VA_ARGS__) |
| #define vloxseg3ei64_v_u32m1_m | ( | ... | ) | __riscv_vloxseg3ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_u32m2 | ( | ... | ) | __riscv_vloxseg3ei64_v_u32m2(__VA_ARGS__) |
| #define vloxseg3ei64_v_u32m2_m | ( | ... | ) | __riscv_vloxseg3ei64_v_u32m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_u32mf2 | ( | ... | ) | __riscv_vloxseg3ei64_v_u32mf2(__VA_ARGS__) |
| #define vloxseg3ei64_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg3ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_u64m1 | ( | ... | ) | __riscv_vloxseg3ei64_v_u64m1(__VA_ARGS__) |
| #define vloxseg3ei64_v_u64m1_m | ( | ... | ) | __riscv_vloxseg3ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_u64m2 | ( | ... | ) | __riscv_vloxseg3ei64_v_u64m2(__VA_ARGS__) |
| #define vloxseg3ei64_v_u64m2_m | ( | ... | ) | __riscv_vloxseg3ei64_v_u64m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_u8m1 | ( | ... | ) | __riscv_vloxseg3ei64_v_u8m1(__VA_ARGS__) |
| #define vloxseg3ei64_v_u8m1_m | ( | ... | ) | __riscv_vloxseg3ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_u8mf2 | ( | ... | ) | __riscv_vloxseg3ei64_v_u8mf2(__VA_ARGS__) |
| #define vloxseg3ei64_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg3ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_u8mf4 | ( | ... | ) | __riscv_vloxseg3ei64_v_u8mf4(__VA_ARGS__) |
| #define vloxseg3ei64_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg3ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg3ei64_v_u8mf8 | ( | ... | ) | __riscv_vloxseg3ei64_v_u8mf8(__VA_ARGS__) |
| #define vloxseg3ei64_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg3ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_f16m1 | ( | ... | ) | __riscv_vloxseg3ei8_v_f16m1(__VA_ARGS__) |
| #define vloxseg3ei8_v_f16m1_m | ( | ... | ) | __riscv_vloxseg3ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_f16m2 | ( | ... | ) | __riscv_vloxseg3ei8_v_f16m2(__VA_ARGS__) |
| #define vloxseg3ei8_v_f16m2_m | ( | ... | ) | __riscv_vloxseg3ei8_v_f16m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_f16mf2 | ( | ... | ) | __riscv_vloxseg3ei8_v_f16mf2(__VA_ARGS__) |
| #define vloxseg3ei8_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg3ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_f16mf4 | ( | ... | ) | __riscv_vloxseg3ei8_v_f16mf4(__VA_ARGS__) |
| #define vloxseg3ei8_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg3ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_f32m1 | ( | ... | ) | __riscv_vloxseg3ei8_v_f32m1(__VA_ARGS__) |
| #define vloxseg3ei8_v_f32m1_m | ( | ... | ) | __riscv_vloxseg3ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_f32m2 | ( | ... | ) | __riscv_vloxseg3ei8_v_f32m2(__VA_ARGS__) |
| #define vloxseg3ei8_v_f32m2_m | ( | ... | ) | __riscv_vloxseg3ei8_v_f32m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_f32mf2 | ( | ... | ) | __riscv_vloxseg3ei8_v_f32mf2(__VA_ARGS__) |
| #define vloxseg3ei8_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg3ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_f64m1 | ( | ... | ) | __riscv_vloxseg3ei8_v_f64m1(__VA_ARGS__) |
| #define vloxseg3ei8_v_f64m1_m | ( | ... | ) | __riscv_vloxseg3ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_f64m2 | ( | ... | ) | __riscv_vloxseg3ei8_v_f64m2(__VA_ARGS__) |
| #define vloxseg3ei8_v_f64m2_m | ( | ... | ) | __riscv_vloxseg3ei8_v_f64m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_i16m1 | ( | ... | ) | __riscv_vloxseg3ei8_v_i16m1(__VA_ARGS__) |
| #define vloxseg3ei8_v_i16m1_m | ( | ... | ) | __riscv_vloxseg3ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_i16m2 | ( | ... | ) | __riscv_vloxseg3ei8_v_i16m2(__VA_ARGS__) |
| #define vloxseg3ei8_v_i16m2_m | ( | ... | ) | __riscv_vloxseg3ei8_v_i16m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_i16mf2 | ( | ... | ) | __riscv_vloxseg3ei8_v_i16mf2(__VA_ARGS__) |
| #define vloxseg3ei8_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg3ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_i16mf4 | ( | ... | ) | __riscv_vloxseg3ei8_v_i16mf4(__VA_ARGS__) |
| #define vloxseg3ei8_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg3ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_i32m1 | ( | ... | ) | __riscv_vloxseg3ei8_v_i32m1(__VA_ARGS__) |
| #define vloxseg3ei8_v_i32m1_m | ( | ... | ) | __riscv_vloxseg3ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_i32m2 | ( | ... | ) | __riscv_vloxseg3ei8_v_i32m2(__VA_ARGS__) |
| #define vloxseg3ei8_v_i32m2_m | ( | ... | ) | __riscv_vloxseg3ei8_v_i32m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_i32mf2 | ( | ... | ) | __riscv_vloxseg3ei8_v_i32mf2(__VA_ARGS__) |
| #define vloxseg3ei8_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg3ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_i64m1 | ( | ... | ) | __riscv_vloxseg3ei8_v_i64m1(__VA_ARGS__) |
| #define vloxseg3ei8_v_i64m1_m | ( | ... | ) | __riscv_vloxseg3ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_i64m2 | ( | ... | ) | __riscv_vloxseg3ei8_v_i64m2(__VA_ARGS__) |
| #define vloxseg3ei8_v_i64m2_m | ( | ... | ) | __riscv_vloxseg3ei8_v_i64m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_i8m1 | ( | ... | ) | __riscv_vloxseg3ei8_v_i8m1(__VA_ARGS__) |
| #define vloxseg3ei8_v_i8m1_m | ( | ... | ) | __riscv_vloxseg3ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_i8m2 | ( | ... | ) | __riscv_vloxseg3ei8_v_i8m2(__VA_ARGS__) |
| #define vloxseg3ei8_v_i8m2_m | ( | ... | ) | __riscv_vloxseg3ei8_v_i8m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_i8mf2 | ( | ... | ) | __riscv_vloxseg3ei8_v_i8mf2(__VA_ARGS__) |
| #define vloxseg3ei8_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg3ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_i8mf4 | ( | ... | ) | __riscv_vloxseg3ei8_v_i8mf4(__VA_ARGS__) |
| #define vloxseg3ei8_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg3ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_i8mf8 | ( | ... | ) | __riscv_vloxseg3ei8_v_i8mf8(__VA_ARGS__) |
| #define vloxseg3ei8_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg3ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_u16m1 | ( | ... | ) | __riscv_vloxseg3ei8_v_u16m1(__VA_ARGS__) |
| #define vloxseg3ei8_v_u16m1_m | ( | ... | ) | __riscv_vloxseg3ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_u16m2 | ( | ... | ) | __riscv_vloxseg3ei8_v_u16m2(__VA_ARGS__) |
| #define vloxseg3ei8_v_u16m2_m | ( | ... | ) | __riscv_vloxseg3ei8_v_u16m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_u16mf2 | ( | ... | ) | __riscv_vloxseg3ei8_v_u16mf2(__VA_ARGS__) |
| #define vloxseg3ei8_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg3ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_u16mf4 | ( | ... | ) | __riscv_vloxseg3ei8_v_u16mf4(__VA_ARGS__) |
| #define vloxseg3ei8_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg3ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_u32m1 | ( | ... | ) | __riscv_vloxseg3ei8_v_u32m1(__VA_ARGS__) |
| #define vloxseg3ei8_v_u32m1_m | ( | ... | ) | __riscv_vloxseg3ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_u32m2 | ( | ... | ) | __riscv_vloxseg3ei8_v_u32m2(__VA_ARGS__) |
| #define vloxseg3ei8_v_u32m2_m | ( | ... | ) | __riscv_vloxseg3ei8_v_u32m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_u32mf2 | ( | ... | ) | __riscv_vloxseg3ei8_v_u32mf2(__VA_ARGS__) |
| #define vloxseg3ei8_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg3ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_u64m1 | ( | ... | ) | __riscv_vloxseg3ei8_v_u64m1(__VA_ARGS__) |
| #define vloxseg3ei8_v_u64m1_m | ( | ... | ) | __riscv_vloxseg3ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_u64m2 | ( | ... | ) | __riscv_vloxseg3ei8_v_u64m2(__VA_ARGS__) |
| #define vloxseg3ei8_v_u64m2_m | ( | ... | ) | __riscv_vloxseg3ei8_v_u64m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_u8m1 | ( | ... | ) | __riscv_vloxseg3ei8_v_u8m1(__VA_ARGS__) |
| #define vloxseg3ei8_v_u8m1_m | ( | ... | ) | __riscv_vloxseg3ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_u8m2 | ( | ... | ) | __riscv_vloxseg3ei8_v_u8m2(__VA_ARGS__) |
| #define vloxseg3ei8_v_u8m2_m | ( | ... | ) | __riscv_vloxseg3ei8_v_u8m2_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_u8mf2 | ( | ... | ) | __riscv_vloxseg3ei8_v_u8mf2(__VA_ARGS__) |
| #define vloxseg3ei8_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg3ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_u8mf4 | ( | ... | ) | __riscv_vloxseg3ei8_v_u8mf4(__VA_ARGS__) |
| #define vloxseg3ei8_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg3ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg3ei8_v_u8mf8 | ( | ... | ) | __riscv_vloxseg3ei8_v_u8mf8(__VA_ARGS__) |
| #define vloxseg3ei8_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg3ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_f16m1 | ( | ... | ) | __riscv_vloxseg4ei16_v_f16m1(__VA_ARGS__) |
| #define vloxseg4ei16_v_f16m1_m | ( | ... | ) | __riscv_vloxseg4ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_f16m2 | ( | ... | ) | __riscv_vloxseg4ei16_v_f16m2(__VA_ARGS__) |
| #define vloxseg4ei16_v_f16m2_m | ( | ... | ) | __riscv_vloxseg4ei16_v_f16m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_f16mf2 | ( | ... | ) | __riscv_vloxseg4ei16_v_f16mf2(__VA_ARGS__) |
| #define vloxseg4ei16_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg4ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_f16mf4 | ( | ... | ) | __riscv_vloxseg4ei16_v_f16mf4(__VA_ARGS__) |
| #define vloxseg4ei16_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg4ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_f32m1 | ( | ... | ) | __riscv_vloxseg4ei16_v_f32m1(__VA_ARGS__) |
| #define vloxseg4ei16_v_f32m1_m | ( | ... | ) | __riscv_vloxseg4ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_f32m2 | ( | ... | ) | __riscv_vloxseg4ei16_v_f32m2(__VA_ARGS__) |
| #define vloxseg4ei16_v_f32m2_m | ( | ... | ) | __riscv_vloxseg4ei16_v_f32m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_f32mf2 | ( | ... | ) | __riscv_vloxseg4ei16_v_f32mf2(__VA_ARGS__) |
| #define vloxseg4ei16_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg4ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_f64m1 | ( | ... | ) | __riscv_vloxseg4ei16_v_f64m1(__VA_ARGS__) |
| #define vloxseg4ei16_v_f64m1_m | ( | ... | ) | __riscv_vloxseg4ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_f64m2 | ( | ... | ) | __riscv_vloxseg4ei16_v_f64m2(__VA_ARGS__) |
| #define vloxseg4ei16_v_f64m2_m | ( | ... | ) | __riscv_vloxseg4ei16_v_f64m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_i16m1 | ( | ... | ) | __riscv_vloxseg4ei16_v_i16m1(__VA_ARGS__) |
| #define vloxseg4ei16_v_i16m1_m | ( | ... | ) | __riscv_vloxseg4ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_i16m2 | ( | ... | ) | __riscv_vloxseg4ei16_v_i16m2(__VA_ARGS__) |
| #define vloxseg4ei16_v_i16m2_m | ( | ... | ) | __riscv_vloxseg4ei16_v_i16m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_i16mf2 | ( | ... | ) | __riscv_vloxseg4ei16_v_i16mf2(__VA_ARGS__) |
| #define vloxseg4ei16_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg4ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_i16mf4 | ( | ... | ) | __riscv_vloxseg4ei16_v_i16mf4(__VA_ARGS__) |
| #define vloxseg4ei16_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg4ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_i32m1 | ( | ... | ) | __riscv_vloxseg4ei16_v_i32m1(__VA_ARGS__) |
| #define vloxseg4ei16_v_i32m1_m | ( | ... | ) | __riscv_vloxseg4ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_i32m2 | ( | ... | ) | __riscv_vloxseg4ei16_v_i32m2(__VA_ARGS__) |
| #define vloxseg4ei16_v_i32m2_m | ( | ... | ) | __riscv_vloxseg4ei16_v_i32m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_i32mf2 | ( | ... | ) | __riscv_vloxseg4ei16_v_i32mf2(__VA_ARGS__) |
| #define vloxseg4ei16_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg4ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_i64m1 | ( | ... | ) | __riscv_vloxseg4ei16_v_i64m1(__VA_ARGS__) |
| #define vloxseg4ei16_v_i64m1_m | ( | ... | ) | __riscv_vloxseg4ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_i64m2 | ( | ... | ) | __riscv_vloxseg4ei16_v_i64m2(__VA_ARGS__) |
| #define vloxseg4ei16_v_i64m2_m | ( | ... | ) | __riscv_vloxseg4ei16_v_i64m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_i8m1 | ( | ... | ) | __riscv_vloxseg4ei16_v_i8m1(__VA_ARGS__) |
| #define vloxseg4ei16_v_i8m1_m | ( | ... | ) | __riscv_vloxseg4ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_i8m2 | ( | ... | ) | __riscv_vloxseg4ei16_v_i8m2(__VA_ARGS__) |
| #define vloxseg4ei16_v_i8m2_m | ( | ... | ) | __riscv_vloxseg4ei16_v_i8m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_i8mf2 | ( | ... | ) | __riscv_vloxseg4ei16_v_i8mf2(__VA_ARGS__) |
| #define vloxseg4ei16_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg4ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_i8mf4 | ( | ... | ) | __riscv_vloxseg4ei16_v_i8mf4(__VA_ARGS__) |
| #define vloxseg4ei16_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg4ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_i8mf8 | ( | ... | ) | __riscv_vloxseg4ei16_v_i8mf8(__VA_ARGS__) |
| #define vloxseg4ei16_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg4ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_u16m1 | ( | ... | ) | __riscv_vloxseg4ei16_v_u16m1(__VA_ARGS__) |
| #define vloxseg4ei16_v_u16m1_m | ( | ... | ) | __riscv_vloxseg4ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_u16m2 | ( | ... | ) | __riscv_vloxseg4ei16_v_u16m2(__VA_ARGS__) |
| #define vloxseg4ei16_v_u16m2_m | ( | ... | ) | __riscv_vloxseg4ei16_v_u16m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_u16mf2 | ( | ... | ) | __riscv_vloxseg4ei16_v_u16mf2(__VA_ARGS__) |
| #define vloxseg4ei16_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg4ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_u16mf4 | ( | ... | ) | __riscv_vloxseg4ei16_v_u16mf4(__VA_ARGS__) |
| #define vloxseg4ei16_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg4ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_u32m1 | ( | ... | ) | __riscv_vloxseg4ei16_v_u32m1(__VA_ARGS__) |
| #define vloxseg4ei16_v_u32m1_m | ( | ... | ) | __riscv_vloxseg4ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_u32m2 | ( | ... | ) | __riscv_vloxseg4ei16_v_u32m2(__VA_ARGS__) |
| #define vloxseg4ei16_v_u32m2_m | ( | ... | ) | __riscv_vloxseg4ei16_v_u32m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_u32mf2 | ( | ... | ) | __riscv_vloxseg4ei16_v_u32mf2(__VA_ARGS__) |
| #define vloxseg4ei16_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg4ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_u64m1 | ( | ... | ) | __riscv_vloxseg4ei16_v_u64m1(__VA_ARGS__) |
| #define vloxseg4ei16_v_u64m1_m | ( | ... | ) | __riscv_vloxseg4ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_u64m2 | ( | ... | ) | __riscv_vloxseg4ei16_v_u64m2(__VA_ARGS__) |
| #define vloxseg4ei16_v_u64m2_m | ( | ... | ) | __riscv_vloxseg4ei16_v_u64m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_u8m1 | ( | ... | ) | __riscv_vloxseg4ei16_v_u8m1(__VA_ARGS__) |
| #define vloxseg4ei16_v_u8m1_m | ( | ... | ) | __riscv_vloxseg4ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_u8m2 | ( | ... | ) | __riscv_vloxseg4ei16_v_u8m2(__VA_ARGS__) |
| #define vloxseg4ei16_v_u8m2_m | ( | ... | ) | __riscv_vloxseg4ei16_v_u8m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_u8mf2 | ( | ... | ) | __riscv_vloxseg4ei16_v_u8mf2(__VA_ARGS__) |
| #define vloxseg4ei16_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg4ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_u8mf4 | ( | ... | ) | __riscv_vloxseg4ei16_v_u8mf4(__VA_ARGS__) |
| #define vloxseg4ei16_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg4ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg4ei16_v_u8mf8 | ( | ... | ) | __riscv_vloxseg4ei16_v_u8mf8(__VA_ARGS__) |
| #define vloxseg4ei16_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg4ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_f16m1 | ( | ... | ) | __riscv_vloxseg4ei32_v_f16m1(__VA_ARGS__) |
| #define vloxseg4ei32_v_f16m1_m | ( | ... | ) | __riscv_vloxseg4ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_f16m2 | ( | ... | ) | __riscv_vloxseg4ei32_v_f16m2(__VA_ARGS__) |
| #define vloxseg4ei32_v_f16m2_m | ( | ... | ) | __riscv_vloxseg4ei32_v_f16m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_f16mf2 | ( | ... | ) | __riscv_vloxseg4ei32_v_f16mf2(__VA_ARGS__) |
| #define vloxseg4ei32_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg4ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_f16mf4 | ( | ... | ) | __riscv_vloxseg4ei32_v_f16mf4(__VA_ARGS__) |
| #define vloxseg4ei32_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg4ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_f32m1 | ( | ... | ) | __riscv_vloxseg4ei32_v_f32m1(__VA_ARGS__) |
| #define vloxseg4ei32_v_f32m1_m | ( | ... | ) | __riscv_vloxseg4ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_f32m2 | ( | ... | ) | __riscv_vloxseg4ei32_v_f32m2(__VA_ARGS__) |
| #define vloxseg4ei32_v_f32m2_m | ( | ... | ) | __riscv_vloxseg4ei32_v_f32m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_f32mf2 | ( | ... | ) | __riscv_vloxseg4ei32_v_f32mf2(__VA_ARGS__) |
| #define vloxseg4ei32_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg4ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_f64m1 | ( | ... | ) | __riscv_vloxseg4ei32_v_f64m1(__VA_ARGS__) |
| #define vloxseg4ei32_v_f64m1_m | ( | ... | ) | __riscv_vloxseg4ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_f64m2 | ( | ... | ) | __riscv_vloxseg4ei32_v_f64m2(__VA_ARGS__) |
| #define vloxseg4ei32_v_f64m2_m | ( | ... | ) | __riscv_vloxseg4ei32_v_f64m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_i16m1 | ( | ... | ) | __riscv_vloxseg4ei32_v_i16m1(__VA_ARGS__) |
| #define vloxseg4ei32_v_i16m1_m | ( | ... | ) | __riscv_vloxseg4ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_i16m2 | ( | ... | ) | __riscv_vloxseg4ei32_v_i16m2(__VA_ARGS__) |
| #define vloxseg4ei32_v_i16m2_m | ( | ... | ) | __riscv_vloxseg4ei32_v_i16m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_i16mf2 | ( | ... | ) | __riscv_vloxseg4ei32_v_i16mf2(__VA_ARGS__) |
| #define vloxseg4ei32_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg4ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_i16mf4 | ( | ... | ) | __riscv_vloxseg4ei32_v_i16mf4(__VA_ARGS__) |
| #define vloxseg4ei32_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg4ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_i32m1 | ( | ... | ) | __riscv_vloxseg4ei32_v_i32m1(__VA_ARGS__) |
| #define vloxseg4ei32_v_i32m1_m | ( | ... | ) | __riscv_vloxseg4ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_i32m2 | ( | ... | ) | __riscv_vloxseg4ei32_v_i32m2(__VA_ARGS__) |
| #define vloxseg4ei32_v_i32m2_m | ( | ... | ) | __riscv_vloxseg4ei32_v_i32m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_i32mf2 | ( | ... | ) | __riscv_vloxseg4ei32_v_i32mf2(__VA_ARGS__) |
| #define vloxseg4ei32_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg4ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_i64m1 | ( | ... | ) | __riscv_vloxseg4ei32_v_i64m1(__VA_ARGS__) |
| #define vloxseg4ei32_v_i64m1_m | ( | ... | ) | __riscv_vloxseg4ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_i64m2 | ( | ... | ) | __riscv_vloxseg4ei32_v_i64m2(__VA_ARGS__) |
| #define vloxseg4ei32_v_i64m2_m | ( | ... | ) | __riscv_vloxseg4ei32_v_i64m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_i8m1 | ( | ... | ) | __riscv_vloxseg4ei32_v_i8m1(__VA_ARGS__) |
| #define vloxseg4ei32_v_i8m1_m | ( | ... | ) | __riscv_vloxseg4ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_i8m2 | ( | ... | ) | __riscv_vloxseg4ei32_v_i8m2(__VA_ARGS__) |
| #define vloxseg4ei32_v_i8m2_m | ( | ... | ) | __riscv_vloxseg4ei32_v_i8m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_i8mf2 | ( | ... | ) | __riscv_vloxseg4ei32_v_i8mf2(__VA_ARGS__) |
| #define vloxseg4ei32_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg4ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_i8mf4 | ( | ... | ) | __riscv_vloxseg4ei32_v_i8mf4(__VA_ARGS__) |
| #define vloxseg4ei32_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg4ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_i8mf8 | ( | ... | ) | __riscv_vloxseg4ei32_v_i8mf8(__VA_ARGS__) |
| #define vloxseg4ei32_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg4ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_u16m1 | ( | ... | ) | __riscv_vloxseg4ei32_v_u16m1(__VA_ARGS__) |
| #define vloxseg4ei32_v_u16m1_m | ( | ... | ) | __riscv_vloxseg4ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_u16m2 | ( | ... | ) | __riscv_vloxseg4ei32_v_u16m2(__VA_ARGS__) |
| #define vloxseg4ei32_v_u16m2_m | ( | ... | ) | __riscv_vloxseg4ei32_v_u16m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_u16mf2 | ( | ... | ) | __riscv_vloxseg4ei32_v_u16mf2(__VA_ARGS__) |
| #define vloxseg4ei32_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg4ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_u16mf4 | ( | ... | ) | __riscv_vloxseg4ei32_v_u16mf4(__VA_ARGS__) |
| #define vloxseg4ei32_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg4ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_u32m1 | ( | ... | ) | __riscv_vloxseg4ei32_v_u32m1(__VA_ARGS__) |
| #define vloxseg4ei32_v_u32m1_m | ( | ... | ) | __riscv_vloxseg4ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_u32m2 | ( | ... | ) | __riscv_vloxseg4ei32_v_u32m2(__VA_ARGS__) |
| #define vloxseg4ei32_v_u32m2_m | ( | ... | ) | __riscv_vloxseg4ei32_v_u32m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_u32mf2 | ( | ... | ) | __riscv_vloxseg4ei32_v_u32mf2(__VA_ARGS__) |
| #define vloxseg4ei32_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg4ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_u64m1 | ( | ... | ) | __riscv_vloxseg4ei32_v_u64m1(__VA_ARGS__) |
| #define vloxseg4ei32_v_u64m1_m | ( | ... | ) | __riscv_vloxseg4ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_u64m2 | ( | ... | ) | __riscv_vloxseg4ei32_v_u64m2(__VA_ARGS__) |
| #define vloxseg4ei32_v_u64m2_m | ( | ... | ) | __riscv_vloxseg4ei32_v_u64m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_u8m1 | ( | ... | ) | __riscv_vloxseg4ei32_v_u8m1(__VA_ARGS__) |
| #define vloxseg4ei32_v_u8m1_m | ( | ... | ) | __riscv_vloxseg4ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_u8m2 | ( | ... | ) | __riscv_vloxseg4ei32_v_u8m2(__VA_ARGS__) |
| #define vloxseg4ei32_v_u8m2_m | ( | ... | ) | __riscv_vloxseg4ei32_v_u8m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_u8mf2 | ( | ... | ) | __riscv_vloxseg4ei32_v_u8mf2(__VA_ARGS__) |
| #define vloxseg4ei32_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg4ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_u8mf4 | ( | ... | ) | __riscv_vloxseg4ei32_v_u8mf4(__VA_ARGS__) |
| #define vloxseg4ei32_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg4ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg4ei32_v_u8mf8 | ( | ... | ) | __riscv_vloxseg4ei32_v_u8mf8(__VA_ARGS__) |
| #define vloxseg4ei32_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg4ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_f16m1 | ( | ... | ) | __riscv_vloxseg4ei64_v_f16m1(__VA_ARGS__) |
| #define vloxseg4ei64_v_f16m1_m | ( | ... | ) | __riscv_vloxseg4ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_f16m2 | ( | ... | ) | __riscv_vloxseg4ei64_v_f16m2(__VA_ARGS__) |
| #define vloxseg4ei64_v_f16m2_m | ( | ... | ) | __riscv_vloxseg4ei64_v_f16m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_f16mf2 | ( | ... | ) | __riscv_vloxseg4ei64_v_f16mf2(__VA_ARGS__) |
| #define vloxseg4ei64_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg4ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_f16mf4 | ( | ... | ) | __riscv_vloxseg4ei64_v_f16mf4(__VA_ARGS__) |
| #define vloxseg4ei64_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg4ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_f32m1 | ( | ... | ) | __riscv_vloxseg4ei64_v_f32m1(__VA_ARGS__) |
| #define vloxseg4ei64_v_f32m1_m | ( | ... | ) | __riscv_vloxseg4ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_f32m2 | ( | ... | ) | __riscv_vloxseg4ei64_v_f32m2(__VA_ARGS__) |
| #define vloxseg4ei64_v_f32m2_m | ( | ... | ) | __riscv_vloxseg4ei64_v_f32m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_f32mf2 | ( | ... | ) | __riscv_vloxseg4ei64_v_f32mf2(__VA_ARGS__) |
| #define vloxseg4ei64_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg4ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_f64m1 | ( | ... | ) | __riscv_vloxseg4ei64_v_f64m1(__VA_ARGS__) |
| #define vloxseg4ei64_v_f64m1_m | ( | ... | ) | __riscv_vloxseg4ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_f64m2 | ( | ... | ) | __riscv_vloxseg4ei64_v_f64m2(__VA_ARGS__) |
| #define vloxseg4ei64_v_f64m2_m | ( | ... | ) | __riscv_vloxseg4ei64_v_f64m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_i16m1 | ( | ... | ) | __riscv_vloxseg4ei64_v_i16m1(__VA_ARGS__) |
| #define vloxseg4ei64_v_i16m1_m | ( | ... | ) | __riscv_vloxseg4ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_i16m2 | ( | ... | ) | __riscv_vloxseg4ei64_v_i16m2(__VA_ARGS__) |
| #define vloxseg4ei64_v_i16m2_m | ( | ... | ) | __riscv_vloxseg4ei64_v_i16m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_i16mf2 | ( | ... | ) | __riscv_vloxseg4ei64_v_i16mf2(__VA_ARGS__) |
| #define vloxseg4ei64_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg4ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_i16mf4 | ( | ... | ) | __riscv_vloxseg4ei64_v_i16mf4(__VA_ARGS__) |
| #define vloxseg4ei64_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg4ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_i32m1 | ( | ... | ) | __riscv_vloxseg4ei64_v_i32m1(__VA_ARGS__) |
| #define vloxseg4ei64_v_i32m1_m | ( | ... | ) | __riscv_vloxseg4ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_i32m2 | ( | ... | ) | __riscv_vloxseg4ei64_v_i32m2(__VA_ARGS__) |
| #define vloxseg4ei64_v_i32m2_m | ( | ... | ) | __riscv_vloxseg4ei64_v_i32m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_i32mf2 | ( | ... | ) | __riscv_vloxseg4ei64_v_i32mf2(__VA_ARGS__) |
| #define vloxseg4ei64_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg4ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_i64m1 | ( | ... | ) | __riscv_vloxseg4ei64_v_i64m1(__VA_ARGS__) |
| #define vloxseg4ei64_v_i64m1_m | ( | ... | ) | __riscv_vloxseg4ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_i64m2 | ( | ... | ) | __riscv_vloxseg4ei64_v_i64m2(__VA_ARGS__) |
| #define vloxseg4ei64_v_i64m2_m | ( | ... | ) | __riscv_vloxseg4ei64_v_i64m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_i8m1 | ( | ... | ) | __riscv_vloxseg4ei64_v_i8m1(__VA_ARGS__) |
| #define vloxseg4ei64_v_i8m1_m | ( | ... | ) | __riscv_vloxseg4ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_i8mf2 | ( | ... | ) | __riscv_vloxseg4ei64_v_i8mf2(__VA_ARGS__) |
| #define vloxseg4ei64_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg4ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_i8mf4 | ( | ... | ) | __riscv_vloxseg4ei64_v_i8mf4(__VA_ARGS__) |
| #define vloxseg4ei64_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg4ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_i8mf8 | ( | ... | ) | __riscv_vloxseg4ei64_v_i8mf8(__VA_ARGS__) |
| #define vloxseg4ei64_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg4ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_u16m1 | ( | ... | ) | __riscv_vloxseg4ei64_v_u16m1(__VA_ARGS__) |
| #define vloxseg4ei64_v_u16m1_m | ( | ... | ) | __riscv_vloxseg4ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_u16m2 | ( | ... | ) | __riscv_vloxseg4ei64_v_u16m2(__VA_ARGS__) |
| #define vloxseg4ei64_v_u16m2_m | ( | ... | ) | __riscv_vloxseg4ei64_v_u16m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_u16mf2 | ( | ... | ) | __riscv_vloxseg4ei64_v_u16mf2(__VA_ARGS__) |
| #define vloxseg4ei64_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg4ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_u16mf4 | ( | ... | ) | __riscv_vloxseg4ei64_v_u16mf4(__VA_ARGS__) |
| #define vloxseg4ei64_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg4ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_u32m1 | ( | ... | ) | __riscv_vloxseg4ei64_v_u32m1(__VA_ARGS__) |
| #define vloxseg4ei64_v_u32m1_m | ( | ... | ) | __riscv_vloxseg4ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_u32m2 | ( | ... | ) | __riscv_vloxseg4ei64_v_u32m2(__VA_ARGS__) |
| #define vloxseg4ei64_v_u32m2_m | ( | ... | ) | __riscv_vloxseg4ei64_v_u32m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_u32mf2 | ( | ... | ) | __riscv_vloxseg4ei64_v_u32mf2(__VA_ARGS__) |
| #define vloxseg4ei64_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg4ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_u64m1 | ( | ... | ) | __riscv_vloxseg4ei64_v_u64m1(__VA_ARGS__) |
| #define vloxseg4ei64_v_u64m1_m | ( | ... | ) | __riscv_vloxseg4ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_u64m2 | ( | ... | ) | __riscv_vloxseg4ei64_v_u64m2(__VA_ARGS__) |
| #define vloxseg4ei64_v_u64m2_m | ( | ... | ) | __riscv_vloxseg4ei64_v_u64m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_u8m1 | ( | ... | ) | __riscv_vloxseg4ei64_v_u8m1(__VA_ARGS__) |
| #define vloxseg4ei64_v_u8m1_m | ( | ... | ) | __riscv_vloxseg4ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_u8mf2 | ( | ... | ) | __riscv_vloxseg4ei64_v_u8mf2(__VA_ARGS__) |
| #define vloxseg4ei64_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg4ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_u8mf4 | ( | ... | ) | __riscv_vloxseg4ei64_v_u8mf4(__VA_ARGS__) |
| #define vloxseg4ei64_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg4ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg4ei64_v_u8mf8 | ( | ... | ) | __riscv_vloxseg4ei64_v_u8mf8(__VA_ARGS__) |
| #define vloxseg4ei64_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg4ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_f16m1 | ( | ... | ) | __riscv_vloxseg4ei8_v_f16m1(__VA_ARGS__) |
| #define vloxseg4ei8_v_f16m1_m | ( | ... | ) | __riscv_vloxseg4ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_f16m2 | ( | ... | ) | __riscv_vloxseg4ei8_v_f16m2(__VA_ARGS__) |
| #define vloxseg4ei8_v_f16m2_m | ( | ... | ) | __riscv_vloxseg4ei8_v_f16m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_f16mf2 | ( | ... | ) | __riscv_vloxseg4ei8_v_f16mf2(__VA_ARGS__) |
| #define vloxseg4ei8_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg4ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_f16mf4 | ( | ... | ) | __riscv_vloxseg4ei8_v_f16mf4(__VA_ARGS__) |
| #define vloxseg4ei8_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg4ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_f32m1 | ( | ... | ) | __riscv_vloxseg4ei8_v_f32m1(__VA_ARGS__) |
| #define vloxseg4ei8_v_f32m1_m | ( | ... | ) | __riscv_vloxseg4ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_f32m2 | ( | ... | ) | __riscv_vloxseg4ei8_v_f32m2(__VA_ARGS__) |
| #define vloxseg4ei8_v_f32m2_m | ( | ... | ) | __riscv_vloxseg4ei8_v_f32m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_f32mf2 | ( | ... | ) | __riscv_vloxseg4ei8_v_f32mf2(__VA_ARGS__) |
| #define vloxseg4ei8_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg4ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_f64m1 | ( | ... | ) | __riscv_vloxseg4ei8_v_f64m1(__VA_ARGS__) |
| #define vloxseg4ei8_v_f64m1_m | ( | ... | ) | __riscv_vloxseg4ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_f64m2 | ( | ... | ) | __riscv_vloxseg4ei8_v_f64m2(__VA_ARGS__) |
| #define vloxseg4ei8_v_f64m2_m | ( | ... | ) | __riscv_vloxseg4ei8_v_f64m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_i16m1 | ( | ... | ) | __riscv_vloxseg4ei8_v_i16m1(__VA_ARGS__) |
| #define vloxseg4ei8_v_i16m1_m | ( | ... | ) | __riscv_vloxseg4ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_i16m2 | ( | ... | ) | __riscv_vloxseg4ei8_v_i16m2(__VA_ARGS__) |
| #define vloxseg4ei8_v_i16m2_m | ( | ... | ) | __riscv_vloxseg4ei8_v_i16m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_i16mf2 | ( | ... | ) | __riscv_vloxseg4ei8_v_i16mf2(__VA_ARGS__) |
| #define vloxseg4ei8_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg4ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_i16mf4 | ( | ... | ) | __riscv_vloxseg4ei8_v_i16mf4(__VA_ARGS__) |
| #define vloxseg4ei8_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg4ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_i32m1 | ( | ... | ) | __riscv_vloxseg4ei8_v_i32m1(__VA_ARGS__) |
| #define vloxseg4ei8_v_i32m1_m | ( | ... | ) | __riscv_vloxseg4ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_i32m2 | ( | ... | ) | __riscv_vloxseg4ei8_v_i32m2(__VA_ARGS__) |
| #define vloxseg4ei8_v_i32m2_m | ( | ... | ) | __riscv_vloxseg4ei8_v_i32m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_i32mf2 | ( | ... | ) | __riscv_vloxseg4ei8_v_i32mf2(__VA_ARGS__) |
| #define vloxseg4ei8_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg4ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_i64m1 | ( | ... | ) | __riscv_vloxseg4ei8_v_i64m1(__VA_ARGS__) |
| #define vloxseg4ei8_v_i64m1_m | ( | ... | ) | __riscv_vloxseg4ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_i64m2 | ( | ... | ) | __riscv_vloxseg4ei8_v_i64m2(__VA_ARGS__) |
| #define vloxseg4ei8_v_i64m2_m | ( | ... | ) | __riscv_vloxseg4ei8_v_i64m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_i8m1 | ( | ... | ) | __riscv_vloxseg4ei8_v_i8m1(__VA_ARGS__) |
| #define vloxseg4ei8_v_i8m1_m | ( | ... | ) | __riscv_vloxseg4ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_i8m2 | ( | ... | ) | __riscv_vloxseg4ei8_v_i8m2(__VA_ARGS__) |
| #define vloxseg4ei8_v_i8m2_m | ( | ... | ) | __riscv_vloxseg4ei8_v_i8m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_i8mf2 | ( | ... | ) | __riscv_vloxseg4ei8_v_i8mf2(__VA_ARGS__) |
| #define vloxseg4ei8_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg4ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_i8mf4 | ( | ... | ) | __riscv_vloxseg4ei8_v_i8mf4(__VA_ARGS__) |
| #define vloxseg4ei8_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg4ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_i8mf8 | ( | ... | ) | __riscv_vloxseg4ei8_v_i8mf8(__VA_ARGS__) |
| #define vloxseg4ei8_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg4ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_u16m1 | ( | ... | ) | __riscv_vloxseg4ei8_v_u16m1(__VA_ARGS__) |
| #define vloxseg4ei8_v_u16m1_m | ( | ... | ) | __riscv_vloxseg4ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_u16m2 | ( | ... | ) | __riscv_vloxseg4ei8_v_u16m2(__VA_ARGS__) |
| #define vloxseg4ei8_v_u16m2_m | ( | ... | ) | __riscv_vloxseg4ei8_v_u16m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_u16mf2 | ( | ... | ) | __riscv_vloxseg4ei8_v_u16mf2(__VA_ARGS__) |
| #define vloxseg4ei8_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg4ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_u16mf4 | ( | ... | ) | __riscv_vloxseg4ei8_v_u16mf4(__VA_ARGS__) |
| #define vloxseg4ei8_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg4ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_u32m1 | ( | ... | ) | __riscv_vloxseg4ei8_v_u32m1(__VA_ARGS__) |
| #define vloxseg4ei8_v_u32m1_m | ( | ... | ) | __riscv_vloxseg4ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_u32m2 | ( | ... | ) | __riscv_vloxseg4ei8_v_u32m2(__VA_ARGS__) |
| #define vloxseg4ei8_v_u32m2_m | ( | ... | ) | __riscv_vloxseg4ei8_v_u32m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_u32mf2 | ( | ... | ) | __riscv_vloxseg4ei8_v_u32mf2(__VA_ARGS__) |
| #define vloxseg4ei8_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg4ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_u64m1 | ( | ... | ) | __riscv_vloxseg4ei8_v_u64m1(__VA_ARGS__) |
| #define vloxseg4ei8_v_u64m1_m | ( | ... | ) | __riscv_vloxseg4ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_u64m2 | ( | ... | ) | __riscv_vloxseg4ei8_v_u64m2(__VA_ARGS__) |
| #define vloxseg4ei8_v_u64m2_m | ( | ... | ) | __riscv_vloxseg4ei8_v_u64m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_u8m1 | ( | ... | ) | __riscv_vloxseg4ei8_v_u8m1(__VA_ARGS__) |
| #define vloxseg4ei8_v_u8m1_m | ( | ... | ) | __riscv_vloxseg4ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_u8m2 | ( | ... | ) | __riscv_vloxseg4ei8_v_u8m2(__VA_ARGS__) |
| #define vloxseg4ei8_v_u8m2_m | ( | ... | ) | __riscv_vloxseg4ei8_v_u8m2_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_u8mf2 | ( | ... | ) | __riscv_vloxseg4ei8_v_u8mf2(__VA_ARGS__) |
| #define vloxseg4ei8_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg4ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_u8mf4 | ( | ... | ) | __riscv_vloxseg4ei8_v_u8mf4(__VA_ARGS__) |
| #define vloxseg4ei8_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg4ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg4ei8_v_u8mf8 | ( | ... | ) | __riscv_vloxseg4ei8_v_u8mf8(__VA_ARGS__) |
| #define vloxseg4ei8_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg4ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_f16m1 | ( | ... | ) | __riscv_vloxseg5ei16_v_f16m1(__VA_ARGS__) |
| #define vloxseg5ei16_v_f16m1_m | ( | ... | ) | __riscv_vloxseg5ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_f16mf2 | ( | ... | ) | __riscv_vloxseg5ei16_v_f16mf2(__VA_ARGS__) |
| #define vloxseg5ei16_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg5ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_f16mf4 | ( | ... | ) | __riscv_vloxseg5ei16_v_f16mf4(__VA_ARGS__) |
| #define vloxseg5ei16_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg5ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_f32m1 | ( | ... | ) | __riscv_vloxseg5ei16_v_f32m1(__VA_ARGS__) |
| #define vloxseg5ei16_v_f32m1_m | ( | ... | ) | __riscv_vloxseg5ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_f32mf2 | ( | ... | ) | __riscv_vloxseg5ei16_v_f32mf2(__VA_ARGS__) |
| #define vloxseg5ei16_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg5ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_f64m1 | ( | ... | ) | __riscv_vloxseg5ei16_v_f64m1(__VA_ARGS__) |
| #define vloxseg5ei16_v_f64m1_m | ( | ... | ) | __riscv_vloxseg5ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_i16m1 | ( | ... | ) | __riscv_vloxseg5ei16_v_i16m1(__VA_ARGS__) |
| #define vloxseg5ei16_v_i16m1_m | ( | ... | ) | __riscv_vloxseg5ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_i16mf2 | ( | ... | ) | __riscv_vloxseg5ei16_v_i16mf2(__VA_ARGS__) |
| #define vloxseg5ei16_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg5ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_i16mf4 | ( | ... | ) | __riscv_vloxseg5ei16_v_i16mf4(__VA_ARGS__) |
| #define vloxseg5ei16_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg5ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_i32m1 | ( | ... | ) | __riscv_vloxseg5ei16_v_i32m1(__VA_ARGS__) |
| #define vloxseg5ei16_v_i32m1_m | ( | ... | ) | __riscv_vloxseg5ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_i32mf2 | ( | ... | ) | __riscv_vloxseg5ei16_v_i32mf2(__VA_ARGS__) |
| #define vloxseg5ei16_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg5ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_i64m1 | ( | ... | ) | __riscv_vloxseg5ei16_v_i64m1(__VA_ARGS__) |
| #define vloxseg5ei16_v_i64m1_m | ( | ... | ) | __riscv_vloxseg5ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_i8m1 | ( | ... | ) | __riscv_vloxseg5ei16_v_i8m1(__VA_ARGS__) |
| #define vloxseg5ei16_v_i8m1_m | ( | ... | ) | __riscv_vloxseg5ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_i8mf2 | ( | ... | ) | __riscv_vloxseg5ei16_v_i8mf2(__VA_ARGS__) |
| #define vloxseg5ei16_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg5ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_i8mf4 | ( | ... | ) | __riscv_vloxseg5ei16_v_i8mf4(__VA_ARGS__) |
| #define vloxseg5ei16_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg5ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_i8mf8 | ( | ... | ) | __riscv_vloxseg5ei16_v_i8mf8(__VA_ARGS__) |
| #define vloxseg5ei16_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg5ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_u16m1 | ( | ... | ) | __riscv_vloxseg5ei16_v_u16m1(__VA_ARGS__) |
| #define vloxseg5ei16_v_u16m1_m | ( | ... | ) | __riscv_vloxseg5ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_u16mf2 | ( | ... | ) | __riscv_vloxseg5ei16_v_u16mf2(__VA_ARGS__) |
| #define vloxseg5ei16_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg5ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_u16mf4 | ( | ... | ) | __riscv_vloxseg5ei16_v_u16mf4(__VA_ARGS__) |
| #define vloxseg5ei16_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg5ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_u32m1 | ( | ... | ) | __riscv_vloxseg5ei16_v_u32m1(__VA_ARGS__) |
| #define vloxseg5ei16_v_u32m1_m | ( | ... | ) | __riscv_vloxseg5ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_u32mf2 | ( | ... | ) | __riscv_vloxseg5ei16_v_u32mf2(__VA_ARGS__) |
| #define vloxseg5ei16_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg5ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_u64m1 | ( | ... | ) | __riscv_vloxseg5ei16_v_u64m1(__VA_ARGS__) |
| #define vloxseg5ei16_v_u64m1_m | ( | ... | ) | __riscv_vloxseg5ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_u8m1 | ( | ... | ) | __riscv_vloxseg5ei16_v_u8m1(__VA_ARGS__) |
| #define vloxseg5ei16_v_u8m1_m | ( | ... | ) | __riscv_vloxseg5ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_u8mf2 | ( | ... | ) | __riscv_vloxseg5ei16_v_u8mf2(__VA_ARGS__) |
| #define vloxseg5ei16_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg5ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_u8mf4 | ( | ... | ) | __riscv_vloxseg5ei16_v_u8mf4(__VA_ARGS__) |
| #define vloxseg5ei16_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg5ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg5ei16_v_u8mf8 | ( | ... | ) | __riscv_vloxseg5ei16_v_u8mf8(__VA_ARGS__) |
| #define vloxseg5ei16_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg5ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_f16m1 | ( | ... | ) | __riscv_vloxseg5ei32_v_f16m1(__VA_ARGS__) |
| #define vloxseg5ei32_v_f16m1_m | ( | ... | ) | __riscv_vloxseg5ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_f16mf2 | ( | ... | ) | __riscv_vloxseg5ei32_v_f16mf2(__VA_ARGS__) |
| #define vloxseg5ei32_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg5ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_f16mf4 | ( | ... | ) | __riscv_vloxseg5ei32_v_f16mf4(__VA_ARGS__) |
| #define vloxseg5ei32_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg5ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_f32m1 | ( | ... | ) | __riscv_vloxseg5ei32_v_f32m1(__VA_ARGS__) |
| #define vloxseg5ei32_v_f32m1_m | ( | ... | ) | __riscv_vloxseg5ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_f32mf2 | ( | ... | ) | __riscv_vloxseg5ei32_v_f32mf2(__VA_ARGS__) |
| #define vloxseg5ei32_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg5ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_f64m1 | ( | ... | ) | __riscv_vloxseg5ei32_v_f64m1(__VA_ARGS__) |
| #define vloxseg5ei32_v_f64m1_m | ( | ... | ) | __riscv_vloxseg5ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_i16m1 | ( | ... | ) | __riscv_vloxseg5ei32_v_i16m1(__VA_ARGS__) |
| #define vloxseg5ei32_v_i16m1_m | ( | ... | ) | __riscv_vloxseg5ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_i16mf2 | ( | ... | ) | __riscv_vloxseg5ei32_v_i16mf2(__VA_ARGS__) |
| #define vloxseg5ei32_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg5ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_i16mf4 | ( | ... | ) | __riscv_vloxseg5ei32_v_i16mf4(__VA_ARGS__) |
| #define vloxseg5ei32_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg5ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_i32m1 | ( | ... | ) | __riscv_vloxseg5ei32_v_i32m1(__VA_ARGS__) |
| #define vloxseg5ei32_v_i32m1_m | ( | ... | ) | __riscv_vloxseg5ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_i32mf2 | ( | ... | ) | __riscv_vloxseg5ei32_v_i32mf2(__VA_ARGS__) |
| #define vloxseg5ei32_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg5ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_i64m1 | ( | ... | ) | __riscv_vloxseg5ei32_v_i64m1(__VA_ARGS__) |
| #define vloxseg5ei32_v_i64m1_m | ( | ... | ) | __riscv_vloxseg5ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_i8m1 | ( | ... | ) | __riscv_vloxseg5ei32_v_i8m1(__VA_ARGS__) |
| #define vloxseg5ei32_v_i8m1_m | ( | ... | ) | __riscv_vloxseg5ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_i8mf2 | ( | ... | ) | __riscv_vloxseg5ei32_v_i8mf2(__VA_ARGS__) |
| #define vloxseg5ei32_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg5ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_i8mf4 | ( | ... | ) | __riscv_vloxseg5ei32_v_i8mf4(__VA_ARGS__) |
| #define vloxseg5ei32_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg5ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_i8mf8 | ( | ... | ) | __riscv_vloxseg5ei32_v_i8mf8(__VA_ARGS__) |
| #define vloxseg5ei32_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg5ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_u16m1 | ( | ... | ) | __riscv_vloxseg5ei32_v_u16m1(__VA_ARGS__) |
| #define vloxseg5ei32_v_u16m1_m | ( | ... | ) | __riscv_vloxseg5ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_u16mf2 | ( | ... | ) | __riscv_vloxseg5ei32_v_u16mf2(__VA_ARGS__) |
| #define vloxseg5ei32_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg5ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_u16mf4 | ( | ... | ) | __riscv_vloxseg5ei32_v_u16mf4(__VA_ARGS__) |
| #define vloxseg5ei32_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg5ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_u32m1 | ( | ... | ) | __riscv_vloxseg5ei32_v_u32m1(__VA_ARGS__) |
| #define vloxseg5ei32_v_u32m1_m | ( | ... | ) | __riscv_vloxseg5ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_u32mf2 | ( | ... | ) | __riscv_vloxseg5ei32_v_u32mf2(__VA_ARGS__) |
| #define vloxseg5ei32_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg5ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_u64m1 | ( | ... | ) | __riscv_vloxseg5ei32_v_u64m1(__VA_ARGS__) |
| #define vloxseg5ei32_v_u64m1_m | ( | ... | ) | __riscv_vloxseg5ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_u8m1 | ( | ... | ) | __riscv_vloxseg5ei32_v_u8m1(__VA_ARGS__) |
| #define vloxseg5ei32_v_u8m1_m | ( | ... | ) | __riscv_vloxseg5ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_u8mf2 | ( | ... | ) | __riscv_vloxseg5ei32_v_u8mf2(__VA_ARGS__) |
| #define vloxseg5ei32_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg5ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_u8mf4 | ( | ... | ) | __riscv_vloxseg5ei32_v_u8mf4(__VA_ARGS__) |
| #define vloxseg5ei32_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg5ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg5ei32_v_u8mf8 | ( | ... | ) | __riscv_vloxseg5ei32_v_u8mf8(__VA_ARGS__) |
| #define vloxseg5ei32_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg5ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_f16m1 | ( | ... | ) | __riscv_vloxseg5ei64_v_f16m1(__VA_ARGS__) |
| #define vloxseg5ei64_v_f16m1_m | ( | ... | ) | __riscv_vloxseg5ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_f16mf2 | ( | ... | ) | __riscv_vloxseg5ei64_v_f16mf2(__VA_ARGS__) |
| #define vloxseg5ei64_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg5ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_f16mf4 | ( | ... | ) | __riscv_vloxseg5ei64_v_f16mf4(__VA_ARGS__) |
| #define vloxseg5ei64_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg5ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_f32m1 | ( | ... | ) | __riscv_vloxseg5ei64_v_f32m1(__VA_ARGS__) |
| #define vloxseg5ei64_v_f32m1_m | ( | ... | ) | __riscv_vloxseg5ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_f32mf2 | ( | ... | ) | __riscv_vloxseg5ei64_v_f32mf2(__VA_ARGS__) |
| #define vloxseg5ei64_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg5ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_f64m1 | ( | ... | ) | __riscv_vloxseg5ei64_v_f64m1(__VA_ARGS__) |
| #define vloxseg5ei64_v_f64m1_m | ( | ... | ) | __riscv_vloxseg5ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_i16m1 | ( | ... | ) | __riscv_vloxseg5ei64_v_i16m1(__VA_ARGS__) |
| #define vloxseg5ei64_v_i16m1_m | ( | ... | ) | __riscv_vloxseg5ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_i16mf2 | ( | ... | ) | __riscv_vloxseg5ei64_v_i16mf2(__VA_ARGS__) |
| #define vloxseg5ei64_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg5ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_i16mf4 | ( | ... | ) | __riscv_vloxseg5ei64_v_i16mf4(__VA_ARGS__) |
| #define vloxseg5ei64_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg5ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_i32m1 | ( | ... | ) | __riscv_vloxseg5ei64_v_i32m1(__VA_ARGS__) |
| #define vloxseg5ei64_v_i32m1_m | ( | ... | ) | __riscv_vloxseg5ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_i32mf2 | ( | ... | ) | __riscv_vloxseg5ei64_v_i32mf2(__VA_ARGS__) |
| #define vloxseg5ei64_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg5ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_i64m1 | ( | ... | ) | __riscv_vloxseg5ei64_v_i64m1(__VA_ARGS__) |
| #define vloxseg5ei64_v_i64m1_m | ( | ... | ) | __riscv_vloxseg5ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_i8m1 | ( | ... | ) | __riscv_vloxseg5ei64_v_i8m1(__VA_ARGS__) |
| #define vloxseg5ei64_v_i8m1_m | ( | ... | ) | __riscv_vloxseg5ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_i8mf2 | ( | ... | ) | __riscv_vloxseg5ei64_v_i8mf2(__VA_ARGS__) |
| #define vloxseg5ei64_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg5ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_i8mf4 | ( | ... | ) | __riscv_vloxseg5ei64_v_i8mf4(__VA_ARGS__) |
| #define vloxseg5ei64_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg5ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_i8mf8 | ( | ... | ) | __riscv_vloxseg5ei64_v_i8mf8(__VA_ARGS__) |
| #define vloxseg5ei64_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg5ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_u16m1 | ( | ... | ) | __riscv_vloxseg5ei64_v_u16m1(__VA_ARGS__) |
| #define vloxseg5ei64_v_u16m1_m | ( | ... | ) | __riscv_vloxseg5ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_u16mf2 | ( | ... | ) | __riscv_vloxseg5ei64_v_u16mf2(__VA_ARGS__) |
| #define vloxseg5ei64_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg5ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_u16mf4 | ( | ... | ) | __riscv_vloxseg5ei64_v_u16mf4(__VA_ARGS__) |
| #define vloxseg5ei64_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg5ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_u32m1 | ( | ... | ) | __riscv_vloxseg5ei64_v_u32m1(__VA_ARGS__) |
| #define vloxseg5ei64_v_u32m1_m | ( | ... | ) | __riscv_vloxseg5ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_u32mf2 | ( | ... | ) | __riscv_vloxseg5ei64_v_u32mf2(__VA_ARGS__) |
| #define vloxseg5ei64_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg5ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_u64m1 | ( | ... | ) | __riscv_vloxseg5ei64_v_u64m1(__VA_ARGS__) |
| #define vloxseg5ei64_v_u64m1_m | ( | ... | ) | __riscv_vloxseg5ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_u8m1 | ( | ... | ) | __riscv_vloxseg5ei64_v_u8m1(__VA_ARGS__) |
| #define vloxseg5ei64_v_u8m1_m | ( | ... | ) | __riscv_vloxseg5ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_u8mf2 | ( | ... | ) | __riscv_vloxseg5ei64_v_u8mf2(__VA_ARGS__) |
| #define vloxseg5ei64_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg5ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_u8mf4 | ( | ... | ) | __riscv_vloxseg5ei64_v_u8mf4(__VA_ARGS__) |
| #define vloxseg5ei64_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg5ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg5ei64_v_u8mf8 | ( | ... | ) | __riscv_vloxseg5ei64_v_u8mf8(__VA_ARGS__) |
| #define vloxseg5ei64_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg5ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_f16m1 | ( | ... | ) | __riscv_vloxseg5ei8_v_f16m1(__VA_ARGS__) |
| #define vloxseg5ei8_v_f16m1_m | ( | ... | ) | __riscv_vloxseg5ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_f16mf2 | ( | ... | ) | __riscv_vloxseg5ei8_v_f16mf2(__VA_ARGS__) |
| #define vloxseg5ei8_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg5ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_f16mf4 | ( | ... | ) | __riscv_vloxseg5ei8_v_f16mf4(__VA_ARGS__) |
| #define vloxseg5ei8_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg5ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_f32m1 | ( | ... | ) | __riscv_vloxseg5ei8_v_f32m1(__VA_ARGS__) |
| #define vloxseg5ei8_v_f32m1_m | ( | ... | ) | __riscv_vloxseg5ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_f32mf2 | ( | ... | ) | __riscv_vloxseg5ei8_v_f32mf2(__VA_ARGS__) |
| #define vloxseg5ei8_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg5ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_f64m1 | ( | ... | ) | __riscv_vloxseg5ei8_v_f64m1(__VA_ARGS__) |
| #define vloxseg5ei8_v_f64m1_m | ( | ... | ) | __riscv_vloxseg5ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_i16m1 | ( | ... | ) | __riscv_vloxseg5ei8_v_i16m1(__VA_ARGS__) |
| #define vloxseg5ei8_v_i16m1_m | ( | ... | ) | __riscv_vloxseg5ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_i16mf2 | ( | ... | ) | __riscv_vloxseg5ei8_v_i16mf2(__VA_ARGS__) |
| #define vloxseg5ei8_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg5ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_i16mf4 | ( | ... | ) | __riscv_vloxseg5ei8_v_i16mf4(__VA_ARGS__) |
| #define vloxseg5ei8_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg5ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_i32m1 | ( | ... | ) | __riscv_vloxseg5ei8_v_i32m1(__VA_ARGS__) |
| #define vloxseg5ei8_v_i32m1_m | ( | ... | ) | __riscv_vloxseg5ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_i32mf2 | ( | ... | ) | __riscv_vloxseg5ei8_v_i32mf2(__VA_ARGS__) |
| #define vloxseg5ei8_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg5ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_i64m1 | ( | ... | ) | __riscv_vloxseg5ei8_v_i64m1(__VA_ARGS__) |
| #define vloxseg5ei8_v_i64m1_m | ( | ... | ) | __riscv_vloxseg5ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_i8m1 | ( | ... | ) | __riscv_vloxseg5ei8_v_i8m1(__VA_ARGS__) |
| #define vloxseg5ei8_v_i8m1_m | ( | ... | ) | __riscv_vloxseg5ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_i8mf2 | ( | ... | ) | __riscv_vloxseg5ei8_v_i8mf2(__VA_ARGS__) |
| #define vloxseg5ei8_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg5ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_i8mf4 | ( | ... | ) | __riscv_vloxseg5ei8_v_i8mf4(__VA_ARGS__) |
| #define vloxseg5ei8_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg5ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_i8mf8 | ( | ... | ) | __riscv_vloxseg5ei8_v_i8mf8(__VA_ARGS__) |
| #define vloxseg5ei8_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg5ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_u16m1 | ( | ... | ) | __riscv_vloxseg5ei8_v_u16m1(__VA_ARGS__) |
| #define vloxseg5ei8_v_u16m1_m | ( | ... | ) | __riscv_vloxseg5ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_u16mf2 | ( | ... | ) | __riscv_vloxseg5ei8_v_u16mf2(__VA_ARGS__) |
| #define vloxseg5ei8_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg5ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_u16mf4 | ( | ... | ) | __riscv_vloxseg5ei8_v_u16mf4(__VA_ARGS__) |
| #define vloxseg5ei8_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg5ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_u32m1 | ( | ... | ) | __riscv_vloxseg5ei8_v_u32m1(__VA_ARGS__) |
| #define vloxseg5ei8_v_u32m1_m | ( | ... | ) | __riscv_vloxseg5ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_u32mf2 | ( | ... | ) | __riscv_vloxseg5ei8_v_u32mf2(__VA_ARGS__) |
| #define vloxseg5ei8_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg5ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_u64m1 | ( | ... | ) | __riscv_vloxseg5ei8_v_u64m1(__VA_ARGS__) |
| #define vloxseg5ei8_v_u64m1_m | ( | ... | ) | __riscv_vloxseg5ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_u8m1 | ( | ... | ) | __riscv_vloxseg5ei8_v_u8m1(__VA_ARGS__) |
| #define vloxseg5ei8_v_u8m1_m | ( | ... | ) | __riscv_vloxseg5ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_u8mf2 | ( | ... | ) | __riscv_vloxseg5ei8_v_u8mf2(__VA_ARGS__) |
| #define vloxseg5ei8_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg5ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_u8mf4 | ( | ... | ) | __riscv_vloxseg5ei8_v_u8mf4(__VA_ARGS__) |
| #define vloxseg5ei8_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg5ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg5ei8_v_u8mf8 | ( | ... | ) | __riscv_vloxseg5ei8_v_u8mf8(__VA_ARGS__) |
| #define vloxseg5ei8_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg5ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_f16m1 | ( | ... | ) | __riscv_vloxseg6ei16_v_f16m1(__VA_ARGS__) |
| #define vloxseg6ei16_v_f16m1_m | ( | ... | ) | __riscv_vloxseg6ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_f16mf2 | ( | ... | ) | __riscv_vloxseg6ei16_v_f16mf2(__VA_ARGS__) |
| #define vloxseg6ei16_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg6ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_f16mf4 | ( | ... | ) | __riscv_vloxseg6ei16_v_f16mf4(__VA_ARGS__) |
| #define vloxseg6ei16_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg6ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_f32m1 | ( | ... | ) | __riscv_vloxseg6ei16_v_f32m1(__VA_ARGS__) |
| #define vloxseg6ei16_v_f32m1_m | ( | ... | ) | __riscv_vloxseg6ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_f32mf2 | ( | ... | ) | __riscv_vloxseg6ei16_v_f32mf2(__VA_ARGS__) |
| #define vloxseg6ei16_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg6ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_f64m1 | ( | ... | ) | __riscv_vloxseg6ei16_v_f64m1(__VA_ARGS__) |
| #define vloxseg6ei16_v_f64m1_m | ( | ... | ) | __riscv_vloxseg6ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_i16m1 | ( | ... | ) | __riscv_vloxseg6ei16_v_i16m1(__VA_ARGS__) |
| #define vloxseg6ei16_v_i16m1_m | ( | ... | ) | __riscv_vloxseg6ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_i16mf2 | ( | ... | ) | __riscv_vloxseg6ei16_v_i16mf2(__VA_ARGS__) |
| #define vloxseg6ei16_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg6ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_i16mf4 | ( | ... | ) | __riscv_vloxseg6ei16_v_i16mf4(__VA_ARGS__) |
| #define vloxseg6ei16_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg6ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_i32m1 | ( | ... | ) | __riscv_vloxseg6ei16_v_i32m1(__VA_ARGS__) |
| #define vloxseg6ei16_v_i32m1_m | ( | ... | ) | __riscv_vloxseg6ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_i32mf2 | ( | ... | ) | __riscv_vloxseg6ei16_v_i32mf2(__VA_ARGS__) |
| #define vloxseg6ei16_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg6ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_i64m1 | ( | ... | ) | __riscv_vloxseg6ei16_v_i64m1(__VA_ARGS__) |
| #define vloxseg6ei16_v_i64m1_m | ( | ... | ) | __riscv_vloxseg6ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_i8m1 | ( | ... | ) | __riscv_vloxseg6ei16_v_i8m1(__VA_ARGS__) |
| #define vloxseg6ei16_v_i8m1_m | ( | ... | ) | __riscv_vloxseg6ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_i8mf2 | ( | ... | ) | __riscv_vloxseg6ei16_v_i8mf2(__VA_ARGS__) |
| #define vloxseg6ei16_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg6ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_i8mf4 | ( | ... | ) | __riscv_vloxseg6ei16_v_i8mf4(__VA_ARGS__) |
| #define vloxseg6ei16_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg6ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_i8mf8 | ( | ... | ) | __riscv_vloxseg6ei16_v_i8mf8(__VA_ARGS__) |
| #define vloxseg6ei16_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg6ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_u16m1 | ( | ... | ) | __riscv_vloxseg6ei16_v_u16m1(__VA_ARGS__) |
| #define vloxseg6ei16_v_u16m1_m | ( | ... | ) | __riscv_vloxseg6ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_u16mf2 | ( | ... | ) | __riscv_vloxseg6ei16_v_u16mf2(__VA_ARGS__) |
| #define vloxseg6ei16_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg6ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_u16mf4 | ( | ... | ) | __riscv_vloxseg6ei16_v_u16mf4(__VA_ARGS__) |
| #define vloxseg6ei16_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg6ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_u32m1 | ( | ... | ) | __riscv_vloxseg6ei16_v_u32m1(__VA_ARGS__) |
| #define vloxseg6ei16_v_u32m1_m | ( | ... | ) | __riscv_vloxseg6ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_u32mf2 | ( | ... | ) | __riscv_vloxseg6ei16_v_u32mf2(__VA_ARGS__) |
| #define vloxseg6ei16_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg6ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_u64m1 | ( | ... | ) | __riscv_vloxseg6ei16_v_u64m1(__VA_ARGS__) |
| #define vloxseg6ei16_v_u64m1_m | ( | ... | ) | __riscv_vloxseg6ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_u8m1 | ( | ... | ) | __riscv_vloxseg6ei16_v_u8m1(__VA_ARGS__) |
| #define vloxseg6ei16_v_u8m1_m | ( | ... | ) | __riscv_vloxseg6ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_u8mf2 | ( | ... | ) | __riscv_vloxseg6ei16_v_u8mf2(__VA_ARGS__) |
| #define vloxseg6ei16_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg6ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_u8mf4 | ( | ... | ) | __riscv_vloxseg6ei16_v_u8mf4(__VA_ARGS__) |
| #define vloxseg6ei16_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg6ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg6ei16_v_u8mf8 | ( | ... | ) | __riscv_vloxseg6ei16_v_u8mf8(__VA_ARGS__) |
| #define vloxseg6ei16_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg6ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_f16m1 | ( | ... | ) | __riscv_vloxseg6ei32_v_f16m1(__VA_ARGS__) |
| #define vloxseg6ei32_v_f16m1_m | ( | ... | ) | __riscv_vloxseg6ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_f16mf2 | ( | ... | ) | __riscv_vloxseg6ei32_v_f16mf2(__VA_ARGS__) |
| #define vloxseg6ei32_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg6ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_f16mf4 | ( | ... | ) | __riscv_vloxseg6ei32_v_f16mf4(__VA_ARGS__) |
| #define vloxseg6ei32_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg6ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_f32m1 | ( | ... | ) | __riscv_vloxseg6ei32_v_f32m1(__VA_ARGS__) |
| #define vloxseg6ei32_v_f32m1_m | ( | ... | ) | __riscv_vloxseg6ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_f32mf2 | ( | ... | ) | __riscv_vloxseg6ei32_v_f32mf2(__VA_ARGS__) |
| #define vloxseg6ei32_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg6ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_f64m1 | ( | ... | ) | __riscv_vloxseg6ei32_v_f64m1(__VA_ARGS__) |
| #define vloxseg6ei32_v_f64m1_m | ( | ... | ) | __riscv_vloxseg6ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_i16m1 | ( | ... | ) | __riscv_vloxseg6ei32_v_i16m1(__VA_ARGS__) |
| #define vloxseg6ei32_v_i16m1_m | ( | ... | ) | __riscv_vloxseg6ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_i16mf2 | ( | ... | ) | __riscv_vloxseg6ei32_v_i16mf2(__VA_ARGS__) |
| #define vloxseg6ei32_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg6ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_i16mf4 | ( | ... | ) | __riscv_vloxseg6ei32_v_i16mf4(__VA_ARGS__) |
| #define vloxseg6ei32_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg6ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_i32m1 | ( | ... | ) | __riscv_vloxseg6ei32_v_i32m1(__VA_ARGS__) |
| #define vloxseg6ei32_v_i32m1_m | ( | ... | ) | __riscv_vloxseg6ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_i32mf2 | ( | ... | ) | __riscv_vloxseg6ei32_v_i32mf2(__VA_ARGS__) |
| #define vloxseg6ei32_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg6ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_i64m1 | ( | ... | ) | __riscv_vloxseg6ei32_v_i64m1(__VA_ARGS__) |
| #define vloxseg6ei32_v_i64m1_m | ( | ... | ) | __riscv_vloxseg6ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_i8m1 | ( | ... | ) | __riscv_vloxseg6ei32_v_i8m1(__VA_ARGS__) |
| #define vloxseg6ei32_v_i8m1_m | ( | ... | ) | __riscv_vloxseg6ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_i8mf2 | ( | ... | ) | __riscv_vloxseg6ei32_v_i8mf2(__VA_ARGS__) |
| #define vloxseg6ei32_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg6ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_i8mf4 | ( | ... | ) | __riscv_vloxseg6ei32_v_i8mf4(__VA_ARGS__) |
| #define vloxseg6ei32_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg6ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_i8mf8 | ( | ... | ) | __riscv_vloxseg6ei32_v_i8mf8(__VA_ARGS__) |
| #define vloxseg6ei32_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg6ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_u16m1 | ( | ... | ) | __riscv_vloxseg6ei32_v_u16m1(__VA_ARGS__) |
| #define vloxseg6ei32_v_u16m1_m | ( | ... | ) | __riscv_vloxseg6ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_u16mf2 | ( | ... | ) | __riscv_vloxseg6ei32_v_u16mf2(__VA_ARGS__) |
| #define vloxseg6ei32_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg6ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_u16mf4 | ( | ... | ) | __riscv_vloxseg6ei32_v_u16mf4(__VA_ARGS__) |
| #define vloxseg6ei32_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg6ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_u32m1 | ( | ... | ) | __riscv_vloxseg6ei32_v_u32m1(__VA_ARGS__) |
| #define vloxseg6ei32_v_u32m1_m | ( | ... | ) | __riscv_vloxseg6ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_u32mf2 | ( | ... | ) | __riscv_vloxseg6ei32_v_u32mf2(__VA_ARGS__) |
| #define vloxseg6ei32_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg6ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_u64m1 | ( | ... | ) | __riscv_vloxseg6ei32_v_u64m1(__VA_ARGS__) |
| #define vloxseg6ei32_v_u64m1_m | ( | ... | ) | __riscv_vloxseg6ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_u8m1 | ( | ... | ) | __riscv_vloxseg6ei32_v_u8m1(__VA_ARGS__) |
| #define vloxseg6ei32_v_u8m1_m | ( | ... | ) | __riscv_vloxseg6ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_u8mf2 | ( | ... | ) | __riscv_vloxseg6ei32_v_u8mf2(__VA_ARGS__) |
| #define vloxseg6ei32_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg6ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_u8mf4 | ( | ... | ) | __riscv_vloxseg6ei32_v_u8mf4(__VA_ARGS__) |
| #define vloxseg6ei32_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg6ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg6ei32_v_u8mf8 | ( | ... | ) | __riscv_vloxseg6ei32_v_u8mf8(__VA_ARGS__) |
| #define vloxseg6ei32_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg6ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_f16m1 | ( | ... | ) | __riscv_vloxseg6ei64_v_f16m1(__VA_ARGS__) |
| #define vloxseg6ei64_v_f16m1_m | ( | ... | ) | __riscv_vloxseg6ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_f16mf2 | ( | ... | ) | __riscv_vloxseg6ei64_v_f16mf2(__VA_ARGS__) |
| #define vloxseg6ei64_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg6ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_f16mf4 | ( | ... | ) | __riscv_vloxseg6ei64_v_f16mf4(__VA_ARGS__) |
| #define vloxseg6ei64_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg6ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_f32m1 | ( | ... | ) | __riscv_vloxseg6ei64_v_f32m1(__VA_ARGS__) |
| #define vloxseg6ei64_v_f32m1_m | ( | ... | ) | __riscv_vloxseg6ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_f32mf2 | ( | ... | ) | __riscv_vloxseg6ei64_v_f32mf2(__VA_ARGS__) |
| #define vloxseg6ei64_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg6ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_f64m1 | ( | ... | ) | __riscv_vloxseg6ei64_v_f64m1(__VA_ARGS__) |
| #define vloxseg6ei64_v_f64m1_m | ( | ... | ) | __riscv_vloxseg6ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_i16m1 | ( | ... | ) | __riscv_vloxseg6ei64_v_i16m1(__VA_ARGS__) |
| #define vloxseg6ei64_v_i16m1_m | ( | ... | ) | __riscv_vloxseg6ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_i16mf2 | ( | ... | ) | __riscv_vloxseg6ei64_v_i16mf2(__VA_ARGS__) |
| #define vloxseg6ei64_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg6ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_i16mf4 | ( | ... | ) | __riscv_vloxseg6ei64_v_i16mf4(__VA_ARGS__) |
| #define vloxseg6ei64_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg6ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_i32m1 | ( | ... | ) | __riscv_vloxseg6ei64_v_i32m1(__VA_ARGS__) |
| #define vloxseg6ei64_v_i32m1_m | ( | ... | ) | __riscv_vloxseg6ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_i32mf2 | ( | ... | ) | __riscv_vloxseg6ei64_v_i32mf2(__VA_ARGS__) |
| #define vloxseg6ei64_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg6ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_i64m1 | ( | ... | ) | __riscv_vloxseg6ei64_v_i64m1(__VA_ARGS__) |
| #define vloxseg6ei64_v_i64m1_m | ( | ... | ) | __riscv_vloxseg6ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_i8m1 | ( | ... | ) | __riscv_vloxseg6ei64_v_i8m1(__VA_ARGS__) |
| #define vloxseg6ei64_v_i8m1_m | ( | ... | ) | __riscv_vloxseg6ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_i8mf2 | ( | ... | ) | __riscv_vloxseg6ei64_v_i8mf2(__VA_ARGS__) |
| #define vloxseg6ei64_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg6ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_i8mf4 | ( | ... | ) | __riscv_vloxseg6ei64_v_i8mf4(__VA_ARGS__) |
| #define vloxseg6ei64_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg6ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_i8mf8 | ( | ... | ) | __riscv_vloxseg6ei64_v_i8mf8(__VA_ARGS__) |
| #define vloxseg6ei64_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg6ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_u16m1 | ( | ... | ) | __riscv_vloxseg6ei64_v_u16m1(__VA_ARGS__) |
| #define vloxseg6ei64_v_u16m1_m | ( | ... | ) | __riscv_vloxseg6ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_u16mf2 | ( | ... | ) | __riscv_vloxseg6ei64_v_u16mf2(__VA_ARGS__) |
| #define vloxseg6ei64_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg6ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_u16mf4 | ( | ... | ) | __riscv_vloxseg6ei64_v_u16mf4(__VA_ARGS__) |
| #define vloxseg6ei64_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg6ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_u32m1 | ( | ... | ) | __riscv_vloxseg6ei64_v_u32m1(__VA_ARGS__) |
| #define vloxseg6ei64_v_u32m1_m | ( | ... | ) | __riscv_vloxseg6ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_u32mf2 | ( | ... | ) | __riscv_vloxseg6ei64_v_u32mf2(__VA_ARGS__) |
| #define vloxseg6ei64_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg6ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_u64m1 | ( | ... | ) | __riscv_vloxseg6ei64_v_u64m1(__VA_ARGS__) |
| #define vloxseg6ei64_v_u64m1_m | ( | ... | ) | __riscv_vloxseg6ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_u8m1 | ( | ... | ) | __riscv_vloxseg6ei64_v_u8m1(__VA_ARGS__) |
| #define vloxseg6ei64_v_u8m1_m | ( | ... | ) | __riscv_vloxseg6ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_u8mf2 | ( | ... | ) | __riscv_vloxseg6ei64_v_u8mf2(__VA_ARGS__) |
| #define vloxseg6ei64_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg6ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_u8mf4 | ( | ... | ) | __riscv_vloxseg6ei64_v_u8mf4(__VA_ARGS__) |
| #define vloxseg6ei64_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg6ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg6ei64_v_u8mf8 | ( | ... | ) | __riscv_vloxseg6ei64_v_u8mf8(__VA_ARGS__) |
| #define vloxseg6ei64_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg6ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_f16m1 | ( | ... | ) | __riscv_vloxseg6ei8_v_f16m1(__VA_ARGS__) |
| #define vloxseg6ei8_v_f16m1_m | ( | ... | ) | __riscv_vloxseg6ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_f16mf2 | ( | ... | ) | __riscv_vloxseg6ei8_v_f16mf2(__VA_ARGS__) |
| #define vloxseg6ei8_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg6ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_f16mf4 | ( | ... | ) | __riscv_vloxseg6ei8_v_f16mf4(__VA_ARGS__) |
| #define vloxseg6ei8_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg6ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_f32m1 | ( | ... | ) | __riscv_vloxseg6ei8_v_f32m1(__VA_ARGS__) |
| #define vloxseg6ei8_v_f32m1_m | ( | ... | ) | __riscv_vloxseg6ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_f32mf2 | ( | ... | ) | __riscv_vloxseg6ei8_v_f32mf2(__VA_ARGS__) |
| #define vloxseg6ei8_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg6ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_f64m1 | ( | ... | ) | __riscv_vloxseg6ei8_v_f64m1(__VA_ARGS__) |
| #define vloxseg6ei8_v_f64m1_m | ( | ... | ) | __riscv_vloxseg6ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_i16m1 | ( | ... | ) | __riscv_vloxseg6ei8_v_i16m1(__VA_ARGS__) |
| #define vloxseg6ei8_v_i16m1_m | ( | ... | ) | __riscv_vloxseg6ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_i16mf2 | ( | ... | ) | __riscv_vloxseg6ei8_v_i16mf2(__VA_ARGS__) |
| #define vloxseg6ei8_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg6ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_i16mf4 | ( | ... | ) | __riscv_vloxseg6ei8_v_i16mf4(__VA_ARGS__) |
| #define vloxseg6ei8_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg6ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_i32m1 | ( | ... | ) | __riscv_vloxseg6ei8_v_i32m1(__VA_ARGS__) |
| #define vloxseg6ei8_v_i32m1_m | ( | ... | ) | __riscv_vloxseg6ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_i32mf2 | ( | ... | ) | __riscv_vloxseg6ei8_v_i32mf2(__VA_ARGS__) |
| #define vloxseg6ei8_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg6ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_i64m1 | ( | ... | ) | __riscv_vloxseg6ei8_v_i64m1(__VA_ARGS__) |
| #define vloxseg6ei8_v_i64m1_m | ( | ... | ) | __riscv_vloxseg6ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_i8m1 | ( | ... | ) | __riscv_vloxseg6ei8_v_i8m1(__VA_ARGS__) |
| #define vloxseg6ei8_v_i8m1_m | ( | ... | ) | __riscv_vloxseg6ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_i8mf2 | ( | ... | ) | __riscv_vloxseg6ei8_v_i8mf2(__VA_ARGS__) |
| #define vloxseg6ei8_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg6ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_i8mf4 | ( | ... | ) | __riscv_vloxseg6ei8_v_i8mf4(__VA_ARGS__) |
| #define vloxseg6ei8_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg6ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_i8mf8 | ( | ... | ) | __riscv_vloxseg6ei8_v_i8mf8(__VA_ARGS__) |
| #define vloxseg6ei8_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg6ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_u16m1 | ( | ... | ) | __riscv_vloxseg6ei8_v_u16m1(__VA_ARGS__) |
| #define vloxseg6ei8_v_u16m1_m | ( | ... | ) | __riscv_vloxseg6ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_u16mf2 | ( | ... | ) | __riscv_vloxseg6ei8_v_u16mf2(__VA_ARGS__) |
| #define vloxseg6ei8_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg6ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_u16mf4 | ( | ... | ) | __riscv_vloxseg6ei8_v_u16mf4(__VA_ARGS__) |
| #define vloxseg6ei8_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg6ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_u32m1 | ( | ... | ) | __riscv_vloxseg6ei8_v_u32m1(__VA_ARGS__) |
| #define vloxseg6ei8_v_u32m1_m | ( | ... | ) | __riscv_vloxseg6ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_u32mf2 | ( | ... | ) | __riscv_vloxseg6ei8_v_u32mf2(__VA_ARGS__) |
| #define vloxseg6ei8_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg6ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_u64m1 | ( | ... | ) | __riscv_vloxseg6ei8_v_u64m1(__VA_ARGS__) |
| #define vloxseg6ei8_v_u64m1_m | ( | ... | ) | __riscv_vloxseg6ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_u8m1 | ( | ... | ) | __riscv_vloxseg6ei8_v_u8m1(__VA_ARGS__) |
| #define vloxseg6ei8_v_u8m1_m | ( | ... | ) | __riscv_vloxseg6ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_u8mf2 | ( | ... | ) | __riscv_vloxseg6ei8_v_u8mf2(__VA_ARGS__) |
| #define vloxseg6ei8_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg6ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_u8mf4 | ( | ... | ) | __riscv_vloxseg6ei8_v_u8mf4(__VA_ARGS__) |
| #define vloxseg6ei8_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg6ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg6ei8_v_u8mf8 | ( | ... | ) | __riscv_vloxseg6ei8_v_u8mf8(__VA_ARGS__) |
| #define vloxseg6ei8_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg6ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_f16m1 | ( | ... | ) | __riscv_vloxseg7ei16_v_f16m1(__VA_ARGS__) |
| #define vloxseg7ei16_v_f16m1_m | ( | ... | ) | __riscv_vloxseg7ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_f16mf2 | ( | ... | ) | __riscv_vloxseg7ei16_v_f16mf2(__VA_ARGS__) |
| #define vloxseg7ei16_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg7ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_f16mf4 | ( | ... | ) | __riscv_vloxseg7ei16_v_f16mf4(__VA_ARGS__) |
| #define vloxseg7ei16_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg7ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_f32m1 | ( | ... | ) | __riscv_vloxseg7ei16_v_f32m1(__VA_ARGS__) |
| #define vloxseg7ei16_v_f32m1_m | ( | ... | ) | __riscv_vloxseg7ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_f32mf2 | ( | ... | ) | __riscv_vloxseg7ei16_v_f32mf2(__VA_ARGS__) |
| #define vloxseg7ei16_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg7ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_f64m1 | ( | ... | ) | __riscv_vloxseg7ei16_v_f64m1(__VA_ARGS__) |
| #define vloxseg7ei16_v_f64m1_m | ( | ... | ) | __riscv_vloxseg7ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_i16m1 | ( | ... | ) | __riscv_vloxseg7ei16_v_i16m1(__VA_ARGS__) |
| #define vloxseg7ei16_v_i16m1_m | ( | ... | ) | __riscv_vloxseg7ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_i16mf2 | ( | ... | ) | __riscv_vloxseg7ei16_v_i16mf2(__VA_ARGS__) |
| #define vloxseg7ei16_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg7ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_i16mf4 | ( | ... | ) | __riscv_vloxseg7ei16_v_i16mf4(__VA_ARGS__) |
| #define vloxseg7ei16_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg7ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_i32m1 | ( | ... | ) | __riscv_vloxseg7ei16_v_i32m1(__VA_ARGS__) |
| #define vloxseg7ei16_v_i32m1_m | ( | ... | ) | __riscv_vloxseg7ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_i32mf2 | ( | ... | ) | __riscv_vloxseg7ei16_v_i32mf2(__VA_ARGS__) |
| #define vloxseg7ei16_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg7ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_i64m1 | ( | ... | ) | __riscv_vloxseg7ei16_v_i64m1(__VA_ARGS__) |
| #define vloxseg7ei16_v_i64m1_m | ( | ... | ) | __riscv_vloxseg7ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_i8m1 | ( | ... | ) | __riscv_vloxseg7ei16_v_i8m1(__VA_ARGS__) |
| #define vloxseg7ei16_v_i8m1_m | ( | ... | ) | __riscv_vloxseg7ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_i8mf2 | ( | ... | ) | __riscv_vloxseg7ei16_v_i8mf2(__VA_ARGS__) |
| #define vloxseg7ei16_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg7ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_i8mf4 | ( | ... | ) | __riscv_vloxseg7ei16_v_i8mf4(__VA_ARGS__) |
| #define vloxseg7ei16_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg7ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_i8mf8 | ( | ... | ) | __riscv_vloxseg7ei16_v_i8mf8(__VA_ARGS__) |
| #define vloxseg7ei16_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg7ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_u16m1 | ( | ... | ) | __riscv_vloxseg7ei16_v_u16m1(__VA_ARGS__) |
| #define vloxseg7ei16_v_u16m1_m | ( | ... | ) | __riscv_vloxseg7ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_u16mf2 | ( | ... | ) | __riscv_vloxseg7ei16_v_u16mf2(__VA_ARGS__) |
| #define vloxseg7ei16_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg7ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_u16mf4 | ( | ... | ) | __riscv_vloxseg7ei16_v_u16mf4(__VA_ARGS__) |
| #define vloxseg7ei16_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg7ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_u32m1 | ( | ... | ) | __riscv_vloxseg7ei16_v_u32m1(__VA_ARGS__) |
| #define vloxseg7ei16_v_u32m1_m | ( | ... | ) | __riscv_vloxseg7ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_u32mf2 | ( | ... | ) | __riscv_vloxseg7ei16_v_u32mf2(__VA_ARGS__) |
| #define vloxseg7ei16_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg7ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_u64m1 | ( | ... | ) | __riscv_vloxseg7ei16_v_u64m1(__VA_ARGS__) |
| #define vloxseg7ei16_v_u64m1_m | ( | ... | ) | __riscv_vloxseg7ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_u8m1 | ( | ... | ) | __riscv_vloxseg7ei16_v_u8m1(__VA_ARGS__) |
| #define vloxseg7ei16_v_u8m1_m | ( | ... | ) | __riscv_vloxseg7ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_u8mf2 | ( | ... | ) | __riscv_vloxseg7ei16_v_u8mf2(__VA_ARGS__) |
| #define vloxseg7ei16_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg7ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_u8mf4 | ( | ... | ) | __riscv_vloxseg7ei16_v_u8mf4(__VA_ARGS__) |
| #define vloxseg7ei16_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg7ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg7ei16_v_u8mf8 | ( | ... | ) | __riscv_vloxseg7ei16_v_u8mf8(__VA_ARGS__) |
| #define vloxseg7ei16_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg7ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_f16m1 | ( | ... | ) | __riscv_vloxseg7ei32_v_f16m1(__VA_ARGS__) |
| #define vloxseg7ei32_v_f16m1_m | ( | ... | ) | __riscv_vloxseg7ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_f16mf2 | ( | ... | ) | __riscv_vloxseg7ei32_v_f16mf2(__VA_ARGS__) |
| #define vloxseg7ei32_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg7ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_f16mf4 | ( | ... | ) | __riscv_vloxseg7ei32_v_f16mf4(__VA_ARGS__) |
| #define vloxseg7ei32_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg7ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_f32m1 | ( | ... | ) | __riscv_vloxseg7ei32_v_f32m1(__VA_ARGS__) |
| #define vloxseg7ei32_v_f32m1_m | ( | ... | ) | __riscv_vloxseg7ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_f32mf2 | ( | ... | ) | __riscv_vloxseg7ei32_v_f32mf2(__VA_ARGS__) |
| #define vloxseg7ei32_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg7ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_f64m1 | ( | ... | ) | __riscv_vloxseg7ei32_v_f64m1(__VA_ARGS__) |
| #define vloxseg7ei32_v_f64m1_m | ( | ... | ) | __riscv_vloxseg7ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_i16m1 | ( | ... | ) | __riscv_vloxseg7ei32_v_i16m1(__VA_ARGS__) |
| #define vloxseg7ei32_v_i16m1_m | ( | ... | ) | __riscv_vloxseg7ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_i16mf2 | ( | ... | ) | __riscv_vloxseg7ei32_v_i16mf2(__VA_ARGS__) |
| #define vloxseg7ei32_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg7ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_i16mf4 | ( | ... | ) | __riscv_vloxseg7ei32_v_i16mf4(__VA_ARGS__) |
| #define vloxseg7ei32_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg7ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_i32m1 | ( | ... | ) | __riscv_vloxseg7ei32_v_i32m1(__VA_ARGS__) |
| #define vloxseg7ei32_v_i32m1_m | ( | ... | ) | __riscv_vloxseg7ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_i32mf2 | ( | ... | ) | __riscv_vloxseg7ei32_v_i32mf2(__VA_ARGS__) |
| #define vloxseg7ei32_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg7ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_i64m1 | ( | ... | ) | __riscv_vloxseg7ei32_v_i64m1(__VA_ARGS__) |
| #define vloxseg7ei32_v_i64m1_m | ( | ... | ) | __riscv_vloxseg7ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_i8m1 | ( | ... | ) | __riscv_vloxseg7ei32_v_i8m1(__VA_ARGS__) |
| #define vloxseg7ei32_v_i8m1_m | ( | ... | ) | __riscv_vloxseg7ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_i8mf2 | ( | ... | ) | __riscv_vloxseg7ei32_v_i8mf2(__VA_ARGS__) |
| #define vloxseg7ei32_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg7ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_i8mf4 | ( | ... | ) | __riscv_vloxseg7ei32_v_i8mf4(__VA_ARGS__) |
| #define vloxseg7ei32_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg7ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_i8mf8 | ( | ... | ) | __riscv_vloxseg7ei32_v_i8mf8(__VA_ARGS__) |
| #define vloxseg7ei32_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg7ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_u16m1 | ( | ... | ) | __riscv_vloxseg7ei32_v_u16m1(__VA_ARGS__) |
| #define vloxseg7ei32_v_u16m1_m | ( | ... | ) | __riscv_vloxseg7ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_u16mf2 | ( | ... | ) | __riscv_vloxseg7ei32_v_u16mf2(__VA_ARGS__) |
| #define vloxseg7ei32_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg7ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_u16mf4 | ( | ... | ) | __riscv_vloxseg7ei32_v_u16mf4(__VA_ARGS__) |
| #define vloxseg7ei32_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg7ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_u32m1 | ( | ... | ) | __riscv_vloxseg7ei32_v_u32m1(__VA_ARGS__) |
| #define vloxseg7ei32_v_u32m1_m | ( | ... | ) | __riscv_vloxseg7ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_u32mf2 | ( | ... | ) | __riscv_vloxseg7ei32_v_u32mf2(__VA_ARGS__) |
| #define vloxseg7ei32_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg7ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_u64m1 | ( | ... | ) | __riscv_vloxseg7ei32_v_u64m1(__VA_ARGS__) |
| #define vloxseg7ei32_v_u64m1_m | ( | ... | ) | __riscv_vloxseg7ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_u8m1 | ( | ... | ) | __riscv_vloxseg7ei32_v_u8m1(__VA_ARGS__) |
| #define vloxseg7ei32_v_u8m1_m | ( | ... | ) | __riscv_vloxseg7ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_u8mf2 | ( | ... | ) | __riscv_vloxseg7ei32_v_u8mf2(__VA_ARGS__) |
| #define vloxseg7ei32_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg7ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_u8mf4 | ( | ... | ) | __riscv_vloxseg7ei32_v_u8mf4(__VA_ARGS__) |
| #define vloxseg7ei32_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg7ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg7ei32_v_u8mf8 | ( | ... | ) | __riscv_vloxseg7ei32_v_u8mf8(__VA_ARGS__) |
| #define vloxseg7ei32_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg7ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_f16m1 | ( | ... | ) | __riscv_vloxseg7ei64_v_f16m1(__VA_ARGS__) |
| #define vloxseg7ei64_v_f16m1_m | ( | ... | ) | __riscv_vloxseg7ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_f16mf2 | ( | ... | ) | __riscv_vloxseg7ei64_v_f16mf2(__VA_ARGS__) |
| #define vloxseg7ei64_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg7ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_f16mf4 | ( | ... | ) | __riscv_vloxseg7ei64_v_f16mf4(__VA_ARGS__) |
| #define vloxseg7ei64_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg7ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_f32m1 | ( | ... | ) | __riscv_vloxseg7ei64_v_f32m1(__VA_ARGS__) |
| #define vloxseg7ei64_v_f32m1_m | ( | ... | ) | __riscv_vloxseg7ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_f32mf2 | ( | ... | ) | __riscv_vloxseg7ei64_v_f32mf2(__VA_ARGS__) |
| #define vloxseg7ei64_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg7ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_f64m1 | ( | ... | ) | __riscv_vloxseg7ei64_v_f64m1(__VA_ARGS__) |
| #define vloxseg7ei64_v_f64m1_m | ( | ... | ) | __riscv_vloxseg7ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_i16m1 | ( | ... | ) | __riscv_vloxseg7ei64_v_i16m1(__VA_ARGS__) |
| #define vloxseg7ei64_v_i16m1_m | ( | ... | ) | __riscv_vloxseg7ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_i16mf2 | ( | ... | ) | __riscv_vloxseg7ei64_v_i16mf2(__VA_ARGS__) |
| #define vloxseg7ei64_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg7ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_i16mf4 | ( | ... | ) | __riscv_vloxseg7ei64_v_i16mf4(__VA_ARGS__) |
| #define vloxseg7ei64_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg7ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_i32m1 | ( | ... | ) | __riscv_vloxseg7ei64_v_i32m1(__VA_ARGS__) |
| #define vloxseg7ei64_v_i32m1_m | ( | ... | ) | __riscv_vloxseg7ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_i32mf2 | ( | ... | ) | __riscv_vloxseg7ei64_v_i32mf2(__VA_ARGS__) |
| #define vloxseg7ei64_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg7ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_i64m1 | ( | ... | ) | __riscv_vloxseg7ei64_v_i64m1(__VA_ARGS__) |
| #define vloxseg7ei64_v_i64m1_m | ( | ... | ) | __riscv_vloxseg7ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_i8m1 | ( | ... | ) | __riscv_vloxseg7ei64_v_i8m1(__VA_ARGS__) |
| #define vloxseg7ei64_v_i8m1_m | ( | ... | ) | __riscv_vloxseg7ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_i8mf2 | ( | ... | ) | __riscv_vloxseg7ei64_v_i8mf2(__VA_ARGS__) |
| #define vloxseg7ei64_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg7ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_i8mf4 | ( | ... | ) | __riscv_vloxseg7ei64_v_i8mf4(__VA_ARGS__) |
| #define vloxseg7ei64_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg7ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_i8mf8 | ( | ... | ) | __riscv_vloxseg7ei64_v_i8mf8(__VA_ARGS__) |
| #define vloxseg7ei64_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg7ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_u16m1 | ( | ... | ) | __riscv_vloxseg7ei64_v_u16m1(__VA_ARGS__) |
| #define vloxseg7ei64_v_u16m1_m | ( | ... | ) | __riscv_vloxseg7ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_u16mf2 | ( | ... | ) | __riscv_vloxseg7ei64_v_u16mf2(__VA_ARGS__) |
| #define vloxseg7ei64_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg7ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_u16mf4 | ( | ... | ) | __riscv_vloxseg7ei64_v_u16mf4(__VA_ARGS__) |
| #define vloxseg7ei64_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg7ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_u32m1 | ( | ... | ) | __riscv_vloxseg7ei64_v_u32m1(__VA_ARGS__) |
| #define vloxseg7ei64_v_u32m1_m | ( | ... | ) | __riscv_vloxseg7ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_u32mf2 | ( | ... | ) | __riscv_vloxseg7ei64_v_u32mf2(__VA_ARGS__) |
| #define vloxseg7ei64_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg7ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_u64m1 | ( | ... | ) | __riscv_vloxseg7ei64_v_u64m1(__VA_ARGS__) |
| #define vloxseg7ei64_v_u64m1_m | ( | ... | ) | __riscv_vloxseg7ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_u8m1 | ( | ... | ) | __riscv_vloxseg7ei64_v_u8m1(__VA_ARGS__) |
| #define vloxseg7ei64_v_u8m1_m | ( | ... | ) | __riscv_vloxseg7ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_u8mf2 | ( | ... | ) | __riscv_vloxseg7ei64_v_u8mf2(__VA_ARGS__) |
| #define vloxseg7ei64_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg7ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_u8mf4 | ( | ... | ) | __riscv_vloxseg7ei64_v_u8mf4(__VA_ARGS__) |
| #define vloxseg7ei64_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg7ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg7ei64_v_u8mf8 | ( | ... | ) | __riscv_vloxseg7ei64_v_u8mf8(__VA_ARGS__) |
| #define vloxseg7ei64_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg7ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_f16m1 | ( | ... | ) | __riscv_vloxseg7ei8_v_f16m1(__VA_ARGS__) |
| #define vloxseg7ei8_v_f16m1_m | ( | ... | ) | __riscv_vloxseg7ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_f16mf2 | ( | ... | ) | __riscv_vloxseg7ei8_v_f16mf2(__VA_ARGS__) |
| #define vloxseg7ei8_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg7ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_f16mf4 | ( | ... | ) | __riscv_vloxseg7ei8_v_f16mf4(__VA_ARGS__) |
| #define vloxseg7ei8_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg7ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_f32m1 | ( | ... | ) | __riscv_vloxseg7ei8_v_f32m1(__VA_ARGS__) |
| #define vloxseg7ei8_v_f32m1_m | ( | ... | ) | __riscv_vloxseg7ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_f32mf2 | ( | ... | ) | __riscv_vloxseg7ei8_v_f32mf2(__VA_ARGS__) |
| #define vloxseg7ei8_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg7ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_f64m1 | ( | ... | ) | __riscv_vloxseg7ei8_v_f64m1(__VA_ARGS__) |
| #define vloxseg7ei8_v_f64m1_m | ( | ... | ) | __riscv_vloxseg7ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_i16m1 | ( | ... | ) | __riscv_vloxseg7ei8_v_i16m1(__VA_ARGS__) |
| #define vloxseg7ei8_v_i16m1_m | ( | ... | ) | __riscv_vloxseg7ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_i16mf2 | ( | ... | ) | __riscv_vloxseg7ei8_v_i16mf2(__VA_ARGS__) |
| #define vloxseg7ei8_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg7ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_i16mf4 | ( | ... | ) | __riscv_vloxseg7ei8_v_i16mf4(__VA_ARGS__) |
| #define vloxseg7ei8_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg7ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_i32m1 | ( | ... | ) | __riscv_vloxseg7ei8_v_i32m1(__VA_ARGS__) |
| #define vloxseg7ei8_v_i32m1_m | ( | ... | ) | __riscv_vloxseg7ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_i32mf2 | ( | ... | ) | __riscv_vloxseg7ei8_v_i32mf2(__VA_ARGS__) |
| #define vloxseg7ei8_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg7ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_i64m1 | ( | ... | ) | __riscv_vloxseg7ei8_v_i64m1(__VA_ARGS__) |
| #define vloxseg7ei8_v_i64m1_m | ( | ... | ) | __riscv_vloxseg7ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_i8m1 | ( | ... | ) | __riscv_vloxseg7ei8_v_i8m1(__VA_ARGS__) |
| #define vloxseg7ei8_v_i8m1_m | ( | ... | ) | __riscv_vloxseg7ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_i8mf2 | ( | ... | ) | __riscv_vloxseg7ei8_v_i8mf2(__VA_ARGS__) |
| #define vloxseg7ei8_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg7ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_i8mf4 | ( | ... | ) | __riscv_vloxseg7ei8_v_i8mf4(__VA_ARGS__) |
| #define vloxseg7ei8_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg7ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_i8mf8 | ( | ... | ) | __riscv_vloxseg7ei8_v_i8mf8(__VA_ARGS__) |
| #define vloxseg7ei8_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg7ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_u16m1 | ( | ... | ) | __riscv_vloxseg7ei8_v_u16m1(__VA_ARGS__) |
| #define vloxseg7ei8_v_u16m1_m | ( | ... | ) | __riscv_vloxseg7ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_u16mf2 | ( | ... | ) | __riscv_vloxseg7ei8_v_u16mf2(__VA_ARGS__) |
| #define vloxseg7ei8_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg7ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_u16mf4 | ( | ... | ) | __riscv_vloxseg7ei8_v_u16mf4(__VA_ARGS__) |
| #define vloxseg7ei8_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg7ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_u32m1 | ( | ... | ) | __riscv_vloxseg7ei8_v_u32m1(__VA_ARGS__) |
| #define vloxseg7ei8_v_u32m1_m | ( | ... | ) | __riscv_vloxseg7ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_u32mf2 | ( | ... | ) | __riscv_vloxseg7ei8_v_u32mf2(__VA_ARGS__) |
| #define vloxseg7ei8_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg7ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_u64m1 | ( | ... | ) | __riscv_vloxseg7ei8_v_u64m1(__VA_ARGS__) |
| #define vloxseg7ei8_v_u64m1_m | ( | ... | ) | __riscv_vloxseg7ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_u8m1 | ( | ... | ) | __riscv_vloxseg7ei8_v_u8m1(__VA_ARGS__) |
| #define vloxseg7ei8_v_u8m1_m | ( | ... | ) | __riscv_vloxseg7ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_u8mf2 | ( | ... | ) | __riscv_vloxseg7ei8_v_u8mf2(__VA_ARGS__) |
| #define vloxseg7ei8_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg7ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_u8mf4 | ( | ... | ) | __riscv_vloxseg7ei8_v_u8mf4(__VA_ARGS__) |
| #define vloxseg7ei8_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg7ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg7ei8_v_u8mf8 | ( | ... | ) | __riscv_vloxseg7ei8_v_u8mf8(__VA_ARGS__) |
| #define vloxseg7ei8_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg7ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_f16m1 | ( | ... | ) | __riscv_vloxseg8ei16_v_f16m1(__VA_ARGS__) |
| #define vloxseg8ei16_v_f16m1_m | ( | ... | ) | __riscv_vloxseg8ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_f16mf2 | ( | ... | ) | __riscv_vloxseg8ei16_v_f16mf2(__VA_ARGS__) |
| #define vloxseg8ei16_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg8ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_f16mf4 | ( | ... | ) | __riscv_vloxseg8ei16_v_f16mf4(__VA_ARGS__) |
| #define vloxseg8ei16_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg8ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_f32m1 | ( | ... | ) | __riscv_vloxseg8ei16_v_f32m1(__VA_ARGS__) |
| #define vloxseg8ei16_v_f32m1_m | ( | ... | ) | __riscv_vloxseg8ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_f32mf2 | ( | ... | ) | __riscv_vloxseg8ei16_v_f32mf2(__VA_ARGS__) |
| #define vloxseg8ei16_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg8ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_f64m1 | ( | ... | ) | __riscv_vloxseg8ei16_v_f64m1(__VA_ARGS__) |
| #define vloxseg8ei16_v_f64m1_m | ( | ... | ) | __riscv_vloxseg8ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_i16m1 | ( | ... | ) | __riscv_vloxseg8ei16_v_i16m1(__VA_ARGS__) |
| #define vloxseg8ei16_v_i16m1_m | ( | ... | ) | __riscv_vloxseg8ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_i16mf2 | ( | ... | ) | __riscv_vloxseg8ei16_v_i16mf2(__VA_ARGS__) |
| #define vloxseg8ei16_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg8ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_i16mf4 | ( | ... | ) | __riscv_vloxseg8ei16_v_i16mf4(__VA_ARGS__) |
| #define vloxseg8ei16_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg8ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_i32m1 | ( | ... | ) | __riscv_vloxseg8ei16_v_i32m1(__VA_ARGS__) |
| #define vloxseg8ei16_v_i32m1_m | ( | ... | ) | __riscv_vloxseg8ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_i32mf2 | ( | ... | ) | __riscv_vloxseg8ei16_v_i32mf2(__VA_ARGS__) |
| #define vloxseg8ei16_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg8ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_i64m1 | ( | ... | ) | __riscv_vloxseg8ei16_v_i64m1(__VA_ARGS__) |
| #define vloxseg8ei16_v_i64m1_m | ( | ... | ) | __riscv_vloxseg8ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_i8m1 | ( | ... | ) | __riscv_vloxseg8ei16_v_i8m1(__VA_ARGS__) |
| #define vloxseg8ei16_v_i8m1_m | ( | ... | ) | __riscv_vloxseg8ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_i8mf2 | ( | ... | ) | __riscv_vloxseg8ei16_v_i8mf2(__VA_ARGS__) |
| #define vloxseg8ei16_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg8ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_i8mf4 | ( | ... | ) | __riscv_vloxseg8ei16_v_i8mf4(__VA_ARGS__) |
| #define vloxseg8ei16_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg8ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_i8mf8 | ( | ... | ) | __riscv_vloxseg8ei16_v_i8mf8(__VA_ARGS__) |
| #define vloxseg8ei16_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg8ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_u16m1 | ( | ... | ) | __riscv_vloxseg8ei16_v_u16m1(__VA_ARGS__) |
| #define vloxseg8ei16_v_u16m1_m | ( | ... | ) | __riscv_vloxseg8ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_u16mf2 | ( | ... | ) | __riscv_vloxseg8ei16_v_u16mf2(__VA_ARGS__) |
| #define vloxseg8ei16_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg8ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_u16mf4 | ( | ... | ) | __riscv_vloxseg8ei16_v_u16mf4(__VA_ARGS__) |
| #define vloxseg8ei16_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg8ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_u32m1 | ( | ... | ) | __riscv_vloxseg8ei16_v_u32m1(__VA_ARGS__) |
| #define vloxseg8ei16_v_u32m1_m | ( | ... | ) | __riscv_vloxseg8ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_u32mf2 | ( | ... | ) | __riscv_vloxseg8ei16_v_u32mf2(__VA_ARGS__) |
| #define vloxseg8ei16_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg8ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_u64m1 | ( | ... | ) | __riscv_vloxseg8ei16_v_u64m1(__VA_ARGS__) |
| #define vloxseg8ei16_v_u64m1_m | ( | ... | ) | __riscv_vloxseg8ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_u8m1 | ( | ... | ) | __riscv_vloxseg8ei16_v_u8m1(__VA_ARGS__) |
| #define vloxseg8ei16_v_u8m1_m | ( | ... | ) | __riscv_vloxseg8ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_u8mf2 | ( | ... | ) | __riscv_vloxseg8ei16_v_u8mf2(__VA_ARGS__) |
| #define vloxseg8ei16_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg8ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_u8mf4 | ( | ... | ) | __riscv_vloxseg8ei16_v_u8mf4(__VA_ARGS__) |
| #define vloxseg8ei16_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg8ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg8ei16_v_u8mf8 | ( | ... | ) | __riscv_vloxseg8ei16_v_u8mf8(__VA_ARGS__) |
| #define vloxseg8ei16_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg8ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_f16m1 | ( | ... | ) | __riscv_vloxseg8ei32_v_f16m1(__VA_ARGS__) |
| #define vloxseg8ei32_v_f16m1_m | ( | ... | ) | __riscv_vloxseg8ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_f16mf2 | ( | ... | ) | __riscv_vloxseg8ei32_v_f16mf2(__VA_ARGS__) |
| #define vloxseg8ei32_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg8ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_f16mf4 | ( | ... | ) | __riscv_vloxseg8ei32_v_f16mf4(__VA_ARGS__) |
| #define vloxseg8ei32_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg8ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_f32m1 | ( | ... | ) | __riscv_vloxseg8ei32_v_f32m1(__VA_ARGS__) |
| #define vloxseg8ei32_v_f32m1_m | ( | ... | ) | __riscv_vloxseg8ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_f32mf2 | ( | ... | ) | __riscv_vloxseg8ei32_v_f32mf2(__VA_ARGS__) |
| #define vloxseg8ei32_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg8ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_f64m1 | ( | ... | ) | __riscv_vloxseg8ei32_v_f64m1(__VA_ARGS__) |
| #define vloxseg8ei32_v_f64m1_m | ( | ... | ) | __riscv_vloxseg8ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_i16m1 | ( | ... | ) | __riscv_vloxseg8ei32_v_i16m1(__VA_ARGS__) |
| #define vloxseg8ei32_v_i16m1_m | ( | ... | ) | __riscv_vloxseg8ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_i16mf2 | ( | ... | ) | __riscv_vloxseg8ei32_v_i16mf2(__VA_ARGS__) |
| #define vloxseg8ei32_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg8ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_i16mf4 | ( | ... | ) | __riscv_vloxseg8ei32_v_i16mf4(__VA_ARGS__) |
| #define vloxseg8ei32_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg8ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_i32m1 | ( | ... | ) | __riscv_vloxseg8ei32_v_i32m1(__VA_ARGS__) |
| #define vloxseg8ei32_v_i32m1_m | ( | ... | ) | __riscv_vloxseg8ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_i32mf2 | ( | ... | ) | __riscv_vloxseg8ei32_v_i32mf2(__VA_ARGS__) |
| #define vloxseg8ei32_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg8ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_i64m1 | ( | ... | ) | __riscv_vloxseg8ei32_v_i64m1(__VA_ARGS__) |
| #define vloxseg8ei32_v_i64m1_m | ( | ... | ) | __riscv_vloxseg8ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_i8m1 | ( | ... | ) | __riscv_vloxseg8ei32_v_i8m1(__VA_ARGS__) |
| #define vloxseg8ei32_v_i8m1_m | ( | ... | ) | __riscv_vloxseg8ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_i8mf2 | ( | ... | ) | __riscv_vloxseg8ei32_v_i8mf2(__VA_ARGS__) |
| #define vloxseg8ei32_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg8ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_i8mf4 | ( | ... | ) | __riscv_vloxseg8ei32_v_i8mf4(__VA_ARGS__) |
| #define vloxseg8ei32_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg8ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_i8mf8 | ( | ... | ) | __riscv_vloxseg8ei32_v_i8mf8(__VA_ARGS__) |
| #define vloxseg8ei32_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg8ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_u16m1 | ( | ... | ) | __riscv_vloxseg8ei32_v_u16m1(__VA_ARGS__) |
| #define vloxseg8ei32_v_u16m1_m | ( | ... | ) | __riscv_vloxseg8ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_u16mf2 | ( | ... | ) | __riscv_vloxseg8ei32_v_u16mf2(__VA_ARGS__) |
| #define vloxseg8ei32_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg8ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_u16mf4 | ( | ... | ) | __riscv_vloxseg8ei32_v_u16mf4(__VA_ARGS__) |
| #define vloxseg8ei32_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg8ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_u32m1 | ( | ... | ) | __riscv_vloxseg8ei32_v_u32m1(__VA_ARGS__) |
| #define vloxseg8ei32_v_u32m1_m | ( | ... | ) | __riscv_vloxseg8ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_u32mf2 | ( | ... | ) | __riscv_vloxseg8ei32_v_u32mf2(__VA_ARGS__) |
| #define vloxseg8ei32_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg8ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_u64m1 | ( | ... | ) | __riscv_vloxseg8ei32_v_u64m1(__VA_ARGS__) |
| #define vloxseg8ei32_v_u64m1_m | ( | ... | ) | __riscv_vloxseg8ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_u8m1 | ( | ... | ) | __riscv_vloxseg8ei32_v_u8m1(__VA_ARGS__) |
| #define vloxseg8ei32_v_u8m1_m | ( | ... | ) | __riscv_vloxseg8ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_u8mf2 | ( | ... | ) | __riscv_vloxseg8ei32_v_u8mf2(__VA_ARGS__) |
| #define vloxseg8ei32_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg8ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_u8mf4 | ( | ... | ) | __riscv_vloxseg8ei32_v_u8mf4(__VA_ARGS__) |
| #define vloxseg8ei32_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg8ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg8ei32_v_u8mf8 | ( | ... | ) | __riscv_vloxseg8ei32_v_u8mf8(__VA_ARGS__) |
| #define vloxseg8ei32_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg8ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_f16m1 | ( | ... | ) | __riscv_vloxseg8ei64_v_f16m1(__VA_ARGS__) |
| #define vloxseg8ei64_v_f16m1_m | ( | ... | ) | __riscv_vloxseg8ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_f16mf2 | ( | ... | ) | __riscv_vloxseg8ei64_v_f16mf2(__VA_ARGS__) |
| #define vloxseg8ei64_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg8ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_f16mf4 | ( | ... | ) | __riscv_vloxseg8ei64_v_f16mf4(__VA_ARGS__) |
| #define vloxseg8ei64_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg8ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_f32m1 | ( | ... | ) | __riscv_vloxseg8ei64_v_f32m1(__VA_ARGS__) |
| #define vloxseg8ei64_v_f32m1_m | ( | ... | ) | __riscv_vloxseg8ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_f32mf2 | ( | ... | ) | __riscv_vloxseg8ei64_v_f32mf2(__VA_ARGS__) |
| #define vloxseg8ei64_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg8ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_f64m1 | ( | ... | ) | __riscv_vloxseg8ei64_v_f64m1(__VA_ARGS__) |
| #define vloxseg8ei64_v_f64m1_m | ( | ... | ) | __riscv_vloxseg8ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_i16m1 | ( | ... | ) | __riscv_vloxseg8ei64_v_i16m1(__VA_ARGS__) |
| #define vloxseg8ei64_v_i16m1_m | ( | ... | ) | __riscv_vloxseg8ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_i16mf2 | ( | ... | ) | __riscv_vloxseg8ei64_v_i16mf2(__VA_ARGS__) |
| #define vloxseg8ei64_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg8ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_i16mf4 | ( | ... | ) | __riscv_vloxseg8ei64_v_i16mf4(__VA_ARGS__) |
| #define vloxseg8ei64_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg8ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_i32m1 | ( | ... | ) | __riscv_vloxseg8ei64_v_i32m1(__VA_ARGS__) |
| #define vloxseg8ei64_v_i32m1_m | ( | ... | ) | __riscv_vloxseg8ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_i32mf2 | ( | ... | ) | __riscv_vloxseg8ei64_v_i32mf2(__VA_ARGS__) |
| #define vloxseg8ei64_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg8ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_i64m1 | ( | ... | ) | __riscv_vloxseg8ei64_v_i64m1(__VA_ARGS__) |
| #define vloxseg8ei64_v_i64m1_m | ( | ... | ) | __riscv_vloxseg8ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_i8m1 | ( | ... | ) | __riscv_vloxseg8ei64_v_i8m1(__VA_ARGS__) |
| #define vloxseg8ei64_v_i8m1_m | ( | ... | ) | __riscv_vloxseg8ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_i8mf2 | ( | ... | ) | __riscv_vloxseg8ei64_v_i8mf2(__VA_ARGS__) |
| #define vloxseg8ei64_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg8ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_i8mf4 | ( | ... | ) | __riscv_vloxseg8ei64_v_i8mf4(__VA_ARGS__) |
| #define vloxseg8ei64_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg8ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_i8mf8 | ( | ... | ) | __riscv_vloxseg8ei64_v_i8mf8(__VA_ARGS__) |
| #define vloxseg8ei64_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg8ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_u16m1 | ( | ... | ) | __riscv_vloxseg8ei64_v_u16m1(__VA_ARGS__) |
| #define vloxseg8ei64_v_u16m1_m | ( | ... | ) | __riscv_vloxseg8ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_u16mf2 | ( | ... | ) | __riscv_vloxseg8ei64_v_u16mf2(__VA_ARGS__) |
| #define vloxseg8ei64_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg8ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_u16mf4 | ( | ... | ) | __riscv_vloxseg8ei64_v_u16mf4(__VA_ARGS__) |
| #define vloxseg8ei64_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg8ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_u32m1 | ( | ... | ) | __riscv_vloxseg8ei64_v_u32m1(__VA_ARGS__) |
| #define vloxseg8ei64_v_u32m1_m | ( | ... | ) | __riscv_vloxseg8ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_u32mf2 | ( | ... | ) | __riscv_vloxseg8ei64_v_u32mf2(__VA_ARGS__) |
| #define vloxseg8ei64_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg8ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_u64m1 | ( | ... | ) | __riscv_vloxseg8ei64_v_u64m1(__VA_ARGS__) |
| #define vloxseg8ei64_v_u64m1_m | ( | ... | ) | __riscv_vloxseg8ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_u8m1 | ( | ... | ) | __riscv_vloxseg8ei64_v_u8m1(__VA_ARGS__) |
| #define vloxseg8ei64_v_u8m1_m | ( | ... | ) | __riscv_vloxseg8ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_u8mf2 | ( | ... | ) | __riscv_vloxseg8ei64_v_u8mf2(__VA_ARGS__) |
| #define vloxseg8ei64_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg8ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_u8mf4 | ( | ... | ) | __riscv_vloxseg8ei64_v_u8mf4(__VA_ARGS__) |
| #define vloxseg8ei64_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg8ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg8ei64_v_u8mf8 | ( | ... | ) | __riscv_vloxseg8ei64_v_u8mf8(__VA_ARGS__) |
| #define vloxseg8ei64_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg8ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_f16m1 | ( | ... | ) | __riscv_vloxseg8ei8_v_f16m1(__VA_ARGS__) |
| #define vloxseg8ei8_v_f16m1_m | ( | ... | ) | __riscv_vloxseg8ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_f16mf2 | ( | ... | ) | __riscv_vloxseg8ei8_v_f16mf2(__VA_ARGS__) |
| #define vloxseg8ei8_v_f16mf2_m | ( | ... | ) | __riscv_vloxseg8ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_f16mf4 | ( | ... | ) | __riscv_vloxseg8ei8_v_f16mf4(__VA_ARGS__) |
| #define vloxseg8ei8_v_f16mf4_m | ( | ... | ) | __riscv_vloxseg8ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_f32m1 | ( | ... | ) | __riscv_vloxseg8ei8_v_f32m1(__VA_ARGS__) |
| #define vloxseg8ei8_v_f32m1_m | ( | ... | ) | __riscv_vloxseg8ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_f32mf2 | ( | ... | ) | __riscv_vloxseg8ei8_v_f32mf2(__VA_ARGS__) |
| #define vloxseg8ei8_v_f32mf2_m | ( | ... | ) | __riscv_vloxseg8ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_f64m1 | ( | ... | ) | __riscv_vloxseg8ei8_v_f64m1(__VA_ARGS__) |
| #define vloxseg8ei8_v_f64m1_m | ( | ... | ) | __riscv_vloxseg8ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_i16m1 | ( | ... | ) | __riscv_vloxseg8ei8_v_i16m1(__VA_ARGS__) |
| #define vloxseg8ei8_v_i16m1_m | ( | ... | ) | __riscv_vloxseg8ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_i16mf2 | ( | ... | ) | __riscv_vloxseg8ei8_v_i16mf2(__VA_ARGS__) |
| #define vloxseg8ei8_v_i16mf2_m | ( | ... | ) | __riscv_vloxseg8ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_i16mf4 | ( | ... | ) | __riscv_vloxseg8ei8_v_i16mf4(__VA_ARGS__) |
| #define vloxseg8ei8_v_i16mf4_m | ( | ... | ) | __riscv_vloxseg8ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_i32m1 | ( | ... | ) | __riscv_vloxseg8ei8_v_i32m1(__VA_ARGS__) |
| #define vloxseg8ei8_v_i32m1_m | ( | ... | ) | __riscv_vloxseg8ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_i32mf2 | ( | ... | ) | __riscv_vloxseg8ei8_v_i32mf2(__VA_ARGS__) |
| #define vloxseg8ei8_v_i32mf2_m | ( | ... | ) | __riscv_vloxseg8ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_i64m1 | ( | ... | ) | __riscv_vloxseg8ei8_v_i64m1(__VA_ARGS__) |
| #define vloxseg8ei8_v_i64m1_m | ( | ... | ) | __riscv_vloxseg8ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_i8m1 | ( | ... | ) | __riscv_vloxseg8ei8_v_i8m1(__VA_ARGS__) |
| #define vloxseg8ei8_v_i8m1_m | ( | ... | ) | __riscv_vloxseg8ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_i8mf2 | ( | ... | ) | __riscv_vloxseg8ei8_v_i8mf2(__VA_ARGS__) |
| #define vloxseg8ei8_v_i8mf2_m | ( | ... | ) | __riscv_vloxseg8ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_i8mf4 | ( | ... | ) | __riscv_vloxseg8ei8_v_i8mf4(__VA_ARGS__) |
| #define vloxseg8ei8_v_i8mf4_m | ( | ... | ) | __riscv_vloxseg8ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_i8mf8 | ( | ... | ) | __riscv_vloxseg8ei8_v_i8mf8(__VA_ARGS__) |
| #define vloxseg8ei8_v_i8mf8_m | ( | ... | ) | __riscv_vloxseg8ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_u16m1 | ( | ... | ) | __riscv_vloxseg8ei8_v_u16m1(__VA_ARGS__) |
| #define vloxseg8ei8_v_u16m1_m | ( | ... | ) | __riscv_vloxseg8ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_u16mf2 | ( | ... | ) | __riscv_vloxseg8ei8_v_u16mf2(__VA_ARGS__) |
| #define vloxseg8ei8_v_u16mf2_m | ( | ... | ) | __riscv_vloxseg8ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_u16mf4 | ( | ... | ) | __riscv_vloxseg8ei8_v_u16mf4(__VA_ARGS__) |
| #define vloxseg8ei8_v_u16mf4_m | ( | ... | ) | __riscv_vloxseg8ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_u32m1 | ( | ... | ) | __riscv_vloxseg8ei8_v_u32m1(__VA_ARGS__) |
| #define vloxseg8ei8_v_u32m1_m | ( | ... | ) | __riscv_vloxseg8ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_u32mf2 | ( | ... | ) | __riscv_vloxseg8ei8_v_u32mf2(__VA_ARGS__) |
| #define vloxseg8ei8_v_u32mf2_m | ( | ... | ) | __riscv_vloxseg8ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_u64m1 | ( | ... | ) | __riscv_vloxseg8ei8_v_u64m1(__VA_ARGS__) |
| #define vloxseg8ei8_v_u64m1_m | ( | ... | ) | __riscv_vloxseg8ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_u8m1 | ( | ... | ) | __riscv_vloxseg8ei8_v_u8m1(__VA_ARGS__) |
| #define vloxseg8ei8_v_u8m1_m | ( | ... | ) | __riscv_vloxseg8ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_u8mf2 | ( | ... | ) | __riscv_vloxseg8ei8_v_u8mf2(__VA_ARGS__) |
| #define vloxseg8ei8_v_u8mf2_m | ( | ... | ) | __riscv_vloxseg8ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_u8mf4 | ( | ... | ) | __riscv_vloxseg8ei8_v_u8mf4(__VA_ARGS__) |
| #define vloxseg8ei8_v_u8mf4_m | ( | ... | ) | __riscv_vloxseg8ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vloxseg8ei8_v_u8mf8 | ( | ... | ) | __riscv_vloxseg8ei8_v_u8mf8(__VA_ARGS__) |
| #define vloxseg8ei8_v_u8mf8_m | ( | ... | ) | __riscv_vloxseg8ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlse16_v_f16m1 | ( | ... | ) | __riscv_vlse16_v_f16m1(__VA_ARGS__) |
| #define vlse16_v_f16m1_m | ( | ... | ) | __riscv_vlse16_v_f16m1_tumu(__VA_ARGS__) |
| #define vlse16_v_f16m2 | ( | ... | ) | __riscv_vlse16_v_f16m2(__VA_ARGS__) |
| #define vlse16_v_f16m2_m | ( | ... | ) | __riscv_vlse16_v_f16m2_tumu(__VA_ARGS__) |
| #define vlse16_v_f16m4 | ( | ... | ) | __riscv_vlse16_v_f16m4(__VA_ARGS__) |
| #define vlse16_v_f16m4_m | ( | ... | ) | __riscv_vlse16_v_f16m4_tumu(__VA_ARGS__) |
| #define vlse16_v_f16m8 | ( | ... | ) | __riscv_vlse16_v_f16m8(__VA_ARGS__) |
| #define vlse16_v_f16m8_m | ( | ... | ) | __riscv_vlse16_v_f16m8_tumu(__VA_ARGS__) |
| #define vlse16_v_f16mf2 | ( | ... | ) | __riscv_vlse16_v_f16mf2(__VA_ARGS__) |
| #define vlse16_v_f16mf2_m | ( | ... | ) | __riscv_vlse16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlse16_v_f16mf4 | ( | ... | ) | __riscv_vlse16_v_f16mf4(__VA_ARGS__) |
| #define vlse16_v_f16mf4_m | ( | ... | ) | __riscv_vlse16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlse16_v_i16m1 | ( | ... | ) | __riscv_vlse16_v_i16m1(__VA_ARGS__) |
| #define vlse16_v_i16m1_m | ( | ... | ) | __riscv_vlse16_v_i16m1_tumu(__VA_ARGS__) |
| #define vlse16_v_i16m2 | ( | ... | ) | __riscv_vlse16_v_i16m2(__VA_ARGS__) |
| #define vlse16_v_i16m2_m | ( | ... | ) | __riscv_vlse16_v_i16m2_tumu(__VA_ARGS__) |
| #define vlse16_v_i16m4 | ( | ... | ) | __riscv_vlse16_v_i16m4(__VA_ARGS__) |
| #define vlse16_v_i16m4_m | ( | ... | ) | __riscv_vlse16_v_i16m4_tumu(__VA_ARGS__) |
| #define vlse16_v_i16m8 | ( | ... | ) | __riscv_vlse16_v_i16m8(__VA_ARGS__) |
| #define vlse16_v_i16m8_m | ( | ... | ) | __riscv_vlse16_v_i16m8_tumu(__VA_ARGS__) |
| #define vlse16_v_i16mf2 | ( | ... | ) | __riscv_vlse16_v_i16mf2(__VA_ARGS__) |
| #define vlse16_v_i16mf2_m | ( | ... | ) | __riscv_vlse16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlse16_v_i16mf4 | ( | ... | ) | __riscv_vlse16_v_i16mf4(__VA_ARGS__) |
| #define vlse16_v_i16mf4_m | ( | ... | ) | __riscv_vlse16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlse16_v_u16m1 | ( | ... | ) | __riscv_vlse16_v_u16m1(__VA_ARGS__) |
| #define vlse16_v_u16m1_m | ( | ... | ) | __riscv_vlse16_v_u16m1_tumu(__VA_ARGS__) |
| #define vlse16_v_u16m2 | ( | ... | ) | __riscv_vlse16_v_u16m2(__VA_ARGS__) |
| #define vlse16_v_u16m2_m | ( | ... | ) | __riscv_vlse16_v_u16m2_tumu(__VA_ARGS__) |
| #define vlse16_v_u16m4 | ( | ... | ) | __riscv_vlse16_v_u16m4(__VA_ARGS__) |
| #define vlse16_v_u16m4_m | ( | ... | ) | __riscv_vlse16_v_u16m4_tumu(__VA_ARGS__) |
| #define vlse16_v_u16m8 | ( | ... | ) | __riscv_vlse16_v_u16m8(__VA_ARGS__) |
| #define vlse16_v_u16m8_m | ( | ... | ) | __riscv_vlse16_v_u16m8_tumu(__VA_ARGS__) |
| #define vlse16_v_u16mf2 | ( | ... | ) | __riscv_vlse16_v_u16mf2(__VA_ARGS__) |
| #define vlse16_v_u16mf2_m | ( | ... | ) | __riscv_vlse16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlse16_v_u16mf4 | ( | ... | ) | __riscv_vlse16_v_u16mf4(__VA_ARGS__) |
| #define vlse16_v_u16mf4_m | ( | ... | ) | __riscv_vlse16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlse32_v_f32m1 | ( | ... | ) | __riscv_vlse32_v_f32m1(__VA_ARGS__) |
| #define vlse32_v_f32m1_m | ( | ... | ) | __riscv_vlse32_v_f32m1_tumu(__VA_ARGS__) |
| #define vlse32_v_f32m2 | ( | ... | ) | __riscv_vlse32_v_f32m2(__VA_ARGS__) |
| #define vlse32_v_f32m2_m | ( | ... | ) | __riscv_vlse32_v_f32m2_tumu(__VA_ARGS__) |
| #define vlse32_v_f32m4 | ( | ... | ) | __riscv_vlse32_v_f32m4(__VA_ARGS__) |
| #define vlse32_v_f32m4_m | ( | ... | ) | __riscv_vlse32_v_f32m4_tumu(__VA_ARGS__) |
| #define vlse32_v_f32m8 | ( | ... | ) | __riscv_vlse32_v_f32m8(__VA_ARGS__) |
| #define vlse32_v_f32m8_m | ( | ... | ) | __riscv_vlse32_v_f32m8_tumu(__VA_ARGS__) |
| #define vlse32_v_f32mf2 | ( | ... | ) | __riscv_vlse32_v_f32mf2(__VA_ARGS__) |
| #define vlse32_v_f32mf2_m | ( | ... | ) | __riscv_vlse32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlse32_v_i32m1 | ( | ... | ) | __riscv_vlse32_v_i32m1(__VA_ARGS__) |
| #define vlse32_v_i32m1_m | ( | ... | ) | __riscv_vlse32_v_i32m1_tumu(__VA_ARGS__) |
| #define vlse32_v_i32m2 | ( | ... | ) | __riscv_vlse32_v_i32m2(__VA_ARGS__) |
| #define vlse32_v_i32m2_m | ( | ... | ) | __riscv_vlse32_v_i32m2_tumu(__VA_ARGS__) |
| #define vlse32_v_i32m4 | ( | ... | ) | __riscv_vlse32_v_i32m4(__VA_ARGS__) |
| #define vlse32_v_i32m4_m | ( | ... | ) | __riscv_vlse32_v_i32m4_tumu(__VA_ARGS__) |
| #define vlse32_v_i32m8 | ( | ... | ) | __riscv_vlse32_v_i32m8(__VA_ARGS__) |
| #define vlse32_v_i32m8_m | ( | ... | ) | __riscv_vlse32_v_i32m8_tumu(__VA_ARGS__) |
| #define vlse32_v_i32mf2 | ( | ... | ) | __riscv_vlse32_v_i32mf2(__VA_ARGS__) |
| #define vlse32_v_i32mf2_m | ( | ... | ) | __riscv_vlse32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlse32_v_u32m1 | ( | ... | ) | __riscv_vlse32_v_u32m1(__VA_ARGS__) |
| #define vlse32_v_u32m1_m | ( | ... | ) | __riscv_vlse32_v_u32m1_tumu(__VA_ARGS__) |
| #define vlse32_v_u32m2 | ( | ... | ) | __riscv_vlse32_v_u32m2(__VA_ARGS__) |
| #define vlse32_v_u32m2_m | ( | ... | ) | __riscv_vlse32_v_u32m2_tumu(__VA_ARGS__) |
| #define vlse32_v_u32m4 | ( | ... | ) | __riscv_vlse32_v_u32m4(__VA_ARGS__) |
| #define vlse32_v_u32m4_m | ( | ... | ) | __riscv_vlse32_v_u32m4_tumu(__VA_ARGS__) |
| #define vlse32_v_u32m8 | ( | ... | ) | __riscv_vlse32_v_u32m8(__VA_ARGS__) |
| #define vlse32_v_u32m8_m | ( | ... | ) | __riscv_vlse32_v_u32m8_tumu(__VA_ARGS__) |
| #define vlse32_v_u32mf2 | ( | ... | ) | __riscv_vlse32_v_u32mf2(__VA_ARGS__) |
| #define vlse32_v_u32mf2_m | ( | ... | ) | __riscv_vlse32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlse64_v_f64m1 | ( | ... | ) | __riscv_vlse64_v_f64m1(__VA_ARGS__) |
| #define vlse64_v_f64m1_m | ( | ... | ) | __riscv_vlse64_v_f64m1_tumu(__VA_ARGS__) |
| #define vlse64_v_f64m2 | ( | ... | ) | __riscv_vlse64_v_f64m2(__VA_ARGS__) |
| #define vlse64_v_f64m2_m | ( | ... | ) | __riscv_vlse64_v_f64m2_tumu(__VA_ARGS__) |
| #define vlse64_v_f64m4 | ( | ... | ) | __riscv_vlse64_v_f64m4(__VA_ARGS__) |
| #define vlse64_v_f64m4_m | ( | ... | ) | __riscv_vlse64_v_f64m4_tumu(__VA_ARGS__) |
| #define vlse64_v_f64m8 | ( | ... | ) | __riscv_vlse64_v_f64m8(__VA_ARGS__) |
| #define vlse64_v_f64m8_m | ( | ... | ) | __riscv_vlse64_v_f64m8_tumu(__VA_ARGS__) |
| #define vlse64_v_i64m1 | ( | ... | ) | __riscv_vlse64_v_i64m1(__VA_ARGS__) |
| #define vlse64_v_i64m1_m | ( | ... | ) | __riscv_vlse64_v_i64m1_tumu(__VA_ARGS__) |
| #define vlse64_v_i64m2 | ( | ... | ) | __riscv_vlse64_v_i64m2(__VA_ARGS__) |
| #define vlse64_v_i64m2_m | ( | ... | ) | __riscv_vlse64_v_i64m2_tumu(__VA_ARGS__) |
| #define vlse64_v_i64m4 | ( | ... | ) | __riscv_vlse64_v_i64m4(__VA_ARGS__) |
| #define vlse64_v_i64m4_m | ( | ... | ) | __riscv_vlse64_v_i64m4_tumu(__VA_ARGS__) |
| #define vlse64_v_i64m8 | ( | ... | ) | __riscv_vlse64_v_i64m8(__VA_ARGS__) |
| #define vlse64_v_i64m8_m | ( | ... | ) | __riscv_vlse64_v_i64m8_tumu(__VA_ARGS__) |
| #define vlse64_v_u64m1 | ( | ... | ) | __riscv_vlse64_v_u64m1(__VA_ARGS__) |
| #define vlse64_v_u64m1_m | ( | ... | ) | __riscv_vlse64_v_u64m1_tumu(__VA_ARGS__) |
| #define vlse64_v_u64m2 | ( | ... | ) | __riscv_vlse64_v_u64m2(__VA_ARGS__) |
| #define vlse64_v_u64m2_m | ( | ... | ) | __riscv_vlse64_v_u64m2_tumu(__VA_ARGS__) |
| #define vlse64_v_u64m4 | ( | ... | ) | __riscv_vlse64_v_u64m4(__VA_ARGS__) |
| #define vlse64_v_u64m4_m | ( | ... | ) | __riscv_vlse64_v_u64m4_tumu(__VA_ARGS__) |
| #define vlse64_v_u64m8 | ( | ... | ) | __riscv_vlse64_v_u64m8(__VA_ARGS__) |
| #define vlse64_v_u64m8_m | ( | ... | ) | __riscv_vlse64_v_u64m8_tumu(__VA_ARGS__) |
| #define vlse8_v_i8m1 | ( | ... | ) | __riscv_vlse8_v_i8m1(__VA_ARGS__) |
| #define vlse8_v_i8m1_m | ( | ... | ) | __riscv_vlse8_v_i8m1_tumu(__VA_ARGS__) |
| #define vlse8_v_i8m2 | ( | ... | ) | __riscv_vlse8_v_i8m2(__VA_ARGS__) |
| #define vlse8_v_i8m2_m | ( | ... | ) | __riscv_vlse8_v_i8m2_tumu(__VA_ARGS__) |
| #define vlse8_v_i8m4 | ( | ... | ) | __riscv_vlse8_v_i8m4(__VA_ARGS__) |
| #define vlse8_v_i8m4_m | ( | ... | ) | __riscv_vlse8_v_i8m4_tumu(__VA_ARGS__) |
| #define vlse8_v_i8m8 | ( | ... | ) | __riscv_vlse8_v_i8m8(__VA_ARGS__) |
| #define vlse8_v_i8m8_m | ( | ... | ) | __riscv_vlse8_v_i8m8_tumu(__VA_ARGS__) |
| #define vlse8_v_i8mf2 | ( | ... | ) | __riscv_vlse8_v_i8mf2(__VA_ARGS__) |
| #define vlse8_v_i8mf2_m | ( | ... | ) | __riscv_vlse8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlse8_v_i8mf4 | ( | ... | ) | __riscv_vlse8_v_i8mf4(__VA_ARGS__) |
| #define vlse8_v_i8mf4_m | ( | ... | ) | __riscv_vlse8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlse8_v_i8mf8 | ( | ... | ) | __riscv_vlse8_v_i8mf8(__VA_ARGS__) |
| #define vlse8_v_i8mf8_m | ( | ... | ) | __riscv_vlse8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlse8_v_u8m1 | ( | ... | ) | __riscv_vlse8_v_u8m1(__VA_ARGS__) |
| #define vlse8_v_u8m1_m | ( | ... | ) | __riscv_vlse8_v_u8m1_tumu(__VA_ARGS__) |
| #define vlse8_v_u8m2 | ( | ... | ) | __riscv_vlse8_v_u8m2(__VA_ARGS__) |
| #define vlse8_v_u8m2_m | ( | ... | ) | __riscv_vlse8_v_u8m2_tumu(__VA_ARGS__) |
| #define vlse8_v_u8m4 | ( | ... | ) | __riscv_vlse8_v_u8m4(__VA_ARGS__) |
| #define vlse8_v_u8m4_m | ( | ... | ) | __riscv_vlse8_v_u8m4_tumu(__VA_ARGS__) |
| #define vlse8_v_u8m8 | ( | ... | ) | __riscv_vlse8_v_u8m8(__VA_ARGS__) |
| #define vlse8_v_u8m8_m | ( | ... | ) | __riscv_vlse8_v_u8m8_tumu(__VA_ARGS__) |
| #define vlse8_v_u8mf2 | ( | ... | ) | __riscv_vlse8_v_u8mf2(__VA_ARGS__) |
| #define vlse8_v_u8mf2_m | ( | ... | ) | __riscv_vlse8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlse8_v_u8mf4 | ( | ... | ) | __riscv_vlse8_v_u8mf4(__VA_ARGS__) |
| #define vlse8_v_u8mf4_m | ( | ... | ) | __riscv_vlse8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlse8_v_u8mf8 | ( | ... | ) | __riscv_vlse8_v_u8mf8(__VA_ARGS__) |
| #define vlse8_v_u8mf8_m | ( | ... | ) | __riscv_vlse8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlseg2e16_v_f16m1 | ( | ... | ) | __riscv_vlseg2e16_v_f16m1(__VA_ARGS__) |
| #define vlseg2e16_v_f16m1_m | ( | ... | ) | __riscv_vlseg2e16_v_f16m1_tumu(__VA_ARGS__) |
| #define vlseg2e16_v_f16m2 | ( | ... | ) | __riscv_vlseg2e16_v_f16m2(__VA_ARGS__) |
| #define vlseg2e16_v_f16m2_m | ( | ... | ) | __riscv_vlseg2e16_v_f16m2_tumu(__VA_ARGS__) |
| #define vlseg2e16_v_f16m4 | ( | ... | ) | __riscv_vlseg2e16_v_f16m4(__VA_ARGS__) |
| #define vlseg2e16_v_f16m4_m | ( | ... | ) | __riscv_vlseg2e16_v_f16m4_tumu(__VA_ARGS__) |
| #define vlseg2e16_v_f16mf2 | ( | ... | ) | __riscv_vlseg2e16_v_f16mf2(__VA_ARGS__) |
| #define vlseg2e16_v_f16mf2_m | ( | ... | ) | __riscv_vlseg2e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlseg2e16_v_f16mf4 | ( | ... | ) | __riscv_vlseg2e16_v_f16mf4(__VA_ARGS__) |
| #define vlseg2e16_v_f16mf4_m | ( | ... | ) | __riscv_vlseg2e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlseg2e16_v_i16m1 | ( | ... | ) | __riscv_vlseg2e16_v_i16m1(__VA_ARGS__) |
| #define vlseg2e16_v_i16m1_m | ( | ... | ) | __riscv_vlseg2e16_v_i16m1_tumu(__VA_ARGS__) |
| #define vlseg2e16_v_i16m2 | ( | ... | ) | __riscv_vlseg2e16_v_i16m2(__VA_ARGS__) |
| #define vlseg2e16_v_i16m2_m | ( | ... | ) | __riscv_vlseg2e16_v_i16m2_tumu(__VA_ARGS__) |
| #define vlseg2e16_v_i16m4 | ( | ... | ) | __riscv_vlseg2e16_v_i16m4(__VA_ARGS__) |
| #define vlseg2e16_v_i16m4_m | ( | ... | ) | __riscv_vlseg2e16_v_i16m4_tumu(__VA_ARGS__) |
| #define vlseg2e16_v_i16mf2 | ( | ... | ) | __riscv_vlseg2e16_v_i16mf2(__VA_ARGS__) |
| #define vlseg2e16_v_i16mf2_m | ( | ... | ) | __riscv_vlseg2e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlseg2e16_v_i16mf4 | ( | ... | ) | __riscv_vlseg2e16_v_i16mf4(__VA_ARGS__) |
| #define vlseg2e16_v_i16mf4_m | ( | ... | ) | __riscv_vlseg2e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlseg2e16_v_u16m1 | ( | ... | ) | __riscv_vlseg2e16_v_u16m1(__VA_ARGS__) |
| #define vlseg2e16_v_u16m1_m | ( | ... | ) | __riscv_vlseg2e16_v_u16m1_tumu(__VA_ARGS__) |
| #define vlseg2e16_v_u16m2 | ( | ... | ) | __riscv_vlseg2e16_v_u16m2(__VA_ARGS__) |
| #define vlseg2e16_v_u16m2_m | ( | ... | ) | __riscv_vlseg2e16_v_u16m2_tumu(__VA_ARGS__) |
| #define vlseg2e16_v_u16m4 | ( | ... | ) | __riscv_vlseg2e16_v_u16m4(__VA_ARGS__) |
| #define vlseg2e16_v_u16m4_m | ( | ... | ) | __riscv_vlseg2e16_v_u16m4_tumu(__VA_ARGS__) |
| #define vlseg2e16_v_u16mf2 | ( | ... | ) | __riscv_vlseg2e16_v_u16mf2(__VA_ARGS__) |
| #define vlseg2e16_v_u16mf2_m | ( | ... | ) | __riscv_vlseg2e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlseg2e16_v_u16mf4 | ( | ... | ) | __riscv_vlseg2e16_v_u16mf4(__VA_ARGS__) |
| #define vlseg2e16_v_u16mf4_m | ( | ... | ) | __riscv_vlseg2e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlseg2e16ff_v_f16m1 | ( | ... | ) | __riscv_vlseg2e16ff_v_f16m1(__VA_ARGS__) |
| #define vlseg2e16ff_v_f16m1_m | ( | ... | ) | __riscv_vlseg2e16ff_v_f16m1_tumu(__VA_ARGS__) |
| #define vlseg2e16ff_v_f16m2 | ( | ... | ) | __riscv_vlseg2e16ff_v_f16m2(__VA_ARGS__) |
| #define vlseg2e16ff_v_f16m2_m | ( | ... | ) | __riscv_vlseg2e16ff_v_f16m2_tumu(__VA_ARGS__) |
| #define vlseg2e16ff_v_f16m4 | ( | ... | ) | __riscv_vlseg2e16ff_v_f16m4(__VA_ARGS__) |
| #define vlseg2e16ff_v_f16m4_m | ( | ... | ) | __riscv_vlseg2e16ff_v_f16m4_tumu(__VA_ARGS__) |
| #define vlseg2e16ff_v_f16mf2 | ( | ... | ) | __riscv_vlseg2e16ff_v_f16mf2(__VA_ARGS__) |
| #define vlseg2e16ff_v_f16mf2_m | ( | ... | ) | __riscv_vlseg2e16ff_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlseg2e16ff_v_f16mf4 | ( | ... | ) | __riscv_vlseg2e16ff_v_f16mf4(__VA_ARGS__) |
| #define vlseg2e16ff_v_f16mf4_m | ( | ... | ) | __riscv_vlseg2e16ff_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlseg2e16ff_v_i16m1 | ( | ... | ) | __riscv_vlseg2e16ff_v_i16m1(__VA_ARGS__) |
| #define vlseg2e16ff_v_i16m1_m | ( | ... | ) | __riscv_vlseg2e16ff_v_i16m1_tumu(__VA_ARGS__) |
| #define vlseg2e16ff_v_i16m2 | ( | ... | ) | __riscv_vlseg2e16ff_v_i16m2(__VA_ARGS__) |
| #define vlseg2e16ff_v_i16m2_m | ( | ... | ) | __riscv_vlseg2e16ff_v_i16m2_tumu(__VA_ARGS__) |
| #define vlseg2e16ff_v_i16m4 | ( | ... | ) | __riscv_vlseg2e16ff_v_i16m4(__VA_ARGS__) |
| #define vlseg2e16ff_v_i16m4_m | ( | ... | ) | __riscv_vlseg2e16ff_v_i16m4_tumu(__VA_ARGS__) |
| #define vlseg2e16ff_v_i16mf2 | ( | ... | ) | __riscv_vlseg2e16ff_v_i16mf2(__VA_ARGS__) |
| #define vlseg2e16ff_v_i16mf2_m | ( | ... | ) | __riscv_vlseg2e16ff_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlseg2e16ff_v_i16mf4 | ( | ... | ) | __riscv_vlseg2e16ff_v_i16mf4(__VA_ARGS__) |
| #define vlseg2e16ff_v_i16mf4_m | ( | ... | ) | __riscv_vlseg2e16ff_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlseg2e16ff_v_u16m1 | ( | ... | ) | __riscv_vlseg2e16ff_v_u16m1(__VA_ARGS__) |
| #define vlseg2e16ff_v_u16m1_m | ( | ... | ) | __riscv_vlseg2e16ff_v_u16m1_tumu(__VA_ARGS__) |
| #define vlseg2e16ff_v_u16m2 | ( | ... | ) | __riscv_vlseg2e16ff_v_u16m2(__VA_ARGS__) |
| #define vlseg2e16ff_v_u16m2_m | ( | ... | ) | __riscv_vlseg2e16ff_v_u16m2_tumu(__VA_ARGS__) |
| #define vlseg2e16ff_v_u16m4 | ( | ... | ) | __riscv_vlseg2e16ff_v_u16m4(__VA_ARGS__) |
| #define vlseg2e16ff_v_u16m4_m | ( | ... | ) | __riscv_vlseg2e16ff_v_u16m4_tumu(__VA_ARGS__) |
| #define vlseg2e16ff_v_u16mf2 | ( | ... | ) | __riscv_vlseg2e16ff_v_u16mf2(__VA_ARGS__) |
| #define vlseg2e16ff_v_u16mf2_m | ( | ... | ) | __riscv_vlseg2e16ff_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlseg2e16ff_v_u16mf4 | ( | ... | ) | __riscv_vlseg2e16ff_v_u16mf4(__VA_ARGS__) |
| #define vlseg2e16ff_v_u16mf4_m | ( | ... | ) | __riscv_vlseg2e16ff_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlseg2e32_v_f32m1 | ( | ... | ) | __riscv_vlseg2e32_v_f32m1(__VA_ARGS__) |
| #define vlseg2e32_v_f32m1_m | ( | ... | ) | __riscv_vlseg2e32_v_f32m1_tumu(__VA_ARGS__) |
| #define vlseg2e32_v_f32m2 | ( | ... | ) | __riscv_vlseg2e32_v_f32m2(__VA_ARGS__) |
| #define vlseg2e32_v_f32m2_m | ( | ... | ) | __riscv_vlseg2e32_v_f32m2_tumu(__VA_ARGS__) |
| #define vlseg2e32_v_f32m4 | ( | ... | ) | __riscv_vlseg2e32_v_f32m4(__VA_ARGS__) |
| #define vlseg2e32_v_f32m4_m | ( | ... | ) | __riscv_vlseg2e32_v_f32m4_tumu(__VA_ARGS__) |
| #define vlseg2e32_v_f32mf2 | ( | ... | ) | __riscv_vlseg2e32_v_f32mf2(__VA_ARGS__) |
| #define vlseg2e32_v_f32mf2_m | ( | ... | ) | __riscv_vlseg2e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlseg2e32_v_i32m1 | ( | ... | ) | __riscv_vlseg2e32_v_i32m1(__VA_ARGS__) |
| #define vlseg2e32_v_i32m1_m | ( | ... | ) | __riscv_vlseg2e32_v_i32m1_tumu(__VA_ARGS__) |
| #define vlseg2e32_v_i32m2 | ( | ... | ) | __riscv_vlseg2e32_v_i32m2(__VA_ARGS__) |
| #define vlseg2e32_v_i32m2_m | ( | ... | ) | __riscv_vlseg2e32_v_i32m2_tumu(__VA_ARGS__) |
| #define vlseg2e32_v_i32m4 | ( | ... | ) | __riscv_vlseg2e32_v_i32m4(__VA_ARGS__) |
| #define vlseg2e32_v_i32m4_m | ( | ... | ) | __riscv_vlseg2e32_v_i32m4_tumu(__VA_ARGS__) |
| #define vlseg2e32_v_i32mf2 | ( | ... | ) | __riscv_vlseg2e32_v_i32mf2(__VA_ARGS__) |
| #define vlseg2e32_v_i32mf2_m | ( | ... | ) | __riscv_vlseg2e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlseg2e32_v_u32m1 | ( | ... | ) | __riscv_vlseg2e32_v_u32m1(__VA_ARGS__) |
| #define vlseg2e32_v_u32m1_m | ( | ... | ) | __riscv_vlseg2e32_v_u32m1_tumu(__VA_ARGS__) |
| #define vlseg2e32_v_u32m2 | ( | ... | ) | __riscv_vlseg2e32_v_u32m2(__VA_ARGS__) |
| #define vlseg2e32_v_u32m2_m | ( | ... | ) | __riscv_vlseg2e32_v_u32m2_tumu(__VA_ARGS__) |
| #define vlseg2e32_v_u32m4 | ( | ... | ) | __riscv_vlseg2e32_v_u32m4(__VA_ARGS__) |
| #define vlseg2e32_v_u32m4_m | ( | ... | ) | __riscv_vlseg2e32_v_u32m4_tumu(__VA_ARGS__) |
| #define vlseg2e32_v_u32mf2 | ( | ... | ) | __riscv_vlseg2e32_v_u32mf2(__VA_ARGS__) |
| #define vlseg2e32_v_u32mf2_m | ( | ... | ) | __riscv_vlseg2e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlseg2e32ff_v_f32m1 | ( | ... | ) | __riscv_vlseg2e32ff_v_f32m1(__VA_ARGS__) |
| #define vlseg2e32ff_v_f32m1_m | ( | ... | ) | __riscv_vlseg2e32ff_v_f32m1_tumu(__VA_ARGS__) |
| #define vlseg2e32ff_v_f32m2 | ( | ... | ) | __riscv_vlseg2e32ff_v_f32m2(__VA_ARGS__) |
| #define vlseg2e32ff_v_f32m2_m | ( | ... | ) | __riscv_vlseg2e32ff_v_f32m2_tumu(__VA_ARGS__) |
| #define vlseg2e32ff_v_f32m4 | ( | ... | ) | __riscv_vlseg2e32ff_v_f32m4(__VA_ARGS__) |
| #define vlseg2e32ff_v_f32m4_m | ( | ... | ) | __riscv_vlseg2e32ff_v_f32m4_tumu(__VA_ARGS__) |
| #define vlseg2e32ff_v_f32mf2 | ( | ... | ) | __riscv_vlseg2e32ff_v_f32mf2(__VA_ARGS__) |
| #define vlseg2e32ff_v_f32mf2_m | ( | ... | ) | __riscv_vlseg2e32ff_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlseg2e32ff_v_i32m1 | ( | ... | ) | __riscv_vlseg2e32ff_v_i32m1(__VA_ARGS__) |
| #define vlseg2e32ff_v_i32m1_m | ( | ... | ) | __riscv_vlseg2e32ff_v_i32m1_tumu(__VA_ARGS__) |
| #define vlseg2e32ff_v_i32m2 | ( | ... | ) | __riscv_vlseg2e32ff_v_i32m2(__VA_ARGS__) |
| #define vlseg2e32ff_v_i32m2_m | ( | ... | ) | __riscv_vlseg2e32ff_v_i32m2_tumu(__VA_ARGS__) |
| #define vlseg2e32ff_v_i32m4 | ( | ... | ) | __riscv_vlseg2e32ff_v_i32m4(__VA_ARGS__) |
| #define vlseg2e32ff_v_i32m4_m | ( | ... | ) | __riscv_vlseg2e32ff_v_i32m4_tumu(__VA_ARGS__) |
| #define vlseg2e32ff_v_i32mf2 | ( | ... | ) | __riscv_vlseg2e32ff_v_i32mf2(__VA_ARGS__) |
| #define vlseg2e32ff_v_i32mf2_m | ( | ... | ) | __riscv_vlseg2e32ff_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlseg2e32ff_v_u32m1 | ( | ... | ) | __riscv_vlseg2e32ff_v_u32m1(__VA_ARGS__) |
| #define vlseg2e32ff_v_u32m1_m | ( | ... | ) | __riscv_vlseg2e32ff_v_u32m1_tumu(__VA_ARGS__) |
| #define vlseg2e32ff_v_u32m2 | ( | ... | ) | __riscv_vlseg2e32ff_v_u32m2(__VA_ARGS__) |
| #define vlseg2e32ff_v_u32m2_m | ( | ... | ) | __riscv_vlseg2e32ff_v_u32m2_tumu(__VA_ARGS__) |
| #define vlseg2e32ff_v_u32m4 | ( | ... | ) | __riscv_vlseg2e32ff_v_u32m4(__VA_ARGS__) |
| #define vlseg2e32ff_v_u32m4_m | ( | ... | ) | __riscv_vlseg2e32ff_v_u32m4_tumu(__VA_ARGS__) |
| #define vlseg2e32ff_v_u32mf2 | ( | ... | ) | __riscv_vlseg2e32ff_v_u32mf2(__VA_ARGS__) |
| #define vlseg2e32ff_v_u32mf2_m | ( | ... | ) | __riscv_vlseg2e32ff_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlseg2e64_v_f64m1 | ( | ... | ) | __riscv_vlseg2e64_v_f64m1(__VA_ARGS__) |
| #define vlseg2e64_v_f64m1_m | ( | ... | ) | __riscv_vlseg2e64_v_f64m1_tumu(__VA_ARGS__) |
| #define vlseg2e64_v_f64m2 | ( | ... | ) | __riscv_vlseg2e64_v_f64m2(__VA_ARGS__) |
| #define vlseg2e64_v_f64m2_m | ( | ... | ) | __riscv_vlseg2e64_v_f64m2_tumu(__VA_ARGS__) |
| #define vlseg2e64_v_f64m4 | ( | ... | ) | __riscv_vlseg2e64_v_f64m4(__VA_ARGS__) |
| #define vlseg2e64_v_f64m4_m | ( | ... | ) | __riscv_vlseg2e64_v_f64m4_tumu(__VA_ARGS__) |
| #define vlseg2e64_v_i64m1 | ( | ... | ) | __riscv_vlseg2e64_v_i64m1(__VA_ARGS__) |
| #define vlseg2e64_v_i64m1_m | ( | ... | ) | __riscv_vlseg2e64_v_i64m1_tumu(__VA_ARGS__) |
| #define vlseg2e64_v_i64m2 | ( | ... | ) | __riscv_vlseg2e64_v_i64m2(__VA_ARGS__) |
| #define vlseg2e64_v_i64m2_m | ( | ... | ) | __riscv_vlseg2e64_v_i64m2_tumu(__VA_ARGS__) |
| #define vlseg2e64_v_i64m4 | ( | ... | ) | __riscv_vlseg2e64_v_i64m4(__VA_ARGS__) |
| #define vlseg2e64_v_i64m4_m | ( | ... | ) | __riscv_vlseg2e64_v_i64m4_tumu(__VA_ARGS__) |
| #define vlseg2e64_v_u64m1 | ( | ... | ) | __riscv_vlseg2e64_v_u64m1(__VA_ARGS__) |
| #define vlseg2e64_v_u64m1_m | ( | ... | ) | __riscv_vlseg2e64_v_u64m1_tumu(__VA_ARGS__) |
| #define vlseg2e64_v_u64m2 | ( | ... | ) | __riscv_vlseg2e64_v_u64m2(__VA_ARGS__) |
| #define vlseg2e64_v_u64m2_m | ( | ... | ) | __riscv_vlseg2e64_v_u64m2_tumu(__VA_ARGS__) |
| #define vlseg2e64_v_u64m4 | ( | ... | ) | __riscv_vlseg2e64_v_u64m4(__VA_ARGS__) |
| #define vlseg2e64_v_u64m4_m | ( | ... | ) | __riscv_vlseg2e64_v_u64m4_tumu(__VA_ARGS__) |
| #define vlseg2e64ff_v_f64m1 | ( | ... | ) | __riscv_vlseg2e64ff_v_f64m1(__VA_ARGS__) |
| #define vlseg2e64ff_v_f64m1_m | ( | ... | ) | __riscv_vlseg2e64ff_v_f64m1_tumu(__VA_ARGS__) |
| #define vlseg2e64ff_v_f64m2 | ( | ... | ) | __riscv_vlseg2e64ff_v_f64m2(__VA_ARGS__) |
| #define vlseg2e64ff_v_f64m2_m | ( | ... | ) | __riscv_vlseg2e64ff_v_f64m2_tumu(__VA_ARGS__) |
| #define vlseg2e64ff_v_f64m4 | ( | ... | ) | __riscv_vlseg2e64ff_v_f64m4(__VA_ARGS__) |
| #define vlseg2e64ff_v_f64m4_m | ( | ... | ) | __riscv_vlseg2e64ff_v_f64m4_tumu(__VA_ARGS__) |
| #define vlseg2e64ff_v_i64m1 | ( | ... | ) | __riscv_vlseg2e64ff_v_i64m1(__VA_ARGS__) |
| #define vlseg2e64ff_v_i64m1_m | ( | ... | ) | __riscv_vlseg2e64ff_v_i64m1_tumu(__VA_ARGS__) |
| #define vlseg2e64ff_v_i64m2 | ( | ... | ) | __riscv_vlseg2e64ff_v_i64m2(__VA_ARGS__) |
| #define vlseg2e64ff_v_i64m2_m | ( | ... | ) | __riscv_vlseg2e64ff_v_i64m2_tumu(__VA_ARGS__) |
| #define vlseg2e64ff_v_i64m4 | ( | ... | ) | __riscv_vlseg2e64ff_v_i64m4(__VA_ARGS__) |
| #define vlseg2e64ff_v_i64m4_m | ( | ... | ) | __riscv_vlseg2e64ff_v_i64m4_tumu(__VA_ARGS__) |
| #define vlseg2e64ff_v_u64m1 | ( | ... | ) | __riscv_vlseg2e64ff_v_u64m1(__VA_ARGS__) |
| #define vlseg2e64ff_v_u64m1_m | ( | ... | ) | __riscv_vlseg2e64ff_v_u64m1_tumu(__VA_ARGS__) |
| #define vlseg2e64ff_v_u64m2 | ( | ... | ) | __riscv_vlseg2e64ff_v_u64m2(__VA_ARGS__) |
| #define vlseg2e64ff_v_u64m2_m | ( | ... | ) | __riscv_vlseg2e64ff_v_u64m2_tumu(__VA_ARGS__) |
| #define vlseg2e64ff_v_u64m4 | ( | ... | ) | __riscv_vlseg2e64ff_v_u64m4(__VA_ARGS__) |
| #define vlseg2e64ff_v_u64m4_m | ( | ... | ) | __riscv_vlseg2e64ff_v_u64m4_tumu(__VA_ARGS__) |
| #define vlseg2e8_v_i8m1 | ( | ... | ) | __riscv_vlseg2e8_v_i8m1(__VA_ARGS__) |
| #define vlseg2e8_v_i8m1_m | ( | ... | ) | __riscv_vlseg2e8_v_i8m1_tumu(__VA_ARGS__) |
| #define vlseg2e8_v_i8m2 | ( | ... | ) | __riscv_vlseg2e8_v_i8m2(__VA_ARGS__) |
| #define vlseg2e8_v_i8m2_m | ( | ... | ) | __riscv_vlseg2e8_v_i8m2_tumu(__VA_ARGS__) |
| #define vlseg2e8_v_i8m4 | ( | ... | ) | __riscv_vlseg2e8_v_i8m4(__VA_ARGS__) |
| #define vlseg2e8_v_i8m4_m | ( | ... | ) | __riscv_vlseg2e8_v_i8m4_tumu(__VA_ARGS__) |
| #define vlseg2e8_v_i8mf2 | ( | ... | ) | __riscv_vlseg2e8_v_i8mf2(__VA_ARGS__) |
| #define vlseg2e8_v_i8mf2_m | ( | ... | ) | __riscv_vlseg2e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlseg2e8_v_i8mf4 | ( | ... | ) | __riscv_vlseg2e8_v_i8mf4(__VA_ARGS__) |
| #define vlseg2e8_v_i8mf4_m | ( | ... | ) | __riscv_vlseg2e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlseg2e8_v_i8mf8 | ( | ... | ) | __riscv_vlseg2e8_v_i8mf8(__VA_ARGS__) |
| #define vlseg2e8_v_i8mf8_m | ( | ... | ) | __riscv_vlseg2e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlseg2e8_v_u8m1 | ( | ... | ) | __riscv_vlseg2e8_v_u8m1(__VA_ARGS__) |
| #define vlseg2e8_v_u8m1_m | ( | ... | ) | __riscv_vlseg2e8_v_u8m1_tumu(__VA_ARGS__) |
| #define vlseg2e8_v_u8m2 | ( | ... | ) | __riscv_vlseg2e8_v_u8m2(__VA_ARGS__) |
| #define vlseg2e8_v_u8m2_m | ( | ... | ) | __riscv_vlseg2e8_v_u8m2_tumu(__VA_ARGS__) |
| #define vlseg2e8_v_u8m4 | ( | ... | ) | __riscv_vlseg2e8_v_u8m4(__VA_ARGS__) |
| #define vlseg2e8_v_u8m4_m | ( | ... | ) | __riscv_vlseg2e8_v_u8m4_tumu(__VA_ARGS__) |
| #define vlseg2e8_v_u8mf2 | ( | ... | ) | __riscv_vlseg2e8_v_u8mf2(__VA_ARGS__) |
| #define vlseg2e8_v_u8mf2_m | ( | ... | ) | __riscv_vlseg2e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlseg2e8_v_u8mf4 | ( | ... | ) | __riscv_vlseg2e8_v_u8mf4(__VA_ARGS__) |
| #define vlseg2e8_v_u8mf4_m | ( | ... | ) | __riscv_vlseg2e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlseg2e8_v_u8mf8 | ( | ... | ) | __riscv_vlseg2e8_v_u8mf8(__VA_ARGS__) |
| #define vlseg2e8_v_u8mf8_m | ( | ... | ) | __riscv_vlseg2e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlseg2e8ff_v_i8m1 | ( | ... | ) | __riscv_vlseg2e8ff_v_i8m1(__VA_ARGS__) |
| #define vlseg2e8ff_v_i8m1_m | ( | ... | ) | __riscv_vlseg2e8ff_v_i8m1_tumu(__VA_ARGS__) |
| #define vlseg2e8ff_v_i8m2 | ( | ... | ) | __riscv_vlseg2e8ff_v_i8m2(__VA_ARGS__) |
| #define vlseg2e8ff_v_i8m2_m | ( | ... | ) | __riscv_vlseg2e8ff_v_i8m2_tumu(__VA_ARGS__) |
| #define vlseg2e8ff_v_i8m4 | ( | ... | ) | __riscv_vlseg2e8ff_v_i8m4(__VA_ARGS__) |
| #define vlseg2e8ff_v_i8m4_m | ( | ... | ) | __riscv_vlseg2e8ff_v_i8m4_tumu(__VA_ARGS__) |
| #define vlseg2e8ff_v_i8mf2 | ( | ... | ) | __riscv_vlseg2e8ff_v_i8mf2(__VA_ARGS__) |
| #define vlseg2e8ff_v_i8mf2_m | ( | ... | ) | __riscv_vlseg2e8ff_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlseg2e8ff_v_i8mf4 | ( | ... | ) | __riscv_vlseg2e8ff_v_i8mf4(__VA_ARGS__) |
| #define vlseg2e8ff_v_i8mf4_m | ( | ... | ) | __riscv_vlseg2e8ff_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlseg2e8ff_v_i8mf8 | ( | ... | ) | __riscv_vlseg2e8ff_v_i8mf8(__VA_ARGS__) |
| #define vlseg2e8ff_v_i8mf8_m | ( | ... | ) | __riscv_vlseg2e8ff_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlseg2e8ff_v_u8m1 | ( | ... | ) | __riscv_vlseg2e8ff_v_u8m1(__VA_ARGS__) |
| #define vlseg2e8ff_v_u8m1_m | ( | ... | ) | __riscv_vlseg2e8ff_v_u8m1_tumu(__VA_ARGS__) |
| #define vlseg2e8ff_v_u8m2 | ( | ... | ) | __riscv_vlseg2e8ff_v_u8m2(__VA_ARGS__) |
| #define vlseg2e8ff_v_u8m2_m | ( | ... | ) | __riscv_vlseg2e8ff_v_u8m2_tumu(__VA_ARGS__) |
| #define vlseg2e8ff_v_u8m4 | ( | ... | ) | __riscv_vlseg2e8ff_v_u8m4(__VA_ARGS__) |
| #define vlseg2e8ff_v_u8m4_m | ( | ... | ) | __riscv_vlseg2e8ff_v_u8m4_tumu(__VA_ARGS__) |
| #define vlseg2e8ff_v_u8mf2 | ( | ... | ) | __riscv_vlseg2e8ff_v_u8mf2(__VA_ARGS__) |
| #define vlseg2e8ff_v_u8mf2_m | ( | ... | ) | __riscv_vlseg2e8ff_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlseg2e8ff_v_u8mf4 | ( | ... | ) | __riscv_vlseg2e8ff_v_u8mf4(__VA_ARGS__) |
| #define vlseg2e8ff_v_u8mf4_m | ( | ... | ) | __riscv_vlseg2e8ff_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlseg2e8ff_v_u8mf8 | ( | ... | ) | __riscv_vlseg2e8ff_v_u8mf8(__VA_ARGS__) |
| #define vlseg2e8ff_v_u8mf8_m | ( | ... | ) | __riscv_vlseg2e8ff_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlseg3e16_v_f16m1 | ( | ... | ) | __riscv_vlseg3e16_v_f16m1(__VA_ARGS__) |
| #define vlseg3e16_v_f16m1_m | ( | ... | ) | __riscv_vlseg3e16_v_f16m1_tumu(__VA_ARGS__) |
| #define vlseg3e16_v_f16m2 | ( | ... | ) | __riscv_vlseg3e16_v_f16m2(__VA_ARGS__) |
| #define vlseg3e16_v_f16m2_m | ( | ... | ) | __riscv_vlseg3e16_v_f16m2_tumu(__VA_ARGS__) |
| #define vlseg3e16_v_f16mf2 | ( | ... | ) | __riscv_vlseg3e16_v_f16mf2(__VA_ARGS__) |
| #define vlseg3e16_v_f16mf2_m | ( | ... | ) | __riscv_vlseg3e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlseg3e16_v_f16mf4 | ( | ... | ) | __riscv_vlseg3e16_v_f16mf4(__VA_ARGS__) |
| #define vlseg3e16_v_f16mf4_m | ( | ... | ) | __riscv_vlseg3e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlseg3e16_v_i16m1 | ( | ... | ) | __riscv_vlseg3e16_v_i16m1(__VA_ARGS__) |
| #define vlseg3e16_v_i16m1_m | ( | ... | ) | __riscv_vlseg3e16_v_i16m1_tumu(__VA_ARGS__) |
| #define vlseg3e16_v_i16m2 | ( | ... | ) | __riscv_vlseg3e16_v_i16m2(__VA_ARGS__) |
| #define vlseg3e16_v_i16m2_m | ( | ... | ) | __riscv_vlseg3e16_v_i16m2_tumu(__VA_ARGS__) |
| #define vlseg3e16_v_i16mf2 | ( | ... | ) | __riscv_vlseg3e16_v_i16mf2(__VA_ARGS__) |
| #define vlseg3e16_v_i16mf2_m | ( | ... | ) | __riscv_vlseg3e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlseg3e16_v_i16mf4 | ( | ... | ) | __riscv_vlseg3e16_v_i16mf4(__VA_ARGS__) |
| #define vlseg3e16_v_i16mf4_m | ( | ... | ) | __riscv_vlseg3e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlseg3e16_v_u16m1 | ( | ... | ) | __riscv_vlseg3e16_v_u16m1(__VA_ARGS__) |
| #define vlseg3e16_v_u16m1_m | ( | ... | ) | __riscv_vlseg3e16_v_u16m1_tumu(__VA_ARGS__) |
| #define vlseg3e16_v_u16m2 | ( | ... | ) | __riscv_vlseg3e16_v_u16m2(__VA_ARGS__) |
| #define vlseg3e16_v_u16m2_m | ( | ... | ) | __riscv_vlseg3e16_v_u16m2_tumu(__VA_ARGS__) |
| #define vlseg3e16_v_u16mf2 | ( | ... | ) | __riscv_vlseg3e16_v_u16mf2(__VA_ARGS__) |
| #define vlseg3e16_v_u16mf2_m | ( | ... | ) | __riscv_vlseg3e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlseg3e16_v_u16mf4 | ( | ... | ) | __riscv_vlseg3e16_v_u16mf4(__VA_ARGS__) |
| #define vlseg3e16_v_u16mf4_m | ( | ... | ) | __riscv_vlseg3e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlseg3e16ff_v_f16m1 | ( | ... | ) | __riscv_vlseg3e16ff_v_f16m1(__VA_ARGS__) |
| #define vlseg3e16ff_v_f16m1_m | ( | ... | ) | __riscv_vlseg3e16ff_v_f16m1_tumu(__VA_ARGS__) |
| #define vlseg3e16ff_v_f16m2 | ( | ... | ) | __riscv_vlseg3e16ff_v_f16m2(__VA_ARGS__) |
| #define vlseg3e16ff_v_f16m2_m | ( | ... | ) | __riscv_vlseg3e16ff_v_f16m2_tumu(__VA_ARGS__) |
| #define vlseg3e16ff_v_f16mf2 | ( | ... | ) | __riscv_vlseg3e16ff_v_f16mf2(__VA_ARGS__) |
| #define vlseg3e16ff_v_f16mf2_m | ( | ... | ) | __riscv_vlseg3e16ff_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlseg3e16ff_v_f16mf4 | ( | ... | ) | __riscv_vlseg3e16ff_v_f16mf4(__VA_ARGS__) |
| #define vlseg3e16ff_v_f16mf4_m | ( | ... | ) | __riscv_vlseg3e16ff_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlseg3e16ff_v_i16m1 | ( | ... | ) | __riscv_vlseg3e16ff_v_i16m1(__VA_ARGS__) |
| #define vlseg3e16ff_v_i16m1_m | ( | ... | ) | __riscv_vlseg3e16ff_v_i16m1_tumu(__VA_ARGS__) |
| #define vlseg3e16ff_v_i16m2 | ( | ... | ) | __riscv_vlseg3e16ff_v_i16m2(__VA_ARGS__) |
| #define vlseg3e16ff_v_i16m2_m | ( | ... | ) | __riscv_vlseg3e16ff_v_i16m2_tumu(__VA_ARGS__) |
| #define vlseg3e16ff_v_i16mf2 | ( | ... | ) | __riscv_vlseg3e16ff_v_i16mf2(__VA_ARGS__) |
| #define vlseg3e16ff_v_i16mf2_m | ( | ... | ) | __riscv_vlseg3e16ff_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlseg3e16ff_v_i16mf4 | ( | ... | ) | __riscv_vlseg3e16ff_v_i16mf4(__VA_ARGS__) |
| #define vlseg3e16ff_v_i16mf4_m | ( | ... | ) | __riscv_vlseg3e16ff_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlseg3e16ff_v_u16m1 | ( | ... | ) | __riscv_vlseg3e16ff_v_u16m1(__VA_ARGS__) |
| #define vlseg3e16ff_v_u16m1_m | ( | ... | ) | __riscv_vlseg3e16ff_v_u16m1_tumu(__VA_ARGS__) |
| #define vlseg3e16ff_v_u16m2 | ( | ... | ) | __riscv_vlseg3e16ff_v_u16m2(__VA_ARGS__) |
| #define vlseg3e16ff_v_u16m2_m | ( | ... | ) | __riscv_vlseg3e16ff_v_u16m2_tumu(__VA_ARGS__) |
| #define vlseg3e16ff_v_u16mf2 | ( | ... | ) | __riscv_vlseg3e16ff_v_u16mf2(__VA_ARGS__) |
| #define vlseg3e16ff_v_u16mf2_m | ( | ... | ) | __riscv_vlseg3e16ff_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlseg3e16ff_v_u16mf4 | ( | ... | ) | __riscv_vlseg3e16ff_v_u16mf4(__VA_ARGS__) |
| #define vlseg3e16ff_v_u16mf4_m | ( | ... | ) | __riscv_vlseg3e16ff_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlseg3e32_v_f32m1 | ( | ... | ) | __riscv_vlseg3e32_v_f32m1(__VA_ARGS__) |
| #define vlseg3e32_v_f32m1_m | ( | ... | ) | __riscv_vlseg3e32_v_f32m1_tumu(__VA_ARGS__) |
| #define vlseg3e32_v_f32m2 | ( | ... | ) | __riscv_vlseg3e32_v_f32m2(__VA_ARGS__) |
| #define vlseg3e32_v_f32m2_m | ( | ... | ) | __riscv_vlseg3e32_v_f32m2_tumu(__VA_ARGS__) |
| #define vlseg3e32_v_f32mf2 | ( | ... | ) | __riscv_vlseg3e32_v_f32mf2(__VA_ARGS__) |
| #define vlseg3e32_v_f32mf2_m | ( | ... | ) | __riscv_vlseg3e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlseg3e32_v_i32m1 | ( | ... | ) | __riscv_vlseg3e32_v_i32m1(__VA_ARGS__) |
| #define vlseg3e32_v_i32m1_m | ( | ... | ) | __riscv_vlseg3e32_v_i32m1_tumu(__VA_ARGS__) |
| #define vlseg3e32_v_i32m2 | ( | ... | ) | __riscv_vlseg3e32_v_i32m2(__VA_ARGS__) |
| #define vlseg3e32_v_i32m2_m | ( | ... | ) | __riscv_vlseg3e32_v_i32m2_tumu(__VA_ARGS__) |
| #define vlseg3e32_v_i32mf2 | ( | ... | ) | __riscv_vlseg3e32_v_i32mf2(__VA_ARGS__) |
| #define vlseg3e32_v_i32mf2_m | ( | ... | ) | __riscv_vlseg3e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlseg3e32_v_u32m1 | ( | ... | ) | __riscv_vlseg3e32_v_u32m1(__VA_ARGS__) |
| #define vlseg3e32_v_u32m1_m | ( | ... | ) | __riscv_vlseg3e32_v_u32m1_tumu(__VA_ARGS__) |
| #define vlseg3e32_v_u32m2 | ( | ... | ) | __riscv_vlseg3e32_v_u32m2(__VA_ARGS__) |
| #define vlseg3e32_v_u32m2_m | ( | ... | ) | __riscv_vlseg3e32_v_u32m2_tumu(__VA_ARGS__) |
| #define vlseg3e32_v_u32mf2 | ( | ... | ) | __riscv_vlseg3e32_v_u32mf2(__VA_ARGS__) |
| #define vlseg3e32_v_u32mf2_m | ( | ... | ) | __riscv_vlseg3e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlseg3e32ff_v_f32m1 | ( | ... | ) | __riscv_vlseg3e32ff_v_f32m1(__VA_ARGS__) |
| #define vlseg3e32ff_v_f32m1_m | ( | ... | ) | __riscv_vlseg3e32ff_v_f32m1_tumu(__VA_ARGS__) |
| #define vlseg3e32ff_v_f32m2 | ( | ... | ) | __riscv_vlseg3e32ff_v_f32m2(__VA_ARGS__) |
| #define vlseg3e32ff_v_f32m2_m | ( | ... | ) | __riscv_vlseg3e32ff_v_f32m2_tumu(__VA_ARGS__) |
| #define vlseg3e32ff_v_f32mf2 | ( | ... | ) | __riscv_vlseg3e32ff_v_f32mf2(__VA_ARGS__) |
| #define vlseg3e32ff_v_f32mf2_m | ( | ... | ) | __riscv_vlseg3e32ff_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlseg3e32ff_v_i32m1 | ( | ... | ) | __riscv_vlseg3e32ff_v_i32m1(__VA_ARGS__) |
| #define vlseg3e32ff_v_i32m1_m | ( | ... | ) | __riscv_vlseg3e32ff_v_i32m1_tumu(__VA_ARGS__) |
| #define vlseg3e32ff_v_i32m2 | ( | ... | ) | __riscv_vlseg3e32ff_v_i32m2(__VA_ARGS__) |
| #define vlseg3e32ff_v_i32m2_m | ( | ... | ) | __riscv_vlseg3e32ff_v_i32m2_tumu(__VA_ARGS__) |
| #define vlseg3e32ff_v_i32mf2 | ( | ... | ) | __riscv_vlseg3e32ff_v_i32mf2(__VA_ARGS__) |
| #define vlseg3e32ff_v_i32mf2_m | ( | ... | ) | __riscv_vlseg3e32ff_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlseg3e32ff_v_u32m1 | ( | ... | ) | __riscv_vlseg3e32ff_v_u32m1(__VA_ARGS__) |
| #define vlseg3e32ff_v_u32m1_m | ( | ... | ) | __riscv_vlseg3e32ff_v_u32m1_tumu(__VA_ARGS__) |
| #define vlseg3e32ff_v_u32m2 | ( | ... | ) | __riscv_vlseg3e32ff_v_u32m2(__VA_ARGS__) |
| #define vlseg3e32ff_v_u32m2_m | ( | ... | ) | __riscv_vlseg3e32ff_v_u32m2_tumu(__VA_ARGS__) |
| #define vlseg3e32ff_v_u32mf2 | ( | ... | ) | __riscv_vlseg3e32ff_v_u32mf2(__VA_ARGS__) |
| #define vlseg3e32ff_v_u32mf2_m | ( | ... | ) | __riscv_vlseg3e32ff_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlseg3e64_v_f64m1 | ( | ... | ) | __riscv_vlseg3e64_v_f64m1(__VA_ARGS__) |
| #define vlseg3e64_v_f64m1_m | ( | ... | ) | __riscv_vlseg3e64_v_f64m1_tumu(__VA_ARGS__) |
| #define vlseg3e64_v_f64m2 | ( | ... | ) | __riscv_vlseg3e64_v_f64m2(__VA_ARGS__) |
| #define vlseg3e64_v_f64m2_m | ( | ... | ) | __riscv_vlseg3e64_v_f64m2_tumu(__VA_ARGS__) |
| #define vlseg3e64_v_i64m1 | ( | ... | ) | __riscv_vlseg3e64_v_i64m1(__VA_ARGS__) |
| #define vlseg3e64_v_i64m1_m | ( | ... | ) | __riscv_vlseg3e64_v_i64m1_tumu(__VA_ARGS__) |
| #define vlseg3e64_v_i64m2 | ( | ... | ) | __riscv_vlseg3e64_v_i64m2(__VA_ARGS__) |
| #define vlseg3e64_v_i64m2_m | ( | ... | ) | __riscv_vlseg3e64_v_i64m2_tumu(__VA_ARGS__) |
| #define vlseg3e64_v_u64m1 | ( | ... | ) | __riscv_vlseg3e64_v_u64m1(__VA_ARGS__) |
| #define vlseg3e64_v_u64m1_m | ( | ... | ) | __riscv_vlseg3e64_v_u64m1_tumu(__VA_ARGS__) |
| #define vlseg3e64_v_u64m2 | ( | ... | ) | __riscv_vlseg3e64_v_u64m2(__VA_ARGS__) |
| #define vlseg3e64_v_u64m2_m | ( | ... | ) | __riscv_vlseg3e64_v_u64m2_tumu(__VA_ARGS__) |
| #define vlseg3e64ff_v_f64m1 | ( | ... | ) | __riscv_vlseg3e64ff_v_f64m1(__VA_ARGS__) |
| #define vlseg3e64ff_v_f64m1_m | ( | ... | ) | __riscv_vlseg3e64ff_v_f64m1_tumu(__VA_ARGS__) |
| #define vlseg3e64ff_v_f64m2 | ( | ... | ) | __riscv_vlseg3e64ff_v_f64m2(__VA_ARGS__) |
| #define vlseg3e64ff_v_f64m2_m | ( | ... | ) | __riscv_vlseg3e64ff_v_f64m2_tumu(__VA_ARGS__) |
| #define vlseg3e64ff_v_i64m1 | ( | ... | ) | __riscv_vlseg3e64ff_v_i64m1(__VA_ARGS__) |
| #define vlseg3e64ff_v_i64m1_m | ( | ... | ) | __riscv_vlseg3e64ff_v_i64m1_tumu(__VA_ARGS__) |
| #define vlseg3e64ff_v_i64m2 | ( | ... | ) | __riscv_vlseg3e64ff_v_i64m2(__VA_ARGS__) |
| #define vlseg3e64ff_v_i64m2_m | ( | ... | ) | __riscv_vlseg3e64ff_v_i64m2_tumu(__VA_ARGS__) |
| #define vlseg3e64ff_v_u64m1 | ( | ... | ) | __riscv_vlseg3e64ff_v_u64m1(__VA_ARGS__) |
| #define vlseg3e64ff_v_u64m1_m | ( | ... | ) | __riscv_vlseg3e64ff_v_u64m1_tumu(__VA_ARGS__) |
| #define vlseg3e64ff_v_u64m2 | ( | ... | ) | __riscv_vlseg3e64ff_v_u64m2(__VA_ARGS__) |
| #define vlseg3e64ff_v_u64m2_m | ( | ... | ) | __riscv_vlseg3e64ff_v_u64m2_tumu(__VA_ARGS__) |
| #define vlseg3e8_v_i8m1 | ( | ... | ) | __riscv_vlseg3e8_v_i8m1(__VA_ARGS__) |
| #define vlseg3e8_v_i8m1_m | ( | ... | ) | __riscv_vlseg3e8_v_i8m1_tumu(__VA_ARGS__) |
| #define vlseg3e8_v_i8m2 | ( | ... | ) | __riscv_vlseg3e8_v_i8m2(__VA_ARGS__) |
| #define vlseg3e8_v_i8m2_m | ( | ... | ) | __riscv_vlseg3e8_v_i8m2_tumu(__VA_ARGS__) |
| #define vlseg3e8_v_i8mf2 | ( | ... | ) | __riscv_vlseg3e8_v_i8mf2(__VA_ARGS__) |
| #define vlseg3e8_v_i8mf2_m | ( | ... | ) | __riscv_vlseg3e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlseg3e8_v_i8mf4 | ( | ... | ) | __riscv_vlseg3e8_v_i8mf4(__VA_ARGS__) |
| #define vlseg3e8_v_i8mf4_m | ( | ... | ) | __riscv_vlseg3e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlseg3e8_v_i8mf8 | ( | ... | ) | __riscv_vlseg3e8_v_i8mf8(__VA_ARGS__) |
| #define vlseg3e8_v_i8mf8_m | ( | ... | ) | __riscv_vlseg3e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlseg3e8_v_u8m1 | ( | ... | ) | __riscv_vlseg3e8_v_u8m1(__VA_ARGS__) |
| #define vlseg3e8_v_u8m1_m | ( | ... | ) | __riscv_vlseg3e8_v_u8m1_tumu(__VA_ARGS__) |
| #define vlseg3e8_v_u8m2 | ( | ... | ) | __riscv_vlseg3e8_v_u8m2(__VA_ARGS__) |
| #define vlseg3e8_v_u8m2_m | ( | ... | ) | __riscv_vlseg3e8_v_u8m2_tumu(__VA_ARGS__) |
| #define vlseg3e8_v_u8mf2 | ( | ... | ) | __riscv_vlseg3e8_v_u8mf2(__VA_ARGS__) |
| #define vlseg3e8_v_u8mf2_m | ( | ... | ) | __riscv_vlseg3e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlseg3e8_v_u8mf4 | ( | ... | ) | __riscv_vlseg3e8_v_u8mf4(__VA_ARGS__) |
| #define vlseg3e8_v_u8mf4_m | ( | ... | ) | __riscv_vlseg3e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlseg3e8_v_u8mf8 | ( | ... | ) | __riscv_vlseg3e8_v_u8mf8(__VA_ARGS__) |
| #define vlseg3e8_v_u8mf8_m | ( | ... | ) | __riscv_vlseg3e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlseg3e8ff_v_i8m1 | ( | ... | ) | __riscv_vlseg3e8ff_v_i8m1(__VA_ARGS__) |
| #define vlseg3e8ff_v_i8m1_m | ( | ... | ) | __riscv_vlseg3e8ff_v_i8m1_tumu(__VA_ARGS__) |
| #define vlseg3e8ff_v_i8m2 | ( | ... | ) | __riscv_vlseg3e8ff_v_i8m2(__VA_ARGS__) |
| #define vlseg3e8ff_v_i8m2_m | ( | ... | ) | __riscv_vlseg3e8ff_v_i8m2_tumu(__VA_ARGS__) |
| #define vlseg3e8ff_v_i8mf2 | ( | ... | ) | __riscv_vlseg3e8ff_v_i8mf2(__VA_ARGS__) |
| #define vlseg3e8ff_v_i8mf2_m | ( | ... | ) | __riscv_vlseg3e8ff_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlseg3e8ff_v_i8mf4 | ( | ... | ) | __riscv_vlseg3e8ff_v_i8mf4(__VA_ARGS__) |
| #define vlseg3e8ff_v_i8mf4_m | ( | ... | ) | __riscv_vlseg3e8ff_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlseg3e8ff_v_i8mf8 | ( | ... | ) | __riscv_vlseg3e8ff_v_i8mf8(__VA_ARGS__) |
| #define vlseg3e8ff_v_i8mf8_m | ( | ... | ) | __riscv_vlseg3e8ff_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlseg3e8ff_v_u8m1 | ( | ... | ) | __riscv_vlseg3e8ff_v_u8m1(__VA_ARGS__) |
| #define vlseg3e8ff_v_u8m1_m | ( | ... | ) | __riscv_vlseg3e8ff_v_u8m1_tumu(__VA_ARGS__) |
| #define vlseg3e8ff_v_u8m2 | ( | ... | ) | __riscv_vlseg3e8ff_v_u8m2(__VA_ARGS__) |
| #define vlseg3e8ff_v_u8m2_m | ( | ... | ) | __riscv_vlseg3e8ff_v_u8m2_tumu(__VA_ARGS__) |
| #define vlseg3e8ff_v_u8mf2 | ( | ... | ) | __riscv_vlseg3e8ff_v_u8mf2(__VA_ARGS__) |
| #define vlseg3e8ff_v_u8mf2_m | ( | ... | ) | __riscv_vlseg3e8ff_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlseg3e8ff_v_u8mf4 | ( | ... | ) | __riscv_vlseg3e8ff_v_u8mf4(__VA_ARGS__) |
| #define vlseg3e8ff_v_u8mf4_m | ( | ... | ) | __riscv_vlseg3e8ff_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlseg3e8ff_v_u8mf8 | ( | ... | ) | __riscv_vlseg3e8ff_v_u8mf8(__VA_ARGS__) |
| #define vlseg3e8ff_v_u8mf8_m | ( | ... | ) | __riscv_vlseg3e8ff_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlseg4e16_v_f16m1 | ( | ... | ) | __riscv_vlseg4e16_v_f16m1(__VA_ARGS__) |
| #define vlseg4e16_v_f16m1_m | ( | ... | ) | __riscv_vlseg4e16_v_f16m1_tumu(__VA_ARGS__) |
| #define vlseg4e16_v_f16m2 | ( | ... | ) | __riscv_vlseg4e16_v_f16m2(__VA_ARGS__) |
| #define vlseg4e16_v_f16m2_m | ( | ... | ) | __riscv_vlseg4e16_v_f16m2_tumu(__VA_ARGS__) |
| #define vlseg4e16_v_f16mf2 | ( | ... | ) | __riscv_vlseg4e16_v_f16mf2(__VA_ARGS__) |
| #define vlseg4e16_v_f16mf2_m | ( | ... | ) | __riscv_vlseg4e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlseg4e16_v_f16mf4 | ( | ... | ) | __riscv_vlseg4e16_v_f16mf4(__VA_ARGS__) |
| #define vlseg4e16_v_f16mf4_m | ( | ... | ) | __riscv_vlseg4e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlseg4e16_v_i16m1 | ( | ... | ) | __riscv_vlseg4e16_v_i16m1(__VA_ARGS__) |
| #define vlseg4e16_v_i16m1_m | ( | ... | ) | __riscv_vlseg4e16_v_i16m1_tumu(__VA_ARGS__) |
| #define vlseg4e16_v_i16m2 | ( | ... | ) | __riscv_vlseg4e16_v_i16m2(__VA_ARGS__) |
| #define vlseg4e16_v_i16m2_m | ( | ... | ) | __riscv_vlseg4e16_v_i16m2_tumu(__VA_ARGS__) |
| #define vlseg4e16_v_i16mf2 | ( | ... | ) | __riscv_vlseg4e16_v_i16mf2(__VA_ARGS__) |
| #define vlseg4e16_v_i16mf2_m | ( | ... | ) | __riscv_vlseg4e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlseg4e16_v_i16mf4 | ( | ... | ) | __riscv_vlseg4e16_v_i16mf4(__VA_ARGS__) |
| #define vlseg4e16_v_i16mf4_m | ( | ... | ) | __riscv_vlseg4e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlseg4e16_v_u16m1 | ( | ... | ) | __riscv_vlseg4e16_v_u16m1(__VA_ARGS__) |
| #define vlseg4e16_v_u16m1_m | ( | ... | ) | __riscv_vlseg4e16_v_u16m1_tumu(__VA_ARGS__) |
| #define vlseg4e16_v_u16m2 | ( | ... | ) | __riscv_vlseg4e16_v_u16m2(__VA_ARGS__) |
| #define vlseg4e16_v_u16m2_m | ( | ... | ) | __riscv_vlseg4e16_v_u16m2_tumu(__VA_ARGS__) |
| #define vlseg4e16_v_u16mf2 | ( | ... | ) | __riscv_vlseg4e16_v_u16mf2(__VA_ARGS__) |
| #define vlseg4e16_v_u16mf2_m | ( | ... | ) | __riscv_vlseg4e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlseg4e16_v_u16mf4 | ( | ... | ) | __riscv_vlseg4e16_v_u16mf4(__VA_ARGS__) |
| #define vlseg4e16_v_u16mf4_m | ( | ... | ) | __riscv_vlseg4e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlseg4e16ff_v_f16m1 | ( | ... | ) | __riscv_vlseg4e16ff_v_f16m1(__VA_ARGS__) |
| #define vlseg4e16ff_v_f16m1_m | ( | ... | ) | __riscv_vlseg4e16ff_v_f16m1_tumu(__VA_ARGS__) |
| #define vlseg4e16ff_v_f16m2 | ( | ... | ) | __riscv_vlseg4e16ff_v_f16m2(__VA_ARGS__) |
| #define vlseg4e16ff_v_f16m2_m | ( | ... | ) | __riscv_vlseg4e16ff_v_f16m2_tumu(__VA_ARGS__) |
| #define vlseg4e16ff_v_f16mf2 | ( | ... | ) | __riscv_vlseg4e16ff_v_f16mf2(__VA_ARGS__) |
| #define vlseg4e16ff_v_f16mf2_m | ( | ... | ) | __riscv_vlseg4e16ff_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlseg4e16ff_v_f16mf4 | ( | ... | ) | __riscv_vlseg4e16ff_v_f16mf4(__VA_ARGS__) |
| #define vlseg4e16ff_v_f16mf4_m | ( | ... | ) | __riscv_vlseg4e16ff_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlseg4e16ff_v_i16m1 | ( | ... | ) | __riscv_vlseg4e16ff_v_i16m1(__VA_ARGS__) |
| #define vlseg4e16ff_v_i16m1_m | ( | ... | ) | __riscv_vlseg4e16ff_v_i16m1_tumu(__VA_ARGS__) |
| #define vlseg4e16ff_v_i16m2 | ( | ... | ) | __riscv_vlseg4e16ff_v_i16m2(__VA_ARGS__) |
| #define vlseg4e16ff_v_i16m2_m | ( | ... | ) | __riscv_vlseg4e16ff_v_i16m2_tumu(__VA_ARGS__) |
| #define vlseg4e16ff_v_i16mf2 | ( | ... | ) | __riscv_vlseg4e16ff_v_i16mf2(__VA_ARGS__) |
| #define vlseg4e16ff_v_i16mf2_m | ( | ... | ) | __riscv_vlseg4e16ff_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlseg4e16ff_v_i16mf4 | ( | ... | ) | __riscv_vlseg4e16ff_v_i16mf4(__VA_ARGS__) |
| #define vlseg4e16ff_v_i16mf4_m | ( | ... | ) | __riscv_vlseg4e16ff_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlseg4e16ff_v_u16m1 | ( | ... | ) | __riscv_vlseg4e16ff_v_u16m1(__VA_ARGS__) |
| #define vlseg4e16ff_v_u16m1_m | ( | ... | ) | __riscv_vlseg4e16ff_v_u16m1_tumu(__VA_ARGS__) |
| #define vlseg4e16ff_v_u16m2 | ( | ... | ) | __riscv_vlseg4e16ff_v_u16m2(__VA_ARGS__) |
| #define vlseg4e16ff_v_u16m2_m | ( | ... | ) | __riscv_vlseg4e16ff_v_u16m2_tumu(__VA_ARGS__) |
| #define vlseg4e16ff_v_u16mf2 | ( | ... | ) | __riscv_vlseg4e16ff_v_u16mf2(__VA_ARGS__) |
| #define vlseg4e16ff_v_u16mf2_m | ( | ... | ) | __riscv_vlseg4e16ff_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlseg4e16ff_v_u16mf4 | ( | ... | ) | __riscv_vlseg4e16ff_v_u16mf4(__VA_ARGS__) |
| #define vlseg4e16ff_v_u16mf4_m | ( | ... | ) | __riscv_vlseg4e16ff_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlseg4e32_v_f32m1 | ( | ... | ) | __riscv_vlseg4e32_v_f32m1(__VA_ARGS__) |
| #define vlseg4e32_v_f32m1_m | ( | ... | ) | __riscv_vlseg4e32_v_f32m1_tumu(__VA_ARGS__) |
| #define vlseg4e32_v_f32m2 | ( | ... | ) | __riscv_vlseg4e32_v_f32m2(__VA_ARGS__) |
| #define vlseg4e32_v_f32m2_m | ( | ... | ) | __riscv_vlseg4e32_v_f32m2_tumu(__VA_ARGS__) |
| #define vlseg4e32_v_f32mf2 | ( | ... | ) | __riscv_vlseg4e32_v_f32mf2(__VA_ARGS__) |
| #define vlseg4e32_v_f32mf2_m | ( | ... | ) | __riscv_vlseg4e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlseg4e32_v_i32m1 | ( | ... | ) | __riscv_vlseg4e32_v_i32m1(__VA_ARGS__) |
| #define vlseg4e32_v_i32m1_m | ( | ... | ) | __riscv_vlseg4e32_v_i32m1_tumu(__VA_ARGS__) |
| #define vlseg4e32_v_i32m2 | ( | ... | ) | __riscv_vlseg4e32_v_i32m2(__VA_ARGS__) |
| #define vlseg4e32_v_i32m2_m | ( | ... | ) | __riscv_vlseg4e32_v_i32m2_tumu(__VA_ARGS__) |
| #define vlseg4e32_v_i32mf2 | ( | ... | ) | __riscv_vlseg4e32_v_i32mf2(__VA_ARGS__) |
| #define vlseg4e32_v_i32mf2_m | ( | ... | ) | __riscv_vlseg4e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlseg4e32_v_u32m1 | ( | ... | ) | __riscv_vlseg4e32_v_u32m1(__VA_ARGS__) |
| #define vlseg4e32_v_u32m1_m | ( | ... | ) | __riscv_vlseg4e32_v_u32m1_tumu(__VA_ARGS__) |
| #define vlseg4e32_v_u32m2 | ( | ... | ) | __riscv_vlseg4e32_v_u32m2(__VA_ARGS__) |
| #define vlseg4e32_v_u32m2_m | ( | ... | ) | __riscv_vlseg4e32_v_u32m2_tumu(__VA_ARGS__) |
| #define vlseg4e32_v_u32mf2 | ( | ... | ) | __riscv_vlseg4e32_v_u32mf2(__VA_ARGS__) |
| #define vlseg4e32_v_u32mf2_m | ( | ... | ) | __riscv_vlseg4e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlseg4e32ff_v_f32m1 | ( | ... | ) | __riscv_vlseg4e32ff_v_f32m1(__VA_ARGS__) |
| #define vlseg4e32ff_v_f32m1_m | ( | ... | ) | __riscv_vlseg4e32ff_v_f32m1_tumu(__VA_ARGS__) |
| #define vlseg4e32ff_v_f32m2 | ( | ... | ) | __riscv_vlseg4e32ff_v_f32m2(__VA_ARGS__) |
| #define vlseg4e32ff_v_f32m2_m | ( | ... | ) | __riscv_vlseg4e32ff_v_f32m2_tumu(__VA_ARGS__) |
| #define vlseg4e32ff_v_f32mf2 | ( | ... | ) | __riscv_vlseg4e32ff_v_f32mf2(__VA_ARGS__) |
| #define vlseg4e32ff_v_f32mf2_m | ( | ... | ) | __riscv_vlseg4e32ff_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlseg4e32ff_v_i32m1 | ( | ... | ) | __riscv_vlseg4e32ff_v_i32m1(__VA_ARGS__) |
| #define vlseg4e32ff_v_i32m1_m | ( | ... | ) | __riscv_vlseg4e32ff_v_i32m1_tumu(__VA_ARGS__) |
| #define vlseg4e32ff_v_i32m2 | ( | ... | ) | __riscv_vlseg4e32ff_v_i32m2(__VA_ARGS__) |
| #define vlseg4e32ff_v_i32m2_m | ( | ... | ) | __riscv_vlseg4e32ff_v_i32m2_tumu(__VA_ARGS__) |
| #define vlseg4e32ff_v_i32mf2 | ( | ... | ) | __riscv_vlseg4e32ff_v_i32mf2(__VA_ARGS__) |
| #define vlseg4e32ff_v_i32mf2_m | ( | ... | ) | __riscv_vlseg4e32ff_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlseg4e32ff_v_u32m1 | ( | ... | ) | __riscv_vlseg4e32ff_v_u32m1(__VA_ARGS__) |
| #define vlseg4e32ff_v_u32m1_m | ( | ... | ) | __riscv_vlseg4e32ff_v_u32m1_tumu(__VA_ARGS__) |
| #define vlseg4e32ff_v_u32m2 | ( | ... | ) | __riscv_vlseg4e32ff_v_u32m2(__VA_ARGS__) |
| #define vlseg4e32ff_v_u32m2_m | ( | ... | ) | __riscv_vlseg4e32ff_v_u32m2_tumu(__VA_ARGS__) |
| #define vlseg4e32ff_v_u32mf2 | ( | ... | ) | __riscv_vlseg4e32ff_v_u32mf2(__VA_ARGS__) |
| #define vlseg4e32ff_v_u32mf2_m | ( | ... | ) | __riscv_vlseg4e32ff_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlseg4e64_v_f64m1 | ( | ... | ) | __riscv_vlseg4e64_v_f64m1(__VA_ARGS__) |
| #define vlseg4e64_v_f64m1_m | ( | ... | ) | __riscv_vlseg4e64_v_f64m1_tumu(__VA_ARGS__) |
| #define vlseg4e64_v_f64m2 | ( | ... | ) | __riscv_vlseg4e64_v_f64m2(__VA_ARGS__) |
| #define vlseg4e64_v_f64m2_m | ( | ... | ) | __riscv_vlseg4e64_v_f64m2_tumu(__VA_ARGS__) |
| #define vlseg4e64_v_i64m1 | ( | ... | ) | __riscv_vlseg4e64_v_i64m1(__VA_ARGS__) |
| #define vlseg4e64_v_i64m1_m | ( | ... | ) | __riscv_vlseg4e64_v_i64m1_tumu(__VA_ARGS__) |
| #define vlseg4e64_v_i64m2 | ( | ... | ) | __riscv_vlseg4e64_v_i64m2(__VA_ARGS__) |
| #define vlseg4e64_v_i64m2_m | ( | ... | ) | __riscv_vlseg4e64_v_i64m2_tumu(__VA_ARGS__) |
| #define vlseg4e64_v_u64m1 | ( | ... | ) | __riscv_vlseg4e64_v_u64m1(__VA_ARGS__) |
| #define vlseg4e64_v_u64m1_m | ( | ... | ) | __riscv_vlseg4e64_v_u64m1_tumu(__VA_ARGS__) |
| #define vlseg4e64_v_u64m2 | ( | ... | ) | __riscv_vlseg4e64_v_u64m2(__VA_ARGS__) |
| #define vlseg4e64_v_u64m2_m | ( | ... | ) | __riscv_vlseg4e64_v_u64m2_tumu(__VA_ARGS__) |
| #define vlseg4e64ff_v_f64m1 | ( | ... | ) | __riscv_vlseg4e64ff_v_f64m1(__VA_ARGS__) |
| #define vlseg4e64ff_v_f64m1_m | ( | ... | ) | __riscv_vlseg4e64ff_v_f64m1_tumu(__VA_ARGS__) |
| #define vlseg4e64ff_v_f64m2 | ( | ... | ) | __riscv_vlseg4e64ff_v_f64m2(__VA_ARGS__) |
| #define vlseg4e64ff_v_f64m2_m | ( | ... | ) | __riscv_vlseg4e64ff_v_f64m2_tumu(__VA_ARGS__) |
| #define vlseg4e64ff_v_i64m1 | ( | ... | ) | __riscv_vlseg4e64ff_v_i64m1(__VA_ARGS__) |
| #define vlseg4e64ff_v_i64m1_m | ( | ... | ) | __riscv_vlseg4e64ff_v_i64m1_tumu(__VA_ARGS__) |
| #define vlseg4e64ff_v_i64m2 | ( | ... | ) | __riscv_vlseg4e64ff_v_i64m2(__VA_ARGS__) |
| #define vlseg4e64ff_v_i64m2_m | ( | ... | ) | __riscv_vlseg4e64ff_v_i64m2_tumu(__VA_ARGS__) |
| #define vlseg4e64ff_v_u64m1 | ( | ... | ) | __riscv_vlseg4e64ff_v_u64m1(__VA_ARGS__) |
| #define vlseg4e64ff_v_u64m1_m | ( | ... | ) | __riscv_vlseg4e64ff_v_u64m1_tumu(__VA_ARGS__) |
| #define vlseg4e64ff_v_u64m2 | ( | ... | ) | __riscv_vlseg4e64ff_v_u64m2(__VA_ARGS__) |
| #define vlseg4e64ff_v_u64m2_m | ( | ... | ) | __riscv_vlseg4e64ff_v_u64m2_tumu(__VA_ARGS__) |
| #define vlseg4e8_v_i8m1 | ( | ... | ) | __riscv_vlseg4e8_v_i8m1(__VA_ARGS__) |
| #define vlseg4e8_v_i8m1_m | ( | ... | ) | __riscv_vlseg4e8_v_i8m1_tumu(__VA_ARGS__) |
| #define vlseg4e8_v_i8m2 | ( | ... | ) | __riscv_vlseg4e8_v_i8m2(__VA_ARGS__) |
| #define vlseg4e8_v_i8m2_m | ( | ... | ) | __riscv_vlseg4e8_v_i8m2_tumu(__VA_ARGS__) |
| #define vlseg4e8_v_i8mf2 | ( | ... | ) | __riscv_vlseg4e8_v_i8mf2(__VA_ARGS__) |
| #define vlseg4e8_v_i8mf2_m | ( | ... | ) | __riscv_vlseg4e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlseg4e8_v_i8mf4 | ( | ... | ) | __riscv_vlseg4e8_v_i8mf4(__VA_ARGS__) |
| #define vlseg4e8_v_i8mf4_m | ( | ... | ) | __riscv_vlseg4e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlseg4e8_v_i8mf8 | ( | ... | ) | __riscv_vlseg4e8_v_i8mf8(__VA_ARGS__) |
| #define vlseg4e8_v_i8mf8_m | ( | ... | ) | __riscv_vlseg4e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlseg4e8_v_u8m1 | ( | ... | ) | __riscv_vlseg4e8_v_u8m1(__VA_ARGS__) |
| #define vlseg4e8_v_u8m1_m | ( | ... | ) | __riscv_vlseg4e8_v_u8m1_tumu(__VA_ARGS__) |
| #define vlseg4e8_v_u8m2 | ( | ... | ) | __riscv_vlseg4e8_v_u8m2(__VA_ARGS__) |
| #define vlseg4e8_v_u8m2_m | ( | ... | ) | __riscv_vlseg4e8_v_u8m2_tumu(__VA_ARGS__) |
| #define vlseg4e8_v_u8mf2 | ( | ... | ) | __riscv_vlseg4e8_v_u8mf2(__VA_ARGS__) |
| #define vlseg4e8_v_u8mf2_m | ( | ... | ) | __riscv_vlseg4e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlseg4e8_v_u8mf4 | ( | ... | ) | __riscv_vlseg4e8_v_u8mf4(__VA_ARGS__) |
| #define vlseg4e8_v_u8mf4_m | ( | ... | ) | __riscv_vlseg4e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlseg4e8_v_u8mf8 | ( | ... | ) | __riscv_vlseg4e8_v_u8mf8(__VA_ARGS__) |
| #define vlseg4e8_v_u8mf8_m | ( | ... | ) | __riscv_vlseg4e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlseg4e8ff_v_i8m1 | ( | ... | ) | __riscv_vlseg4e8ff_v_i8m1(__VA_ARGS__) |
| #define vlseg4e8ff_v_i8m1_m | ( | ... | ) | __riscv_vlseg4e8ff_v_i8m1_tumu(__VA_ARGS__) |
| #define vlseg4e8ff_v_i8m2 | ( | ... | ) | __riscv_vlseg4e8ff_v_i8m2(__VA_ARGS__) |
| #define vlseg4e8ff_v_i8m2_m | ( | ... | ) | __riscv_vlseg4e8ff_v_i8m2_tumu(__VA_ARGS__) |
| #define vlseg4e8ff_v_i8mf2 | ( | ... | ) | __riscv_vlseg4e8ff_v_i8mf2(__VA_ARGS__) |
| #define vlseg4e8ff_v_i8mf2_m | ( | ... | ) | __riscv_vlseg4e8ff_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlseg4e8ff_v_i8mf4 | ( | ... | ) | __riscv_vlseg4e8ff_v_i8mf4(__VA_ARGS__) |
| #define vlseg4e8ff_v_i8mf4_m | ( | ... | ) | __riscv_vlseg4e8ff_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlseg4e8ff_v_i8mf8 | ( | ... | ) | __riscv_vlseg4e8ff_v_i8mf8(__VA_ARGS__) |
| #define vlseg4e8ff_v_i8mf8_m | ( | ... | ) | __riscv_vlseg4e8ff_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlseg4e8ff_v_u8m1 | ( | ... | ) | __riscv_vlseg4e8ff_v_u8m1(__VA_ARGS__) |
| #define vlseg4e8ff_v_u8m1_m | ( | ... | ) | __riscv_vlseg4e8ff_v_u8m1_tumu(__VA_ARGS__) |
| #define vlseg4e8ff_v_u8m2 | ( | ... | ) | __riscv_vlseg4e8ff_v_u8m2(__VA_ARGS__) |
| #define vlseg4e8ff_v_u8m2_m | ( | ... | ) | __riscv_vlseg4e8ff_v_u8m2_tumu(__VA_ARGS__) |
| #define vlseg4e8ff_v_u8mf2 | ( | ... | ) | __riscv_vlseg4e8ff_v_u8mf2(__VA_ARGS__) |
| #define vlseg4e8ff_v_u8mf2_m | ( | ... | ) | __riscv_vlseg4e8ff_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlseg4e8ff_v_u8mf4 | ( | ... | ) | __riscv_vlseg4e8ff_v_u8mf4(__VA_ARGS__) |
| #define vlseg4e8ff_v_u8mf4_m | ( | ... | ) | __riscv_vlseg4e8ff_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlseg4e8ff_v_u8mf8 | ( | ... | ) | __riscv_vlseg4e8ff_v_u8mf8(__VA_ARGS__) |
| #define vlseg4e8ff_v_u8mf8_m | ( | ... | ) | __riscv_vlseg4e8ff_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlseg5e16_v_f16m1 | ( | ... | ) | __riscv_vlseg5e16_v_f16m1(__VA_ARGS__) |
| #define vlseg5e16_v_f16m1_m | ( | ... | ) | __riscv_vlseg5e16_v_f16m1_tumu(__VA_ARGS__) |
| #define vlseg5e16_v_f16mf2 | ( | ... | ) | __riscv_vlseg5e16_v_f16mf2(__VA_ARGS__) |
| #define vlseg5e16_v_f16mf2_m | ( | ... | ) | __riscv_vlseg5e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlseg5e16_v_f16mf4 | ( | ... | ) | __riscv_vlseg5e16_v_f16mf4(__VA_ARGS__) |
| #define vlseg5e16_v_f16mf4_m | ( | ... | ) | __riscv_vlseg5e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlseg5e16_v_i16m1 | ( | ... | ) | __riscv_vlseg5e16_v_i16m1(__VA_ARGS__) |
| #define vlseg5e16_v_i16m1_m | ( | ... | ) | __riscv_vlseg5e16_v_i16m1_tumu(__VA_ARGS__) |
| #define vlseg5e16_v_i16mf2 | ( | ... | ) | __riscv_vlseg5e16_v_i16mf2(__VA_ARGS__) |
| #define vlseg5e16_v_i16mf2_m | ( | ... | ) | __riscv_vlseg5e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlseg5e16_v_i16mf4 | ( | ... | ) | __riscv_vlseg5e16_v_i16mf4(__VA_ARGS__) |
| #define vlseg5e16_v_i16mf4_m | ( | ... | ) | __riscv_vlseg5e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlseg5e16_v_u16m1 | ( | ... | ) | __riscv_vlseg5e16_v_u16m1(__VA_ARGS__) |
| #define vlseg5e16_v_u16m1_m | ( | ... | ) | __riscv_vlseg5e16_v_u16m1_tumu(__VA_ARGS__) |
| #define vlseg5e16_v_u16mf2 | ( | ... | ) | __riscv_vlseg5e16_v_u16mf2(__VA_ARGS__) |
| #define vlseg5e16_v_u16mf2_m | ( | ... | ) | __riscv_vlseg5e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlseg5e16_v_u16mf4 | ( | ... | ) | __riscv_vlseg5e16_v_u16mf4(__VA_ARGS__) |
| #define vlseg5e16_v_u16mf4_m | ( | ... | ) | __riscv_vlseg5e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlseg5e16ff_v_f16m1 | ( | ... | ) | __riscv_vlseg5e16ff_v_f16m1(__VA_ARGS__) |
| #define vlseg5e16ff_v_f16m1_m | ( | ... | ) | __riscv_vlseg5e16ff_v_f16m1_tumu(__VA_ARGS__) |
| #define vlseg5e16ff_v_f16mf2 | ( | ... | ) | __riscv_vlseg5e16ff_v_f16mf2(__VA_ARGS__) |
| #define vlseg5e16ff_v_f16mf2_m | ( | ... | ) | __riscv_vlseg5e16ff_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlseg5e16ff_v_f16mf4 | ( | ... | ) | __riscv_vlseg5e16ff_v_f16mf4(__VA_ARGS__) |
| #define vlseg5e16ff_v_f16mf4_m | ( | ... | ) | __riscv_vlseg5e16ff_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlseg5e16ff_v_i16m1 | ( | ... | ) | __riscv_vlseg5e16ff_v_i16m1(__VA_ARGS__) |
| #define vlseg5e16ff_v_i16m1_m | ( | ... | ) | __riscv_vlseg5e16ff_v_i16m1_tumu(__VA_ARGS__) |
| #define vlseg5e16ff_v_i16mf2 | ( | ... | ) | __riscv_vlseg5e16ff_v_i16mf2(__VA_ARGS__) |
| #define vlseg5e16ff_v_i16mf2_m | ( | ... | ) | __riscv_vlseg5e16ff_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlseg5e16ff_v_i16mf4 | ( | ... | ) | __riscv_vlseg5e16ff_v_i16mf4(__VA_ARGS__) |
| #define vlseg5e16ff_v_i16mf4_m | ( | ... | ) | __riscv_vlseg5e16ff_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlseg5e16ff_v_u16m1 | ( | ... | ) | __riscv_vlseg5e16ff_v_u16m1(__VA_ARGS__) |
| #define vlseg5e16ff_v_u16m1_m | ( | ... | ) | __riscv_vlseg5e16ff_v_u16m1_tumu(__VA_ARGS__) |
| #define vlseg5e16ff_v_u16mf2 | ( | ... | ) | __riscv_vlseg5e16ff_v_u16mf2(__VA_ARGS__) |
| #define vlseg5e16ff_v_u16mf2_m | ( | ... | ) | __riscv_vlseg5e16ff_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlseg5e16ff_v_u16mf4 | ( | ... | ) | __riscv_vlseg5e16ff_v_u16mf4(__VA_ARGS__) |
| #define vlseg5e16ff_v_u16mf4_m | ( | ... | ) | __riscv_vlseg5e16ff_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlseg5e32_v_f32m1 | ( | ... | ) | __riscv_vlseg5e32_v_f32m1(__VA_ARGS__) |
| #define vlseg5e32_v_f32m1_m | ( | ... | ) | __riscv_vlseg5e32_v_f32m1_tumu(__VA_ARGS__) |
| #define vlseg5e32_v_f32mf2 | ( | ... | ) | __riscv_vlseg5e32_v_f32mf2(__VA_ARGS__) |
| #define vlseg5e32_v_f32mf2_m | ( | ... | ) | __riscv_vlseg5e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlseg5e32_v_i32m1 | ( | ... | ) | __riscv_vlseg5e32_v_i32m1(__VA_ARGS__) |
| #define vlseg5e32_v_i32m1_m | ( | ... | ) | __riscv_vlseg5e32_v_i32m1_tumu(__VA_ARGS__) |
| #define vlseg5e32_v_i32mf2 | ( | ... | ) | __riscv_vlseg5e32_v_i32mf2(__VA_ARGS__) |
| #define vlseg5e32_v_i32mf2_m | ( | ... | ) | __riscv_vlseg5e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlseg5e32_v_u32m1 | ( | ... | ) | __riscv_vlseg5e32_v_u32m1(__VA_ARGS__) |
| #define vlseg5e32_v_u32m1_m | ( | ... | ) | __riscv_vlseg5e32_v_u32m1_tumu(__VA_ARGS__) |
| #define vlseg5e32_v_u32mf2 | ( | ... | ) | __riscv_vlseg5e32_v_u32mf2(__VA_ARGS__) |
| #define vlseg5e32_v_u32mf2_m | ( | ... | ) | __riscv_vlseg5e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlseg5e32ff_v_f32m1 | ( | ... | ) | __riscv_vlseg5e32ff_v_f32m1(__VA_ARGS__) |
| #define vlseg5e32ff_v_f32m1_m | ( | ... | ) | __riscv_vlseg5e32ff_v_f32m1_tumu(__VA_ARGS__) |
| #define vlseg5e32ff_v_f32mf2 | ( | ... | ) | __riscv_vlseg5e32ff_v_f32mf2(__VA_ARGS__) |
| #define vlseg5e32ff_v_f32mf2_m | ( | ... | ) | __riscv_vlseg5e32ff_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlseg5e32ff_v_i32m1 | ( | ... | ) | __riscv_vlseg5e32ff_v_i32m1(__VA_ARGS__) |
| #define vlseg5e32ff_v_i32m1_m | ( | ... | ) | __riscv_vlseg5e32ff_v_i32m1_tumu(__VA_ARGS__) |
| #define vlseg5e32ff_v_i32mf2 | ( | ... | ) | __riscv_vlseg5e32ff_v_i32mf2(__VA_ARGS__) |
| #define vlseg5e32ff_v_i32mf2_m | ( | ... | ) | __riscv_vlseg5e32ff_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlseg5e32ff_v_u32m1 | ( | ... | ) | __riscv_vlseg5e32ff_v_u32m1(__VA_ARGS__) |
| #define vlseg5e32ff_v_u32m1_m | ( | ... | ) | __riscv_vlseg5e32ff_v_u32m1_tumu(__VA_ARGS__) |
| #define vlseg5e32ff_v_u32mf2 | ( | ... | ) | __riscv_vlseg5e32ff_v_u32mf2(__VA_ARGS__) |
| #define vlseg5e32ff_v_u32mf2_m | ( | ... | ) | __riscv_vlseg5e32ff_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlseg5e64_v_f64m1 | ( | ... | ) | __riscv_vlseg5e64_v_f64m1(__VA_ARGS__) |
| #define vlseg5e64_v_f64m1_m | ( | ... | ) | __riscv_vlseg5e64_v_f64m1_tumu(__VA_ARGS__) |
| #define vlseg5e64_v_i64m1 | ( | ... | ) | __riscv_vlseg5e64_v_i64m1(__VA_ARGS__) |
| #define vlseg5e64_v_i64m1_m | ( | ... | ) | __riscv_vlseg5e64_v_i64m1_tumu(__VA_ARGS__) |
| #define vlseg5e64_v_u64m1 | ( | ... | ) | __riscv_vlseg5e64_v_u64m1(__VA_ARGS__) |
| #define vlseg5e64_v_u64m1_m | ( | ... | ) | __riscv_vlseg5e64_v_u64m1_tumu(__VA_ARGS__) |
| #define vlseg5e64ff_v_f64m1 | ( | ... | ) | __riscv_vlseg5e64ff_v_f64m1(__VA_ARGS__) |
| #define vlseg5e64ff_v_f64m1_m | ( | ... | ) | __riscv_vlseg5e64ff_v_f64m1_tumu(__VA_ARGS__) |
| #define vlseg5e64ff_v_i64m1 | ( | ... | ) | __riscv_vlseg5e64ff_v_i64m1(__VA_ARGS__) |
| #define vlseg5e64ff_v_i64m1_m | ( | ... | ) | __riscv_vlseg5e64ff_v_i64m1_tumu(__VA_ARGS__) |
| #define vlseg5e64ff_v_u64m1 | ( | ... | ) | __riscv_vlseg5e64ff_v_u64m1(__VA_ARGS__) |
| #define vlseg5e64ff_v_u64m1_m | ( | ... | ) | __riscv_vlseg5e64ff_v_u64m1_tumu(__VA_ARGS__) |
| #define vlseg5e8_v_i8m1 | ( | ... | ) | __riscv_vlseg5e8_v_i8m1(__VA_ARGS__) |
| #define vlseg5e8_v_i8m1_m | ( | ... | ) | __riscv_vlseg5e8_v_i8m1_tumu(__VA_ARGS__) |
| #define vlseg5e8_v_i8mf2 | ( | ... | ) | __riscv_vlseg5e8_v_i8mf2(__VA_ARGS__) |
| #define vlseg5e8_v_i8mf2_m | ( | ... | ) | __riscv_vlseg5e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlseg5e8_v_i8mf4 | ( | ... | ) | __riscv_vlseg5e8_v_i8mf4(__VA_ARGS__) |
| #define vlseg5e8_v_i8mf4_m | ( | ... | ) | __riscv_vlseg5e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlseg5e8_v_i8mf8 | ( | ... | ) | __riscv_vlseg5e8_v_i8mf8(__VA_ARGS__) |
| #define vlseg5e8_v_i8mf8_m | ( | ... | ) | __riscv_vlseg5e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlseg5e8_v_u8m1 | ( | ... | ) | __riscv_vlseg5e8_v_u8m1(__VA_ARGS__) |
| #define vlseg5e8_v_u8m1_m | ( | ... | ) | __riscv_vlseg5e8_v_u8m1_tumu(__VA_ARGS__) |
| #define vlseg5e8_v_u8mf2 | ( | ... | ) | __riscv_vlseg5e8_v_u8mf2(__VA_ARGS__) |
| #define vlseg5e8_v_u8mf2_m | ( | ... | ) | __riscv_vlseg5e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlseg5e8_v_u8mf4 | ( | ... | ) | __riscv_vlseg5e8_v_u8mf4(__VA_ARGS__) |
| #define vlseg5e8_v_u8mf4_m | ( | ... | ) | __riscv_vlseg5e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlseg5e8_v_u8mf8 | ( | ... | ) | __riscv_vlseg5e8_v_u8mf8(__VA_ARGS__) |
| #define vlseg5e8_v_u8mf8_m | ( | ... | ) | __riscv_vlseg5e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlseg5e8ff_v_i8m1 | ( | ... | ) | __riscv_vlseg5e8ff_v_i8m1(__VA_ARGS__) |
| #define vlseg5e8ff_v_i8m1_m | ( | ... | ) | __riscv_vlseg5e8ff_v_i8m1_tumu(__VA_ARGS__) |
| #define vlseg5e8ff_v_i8mf2 | ( | ... | ) | __riscv_vlseg5e8ff_v_i8mf2(__VA_ARGS__) |
| #define vlseg5e8ff_v_i8mf2_m | ( | ... | ) | __riscv_vlseg5e8ff_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlseg5e8ff_v_i8mf4 | ( | ... | ) | __riscv_vlseg5e8ff_v_i8mf4(__VA_ARGS__) |
| #define vlseg5e8ff_v_i8mf4_m | ( | ... | ) | __riscv_vlseg5e8ff_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlseg5e8ff_v_i8mf8 | ( | ... | ) | __riscv_vlseg5e8ff_v_i8mf8(__VA_ARGS__) |
| #define vlseg5e8ff_v_i8mf8_m | ( | ... | ) | __riscv_vlseg5e8ff_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlseg5e8ff_v_u8m1 | ( | ... | ) | __riscv_vlseg5e8ff_v_u8m1(__VA_ARGS__) |
| #define vlseg5e8ff_v_u8m1_m | ( | ... | ) | __riscv_vlseg5e8ff_v_u8m1_tumu(__VA_ARGS__) |
| #define vlseg5e8ff_v_u8mf2 | ( | ... | ) | __riscv_vlseg5e8ff_v_u8mf2(__VA_ARGS__) |
| #define vlseg5e8ff_v_u8mf2_m | ( | ... | ) | __riscv_vlseg5e8ff_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlseg5e8ff_v_u8mf4 | ( | ... | ) | __riscv_vlseg5e8ff_v_u8mf4(__VA_ARGS__) |
| #define vlseg5e8ff_v_u8mf4_m | ( | ... | ) | __riscv_vlseg5e8ff_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlseg5e8ff_v_u8mf8 | ( | ... | ) | __riscv_vlseg5e8ff_v_u8mf8(__VA_ARGS__) |
| #define vlseg5e8ff_v_u8mf8_m | ( | ... | ) | __riscv_vlseg5e8ff_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlseg6e16_v_f16m1 | ( | ... | ) | __riscv_vlseg6e16_v_f16m1(__VA_ARGS__) |
| #define vlseg6e16_v_f16m1_m | ( | ... | ) | __riscv_vlseg6e16_v_f16m1_tumu(__VA_ARGS__) |
| #define vlseg6e16_v_f16mf2 | ( | ... | ) | __riscv_vlseg6e16_v_f16mf2(__VA_ARGS__) |
| #define vlseg6e16_v_f16mf2_m | ( | ... | ) | __riscv_vlseg6e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlseg6e16_v_f16mf4 | ( | ... | ) | __riscv_vlseg6e16_v_f16mf4(__VA_ARGS__) |
| #define vlseg6e16_v_f16mf4_m | ( | ... | ) | __riscv_vlseg6e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlseg6e16_v_i16m1 | ( | ... | ) | __riscv_vlseg6e16_v_i16m1(__VA_ARGS__) |
| #define vlseg6e16_v_i16m1_m | ( | ... | ) | __riscv_vlseg6e16_v_i16m1_tumu(__VA_ARGS__) |
| #define vlseg6e16_v_i16mf2 | ( | ... | ) | __riscv_vlseg6e16_v_i16mf2(__VA_ARGS__) |
| #define vlseg6e16_v_i16mf2_m | ( | ... | ) | __riscv_vlseg6e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlseg6e16_v_i16mf4 | ( | ... | ) | __riscv_vlseg6e16_v_i16mf4(__VA_ARGS__) |
| #define vlseg6e16_v_i16mf4_m | ( | ... | ) | __riscv_vlseg6e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlseg6e16_v_u16m1 | ( | ... | ) | __riscv_vlseg6e16_v_u16m1(__VA_ARGS__) |
| #define vlseg6e16_v_u16m1_m | ( | ... | ) | __riscv_vlseg6e16_v_u16m1_tumu(__VA_ARGS__) |
| #define vlseg6e16_v_u16mf2 | ( | ... | ) | __riscv_vlseg6e16_v_u16mf2(__VA_ARGS__) |
| #define vlseg6e16_v_u16mf2_m | ( | ... | ) | __riscv_vlseg6e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlseg6e16_v_u16mf4 | ( | ... | ) | __riscv_vlseg6e16_v_u16mf4(__VA_ARGS__) |
| #define vlseg6e16_v_u16mf4_m | ( | ... | ) | __riscv_vlseg6e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlseg6e16ff_v_f16m1 | ( | ... | ) | __riscv_vlseg6e16ff_v_f16m1(__VA_ARGS__) |
| #define vlseg6e16ff_v_f16m1_m | ( | ... | ) | __riscv_vlseg6e16ff_v_f16m1_tumu(__VA_ARGS__) |
| #define vlseg6e16ff_v_f16mf2 | ( | ... | ) | __riscv_vlseg6e16ff_v_f16mf2(__VA_ARGS__) |
| #define vlseg6e16ff_v_f16mf2_m | ( | ... | ) | __riscv_vlseg6e16ff_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlseg6e16ff_v_f16mf4 | ( | ... | ) | __riscv_vlseg6e16ff_v_f16mf4(__VA_ARGS__) |
| #define vlseg6e16ff_v_f16mf4_m | ( | ... | ) | __riscv_vlseg6e16ff_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlseg6e16ff_v_i16m1 | ( | ... | ) | __riscv_vlseg6e16ff_v_i16m1(__VA_ARGS__) |
| #define vlseg6e16ff_v_i16m1_m | ( | ... | ) | __riscv_vlseg6e16ff_v_i16m1_tumu(__VA_ARGS__) |
| #define vlseg6e16ff_v_i16mf2 | ( | ... | ) | __riscv_vlseg6e16ff_v_i16mf2(__VA_ARGS__) |
| #define vlseg6e16ff_v_i16mf2_m | ( | ... | ) | __riscv_vlseg6e16ff_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlseg6e16ff_v_i16mf4 | ( | ... | ) | __riscv_vlseg6e16ff_v_i16mf4(__VA_ARGS__) |
| #define vlseg6e16ff_v_i16mf4_m | ( | ... | ) | __riscv_vlseg6e16ff_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlseg6e16ff_v_u16m1 | ( | ... | ) | __riscv_vlseg6e16ff_v_u16m1(__VA_ARGS__) |
| #define vlseg6e16ff_v_u16m1_m | ( | ... | ) | __riscv_vlseg6e16ff_v_u16m1_tumu(__VA_ARGS__) |
| #define vlseg6e16ff_v_u16mf2 | ( | ... | ) | __riscv_vlseg6e16ff_v_u16mf2(__VA_ARGS__) |
| #define vlseg6e16ff_v_u16mf2_m | ( | ... | ) | __riscv_vlseg6e16ff_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlseg6e16ff_v_u16mf4 | ( | ... | ) | __riscv_vlseg6e16ff_v_u16mf4(__VA_ARGS__) |
| #define vlseg6e16ff_v_u16mf4_m | ( | ... | ) | __riscv_vlseg6e16ff_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlseg6e32_v_f32m1 | ( | ... | ) | __riscv_vlseg6e32_v_f32m1(__VA_ARGS__) |
| #define vlseg6e32_v_f32m1_m | ( | ... | ) | __riscv_vlseg6e32_v_f32m1_tumu(__VA_ARGS__) |
| #define vlseg6e32_v_f32mf2 | ( | ... | ) | __riscv_vlseg6e32_v_f32mf2(__VA_ARGS__) |
| #define vlseg6e32_v_f32mf2_m | ( | ... | ) | __riscv_vlseg6e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlseg6e32_v_i32m1 | ( | ... | ) | __riscv_vlseg6e32_v_i32m1(__VA_ARGS__) |
| #define vlseg6e32_v_i32m1_m | ( | ... | ) | __riscv_vlseg6e32_v_i32m1_tumu(__VA_ARGS__) |
| #define vlseg6e32_v_i32mf2 | ( | ... | ) | __riscv_vlseg6e32_v_i32mf2(__VA_ARGS__) |
| #define vlseg6e32_v_i32mf2_m | ( | ... | ) | __riscv_vlseg6e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlseg6e32_v_u32m1 | ( | ... | ) | __riscv_vlseg6e32_v_u32m1(__VA_ARGS__) |
| #define vlseg6e32_v_u32m1_m | ( | ... | ) | __riscv_vlseg6e32_v_u32m1_tumu(__VA_ARGS__) |
| #define vlseg6e32_v_u32mf2 | ( | ... | ) | __riscv_vlseg6e32_v_u32mf2(__VA_ARGS__) |
| #define vlseg6e32_v_u32mf2_m | ( | ... | ) | __riscv_vlseg6e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlseg6e32ff_v_f32m1 | ( | ... | ) | __riscv_vlseg6e32ff_v_f32m1(__VA_ARGS__) |
| #define vlseg6e32ff_v_f32m1_m | ( | ... | ) | __riscv_vlseg6e32ff_v_f32m1_tumu(__VA_ARGS__) |
| #define vlseg6e32ff_v_f32mf2 | ( | ... | ) | __riscv_vlseg6e32ff_v_f32mf2(__VA_ARGS__) |
| #define vlseg6e32ff_v_f32mf2_m | ( | ... | ) | __riscv_vlseg6e32ff_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlseg6e32ff_v_i32m1 | ( | ... | ) | __riscv_vlseg6e32ff_v_i32m1(__VA_ARGS__) |
| #define vlseg6e32ff_v_i32m1_m | ( | ... | ) | __riscv_vlseg6e32ff_v_i32m1_tumu(__VA_ARGS__) |
| #define vlseg6e32ff_v_i32mf2 | ( | ... | ) | __riscv_vlseg6e32ff_v_i32mf2(__VA_ARGS__) |
| #define vlseg6e32ff_v_i32mf2_m | ( | ... | ) | __riscv_vlseg6e32ff_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlseg6e32ff_v_u32m1 | ( | ... | ) | __riscv_vlseg6e32ff_v_u32m1(__VA_ARGS__) |
| #define vlseg6e32ff_v_u32m1_m | ( | ... | ) | __riscv_vlseg6e32ff_v_u32m1_tumu(__VA_ARGS__) |
| #define vlseg6e32ff_v_u32mf2 | ( | ... | ) | __riscv_vlseg6e32ff_v_u32mf2(__VA_ARGS__) |
| #define vlseg6e32ff_v_u32mf2_m | ( | ... | ) | __riscv_vlseg6e32ff_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlseg6e64_v_f64m1 | ( | ... | ) | __riscv_vlseg6e64_v_f64m1(__VA_ARGS__) |
| #define vlseg6e64_v_f64m1_m | ( | ... | ) | __riscv_vlseg6e64_v_f64m1_tumu(__VA_ARGS__) |
| #define vlseg6e64_v_i64m1 | ( | ... | ) | __riscv_vlseg6e64_v_i64m1(__VA_ARGS__) |
| #define vlseg6e64_v_i64m1_m | ( | ... | ) | __riscv_vlseg6e64_v_i64m1_tumu(__VA_ARGS__) |
| #define vlseg6e64_v_u64m1 | ( | ... | ) | __riscv_vlseg6e64_v_u64m1(__VA_ARGS__) |
| #define vlseg6e64_v_u64m1_m | ( | ... | ) | __riscv_vlseg6e64_v_u64m1_tumu(__VA_ARGS__) |
| #define vlseg6e64ff_v_f64m1 | ( | ... | ) | __riscv_vlseg6e64ff_v_f64m1(__VA_ARGS__) |
| #define vlseg6e64ff_v_f64m1_m | ( | ... | ) | __riscv_vlseg6e64ff_v_f64m1_tumu(__VA_ARGS__) |
| #define vlseg6e64ff_v_i64m1 | ( | ... | ) | __riscv_vlseg6e64ff_v_i64m1(__VA_ARGS__) |
| #define vlseg6e64ff_v_i64m1_m | ( | ... | ) | __riscv_vlseg6e64ff_v_i64m1_tumu(__VA_ARGS__) |
| #define vlseg6e64ff_v_u64m1 | ( | ... | ) | __riscv_vlseg6e64ff_v_u64m1(__VA_ARGS__) |
| #define vlseg6e64ff_v_u64m1_m | ( | ... | ) | __riscv_vlseg6e64ff_v_u64m1_tumu(__VA_ARGS__) |
| #define vlseg6e8_v_i8m1 | ( | ... | ) | __riscv_vlseg6e8_v_i8m1(__VA_ARGS__) |
| #define vlseg6e8_v_i8m1_m | ( | ... | ) | __riscv_vlseg6e8_v_i8m1_tumu(__VA_ARGS__) |
| #define vlseg6e8_v_i8mf2 | ( | ... | ) | __riscv_vlseg6e8_v_i8mf2(__VA_ARGS__) |
| #define vlseg6e8_v_i8mf2_m | ( | ... | ) | __riscv_vlseg6e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlseg6e8_v_i8mf4 | ( | ... | ) | __riscv_vlseg6e8_v_i8mf4(__VA_ARGS__) |
| #define vlseg6e8_v_i8mf4_m | ( | ... | ) | __riscv_vlseg6e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlseg6e8_v_i8mf8 | ( | ... | ) | __riscv_vlseg6e8_v_i8mf8(__VA_ARGS__) |
| #define vlseg6e8_v_i8mf8_m | ( | ... | ) | __riscv_vlseg6e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlseg6e8_v_u8m1 | ( | ... | ) | __riscv_vlseg6e8_v_u8m1(__VA_ARGS__) |
| #define vlseg6e8_v_u8m1_m | ( | ... | ) | __riscv_vlseg6e8_v_u8m1_tumu(__VA_ARGS__) |
| #define vlseg6e8_v_u8mf2 | ( | ... | ) | __riscv_vlseg6e8_v_u8mf2(__VA_ARGS__) |
| #define vlseg6e8_v_u8mf2_m | ( | ... | ) | __riscv_vlseg6e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlseg6e8_v_u8mf4 | ( | ... | ) | __riscv_vlseg6e8_v_u8mf4(__VA_ARGS__) |
| #define vlseg6e8_v_u8mf4_m | ( | ... | ) | __riscv_vlseg6e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlseg6e8_v_u8mf8 | ( | ... | ) | __riscv_vlseg6e8_v_u8mf8(__VA_ARGS__) |
| #define vlseg6e8_v_u8mf8_m | ( | ... | ) | __riscv_vlseg6e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlseg6e8ff_v_i8m1 | ( | ... | ) | __riscv_vlseg6e8ff_v_i8m1(__VA_ARGS__) |
| #define vlseg6e8ff_v_i8m1_m | ( | ... | ) | __riscv_vlseg6e8ff_v_i8m1_tumu(__VA_ARGS__) |
| #define vlseg6e8ff_v_i8mf2 | ( | ... | ) | __riscv_vlseg6e8ff_v_i8mf2(__VA_ARGS__) |
| #define vlseg6e8ff_v_i8mf2_m | ( | ... | ) | __riscv_vlseg6e8ff_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlseg6e8ff_v_i8mf4 | ( | ... | ) | __riscv_vlseg6e8ff_v_i8mf4(__VA_ARGS__) |
| #define vlseg6e8ff_v_i8mf4_m | ( | ... | ) | __riscv_vlseg6e8ff_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlseg6e8ff_v_i8mf8 | ( | ... | ) | __riscv_vlseg6e8ff_v_i8mf8(__VA_ARGS__) |
| #define vlseg6e8ff_v_i8mf8_m | ( | ... | ) | __riscv_vlseg6e8ff_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlseg6e8ff_v_u8m1 | ( | ... | ) | __riscv_vlseg6e8ff_v_u8m1(__VA_ARGS__) |
| #define vlseg6e8ff_v_u8m1_m | ( | ... | ) | __riscv_vlseg6e8ff_v_u8m1_tumu(__VA_ARGS__) |
| #define vlseg6e8ff_v_u8mf2 | ( | ... | ) | __riscv_vlseg6e8ff_v_u8mf2(__VA_ARGS__) |
| #define vlseg6e8ff_v_u8mf2_m | ( | ... | ) | __riscv_vlseg6e8ff_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlseg6e8ff_v_u8mf4 | ( | ... | ) | __riscv_vlseg6e8ff_v_u8mf4(__VA_ARGS__) |
| #define vlseg6e8ff_v_u8mf4_m | ( | ... | ) | __riscv_vlseg6e8ff_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlseg6e8ff_v_u8mf8 | ( | ... | ) | __riscv_vlseg6e8ff_v_u8mf8(__VA_ARGS__) |
| #define vlseg6e8ff_v_u8mf8_m | ( | ... | ) | __riscv_vlseg6e8ff_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlseg7e16_v_f16m1 | ( | ... | ) | __riscv_vlseg7e16_v_f16m1(__VA_ARGS__) |
| #define vlseg7e16_v_f16m1_m | ( | ... | ) | __riscv_vlseg7e16_v_f16m1_tumu(__VA_ARGS__) |
| #define vlseg7e16_v_f16mf2 | ( | ... | ) | __riscv_vlseg7e16_v_f16mf2(__VA_ARGS__) |
| #define vlseg7e16_v_f16mf2_m | ( | ... | ) | __riscv_vlseg7e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlseg7e16_v_f16mf4 | ( | ... | ) | __riscv_vlseg7e16_v_f16mf4(__VA_ARGS__) |
| #define vlseg7e16_v_f16mf4_m | ( | ... | ) | __riscv_vlseg7e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlseg7e16_v_i16m1 | ( | ... | ) | __riscv_vlseg7e16_v_i16m1(__VA_ARGS__) |
| #define vlseg7e16_v_i16m1_m | ( | ... | ) | __riscv_vlseg7e16_v_i16m1_tumu(__VA_ARGS__) |
| #define vlseg7e16_v_i16mf2 | ( | ... | ) | __riscv_vlseg7e16_v_i16mf2(__VA_ARGS__) |
| #define vlseg7e16_v_i16mf2_m | ( | ... | ) | __riscv_vlseg7e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlseg7e16_v_i16mf4 | ( | ... | ) | __riscv_vlseg7e16_v_i16mf4(__VA_ARGS__) |
| #define vlseg7e16_v_i16mf4_m | ( | ... | ) | __riscv_vlseg7e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlseg7e16_v_u16m1 | ( | ... | ) | __riscv_vlseg7e16_v_u16m1(__VA_ARGS__) |
| #define vlseg7e16_v_u16m1_m | ( | ... | ) | __riscv_vlseg7e16_v_u16m1_tumu(__VA_ARGS__) |
| #define vlseg7e16_v_u16mf2 | ( | ... | ) | __riscv_vlseg7e16_v_u16mf2(__VA_ARGS__) |
| #define vlseg7e16_v_u16mf2_m | ( | ... | ) | __riscv_vlseg7e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlseg7e16_v_u16mf4 | ( | ... | ) | __riscv_vlseg7e16_v_u16mf4(__VA_ARGS__) |
| #define vlseg7e16_v_u16mf4_m | ( | ... | ) | __riscv_vlseg7e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlseg7e16ff_v_f16m1 | ( | ... | ) | __riscv_vlseg7e16ff_v_f16m1(__VA_ARGS__) |
| #define vlseg7e16ff_v_f16m1_m | ( | ... | ) | __riscv_vlseg7e16ff_v_f16m1_tumu(__VA_ARGS__) |
| #define vlseg7e16ff_v_f16mf2 | ( | ... | ) | __riscv_vlseg7e16ff_v_f16mf2(__VA_ARGS__) |
| #define vlseg7e16ff_v_f16mf2_m | ( | ... | ) | __riscv_vlseg7e16ff_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlseg7e16ff_v_f16mf4 | ( | ... | ) | __riscv_vlseg7e16ff_v_f16mf4(__VA_ARGS__) |
| #define vlseg7e16ff_v_f16mf4_m | ( | ... | ) | __riscv_vlseg7e16ff_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlseg7e16ff_v_i16m1 | ( | ... | ) | __riscv_vlseg7e16ff_v_i16m1(__VA_ARGS__) |
| #define vlseg7e16ff_v_i16m1_m | ( | ... | ) | __riscv_vlseg7e16ff_v_i16m1_tumu(__VA_ARGS__) |
| #define vlseg7e16ff_v_i16mf2 | ( | ... | ) | __riscv_vlseg7e16ff_v_i16mf2(__VA_ARGS__) |
| #define vlseg7e16ff_v_i16mf2_m | ( | ... | ) | __riscv_vlseg7e16ff_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlseg7e16ff_v_i16mf4 | ( | ... | ) | __riscv_vlseg7e16ff_v_i16mf4(__VA_ARGS__) |
| #define vlseg7e16ff_v_i16mf4_m | ( | ... | ) | __riscv_vlseg7e16ff_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlseg7e16ff_v_u16m1 | ( | ... | ) | __riscv_vlseg7e16ff_v_u16m1(__VA_ARGS__) |
| #define vlseg7e16ff_v_u16m1_m | ( | ... | ) | __riscv_vlseg7e16ff_v_u16m1_tumu(__VA_ARGS__) |
| #define vlseg7e16ff_v_u16mf2 | ( | ... | ) | __riscv_vlseg7e16ff_v_u16mf2(__VA_ARGS__) |
| #define vlseg7e16ff_v_u16mf2_m | ( | ... | ) | __riscv_vlseg7e16ff_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlseg7e16ff_v_u16mf4 | ( | ... | ) | __riscv_vlseg7e16ff_v_u16mf4(__VA_ARGS__) |
| #define vlseg7e16ff_v_u16mf4_m | ( | ... | ) | __riscv_vlseg7e16ff_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlseg7e32_v_f32m1 | ( | ... | ) | __riscv_vlseg7e32_v_f32m1(__VA_ARGS__) |
| #define vlseg7e32_v_f32m1_m | ( | ... | ) | __riscv_vlseg7e32_v_f32m1_tumu(__VA_ARGS__) |
| #define vlseg7e32_v_f32mf2 | ( | ... | ) | __riscv_vlseg7e32_v_f32mf2(__VA_ARGS__) |
| #define vlseg7e32_v_f32mf2_m | ( | ... | ) | __riscv_vlseg7e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlseg7e32_v_i32m1 | ( | ... | ) | __riscv_vlseg7e32_v_i32m1(__VA_ARGS__) |
| #define vlseg7e32_v_i32m1_m | ( | ... | ) | __riscv_vlseg7e32_v_i32m1_tumu(__VA_ARGS__) |
| #define vlseg7e32_v_i32mf2 | ( | ... | ) | __riscv_vlseg7e32_v_i32mf2(__VA_ARGS__) |
| #define vlseg7e32_v_i32mf2_m | ( | ... | ) | __riscv_vlseg7e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlseg7e32_v_u32m1 | ( | ... | ) | __riscv_vlseg7e32_v_u32m1(__VA_ARGS__) |
| #define vlseg7e32_v_u32m1_m | ( | ... | ) | __riscv_vlseg7e32_v_u32m1_tumu(__VA_ARGS__) |
| #define vlseg7e32_v_u32mf2 | ( | ... | ) | __riscv_vlseg7e32_v_u32mf2(__VA_ARGS__) |
| #define vlseg7e32_v_u32mf2_m | ( | ... | ) | __riscv_vlseg7e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlseg7e32ff_v_f32m1 | ( | ... | ) | __riscv_vlseg7e32ff_v_f32m1(__VA_ARGS__) |
| #define vlseg7e32ff_v_f32m1_m | ( | ... | ) | __riscv_vlseg7e32ff_v_f32m1_tumu(__VA_ARGS__) |
| #define vlseg7e32ff_v_f32mf2 | ( | ... | ) | __riscv_vlseg7e32ff_v_f32mf2(__VA_ARGS__) |
| #define vlseg7e32ff_v_f32mf2_m | ( | ... | ) | __riscv_vlseg7e32ff_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlseg7e32ff_v_i32m1 | ( | ... | ) | __riscv_vlseg7e32ff_v_i32m1(__VA_ARGS__) |
| #define vlseg7e32ff_v_i32m1_m | ( | ... | ) | __riscv_vlseg7e32ff_v_i32m1_tumu(__VA_ARGS__) |
| #define vlseg7e32ff_v_i32mf2 | ( | ... | ) | __riscv_vlseg7e32ff_v_i32mf2(__VA_ARGS__) |
| #define vlseg7e32ff_v_i32mf2_m | ( | ... | ) | __riscv_vlseg7e32ff_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlseg7e32ff_v_u32m1 | ( | ... | ) | __riscv_vlseg7e32ff_v_u32m1(__VA_ARGS__) |
| #define vlseg7e32ff_v_u32m1_m | ( | ... | ) | __riscv_vlseg7e32ff_v_u32m1_tumu(__VA_ARGS__) |
| #define vlseg7e32ff_v_u32mf2 | ( | ... | ) | __riscv_vlseg7e32ff_v_u32mf2(__VA_ARGS__) |
| #define vlseg7e32ff_v_u32mf2_m | ( | ... | ) | __riscv_vlseg7e32ff_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlseg7e64_v_f64m1 | ( | ... | ) | __riscv_vlseg7e64_v_f64m1(__VA_ARGS__) |
| #define vlseg7e64_v_f64m1_m | ( | ... | ) | __riscv_vlseg7e64_v_f64m1_tumu(__VA_ARGS__) |
| #define vlseg7e64_v_i64m1 | ( | ... | ) | __riscv_vlseg7e64_v_i64m1(__VA_ARGS__) |
| #define vlseg7e64_v_i64m1_m | ( | ... | ) | __riscv_vlseg7e64_v_i64m1_tumu(__VA_ARGS__) |
| #define vlseg7e64_v_u64m1 | ( | ... | ) | __riscv_vlseg7e64_v_u64m1(__VA_ARGS__) |
| #define vlseg7e64_v_u64m1_m | ( | ... | ) | __riscv_vlseg7e64_v_u64m1_tumu(__VA_ARGS__) |
| #define vlseg7e64ff_v_f64m1 | ( | ... | ) | __riscv_vlseg7e64ff_v_f64m1(__VA_ARGS__) |
| #define vlseg7e64ff_v_f64m1_m | ( | ... | ) | __riscv_vlseg7e64ff_v_f64m1_tumu(__VA_ARGS__) |
| #define vlseg7e64ff_v_i64m1 | ( | ... | ) | __riscv_vlseg7e64ff_v_i64m1(__VA_ARGS__) |
| #define vlseg7e64ff_v_i64m1_m | ( | ... | ) | __riscv_vlseg7e64ff_v_i64m1_tumu(__VA_ARGS__) |
| #define vlseg7e64ff_v_u64m1 | ( | ... | ) | __riscv_vlseg7e64ff_v_u64m1(__VA_ARGS__) |
| #define vlseg7e64ff_v_u64m1_m | ( | ... | ) | __riscv_vlseg7e64ff_v_u64m1_tumu(__VA_ARGS__) |
| #define vlseg7e8_v_i8m1 | ( | ... | ) | __riscv_vlseg7e8_v_i8m1(__VA_ARGS__) |
| #define vlseg7e8_v_i8m1_m | ( | ... | ) | __riscv_vlseg7e8_v_i8m1_tumu(__VA_ARGS__) |
| #define vlseg7e8_v_i8mf2 | ( | ... | ) | __riscv_vlseg7e8_v_i8mf2(__VA_ARGS__) |
| #define vlseg7e8_v_i8mf2_m | ( | ... | ) | __riscv_vlseg7e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlseg7e8_v_i8mf4 | ( | ... | ) | __riscv_vlseg7e8_v_i8mf4(__VA_ARGS__) |
| #define vlseg7e8_v_i8mf4_m | ( | ... | ) | __riscv_vlseg7e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlseg7e8_v_i8mf8 | ( | ... | ) | __riscv_vlseg7e8_v_i8mf8(__VA_ARGS__) |
| #define vlseg7e8_v_i8mf8_m | ( | ... | ) | __riscv_vlseg7e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlseg7e8_v_u8m1 | ( | ... | ) | __riscv_vlseg7e8_v_u8m1(__VA_ARGS__) |
| #define vlseg7e8_v_u8m1_m | ( | ... | ) | __riscv_vlseg7e8_v_u8m1_tumu(__VA_ARGS__) |
| #define vlseg7e8_v_u8mf2 | ( | ... | ) | __riscv_vlseg7e8_v_u8mf2(__VA_ARGS__) |
| #define vlseg7e8_v_u8mf2_m | ( | ... | ) | __riscv_vlseg7e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlseg7e8_v_u8mf4 | ( | ... | ) | __riscv_vlseg7e8_v_u8mf4(__VA_ARGS__) |
| #define vlseg7e8_v_u8mf4_m | ( | ... | ) | __riscv_vlseg7e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlseg7e8_v_u8mf8 | ( | ... | ) | __riscv_vlseg7e8_v_u8mf8(__VA_ARGS__) |
| #define vlseg7e8_v_u8mf8_m | ( | ... | ) | __riscv_vlseg7e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlseg7e8ff_v_i8m1 | ( | ... | ) | __riscv_vlseg7e8ff_v_i8m1(__VA_ARGS__) |
| #define vlseg7e8ff_v_i8m1_m | ( | ... | ) | __riscv_vlseg7e8ff_v_i8m1_tumu(__VA_ARGS__) |
| #define vlseg7e8ff_v_i8mf2 | ( | ... | ) | __riscv_vlseg7e8ff_v_i8mf2(__VA_ARGS__) |
| #define vlseg7e8ff_v_i8mf2_m | ( | ... | ) | __riscv_vlseg7e8ff_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlseg7e8ff_v_i8mf4 | ( | ... | ) | __riscv_vlseg7e8ff_v_i8mf4(__VA_ARGS__) |
| #define vlseg7e8ff_v_i8mf4_m | ( | ... | ) | __riscv_vlseg7e8ff_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlseg7e8ff_v_i8mf8 | ( | ... | ) | __riscv_vlseg7e8ff_v_i8mf8(__VA_ARGS__) |
| #define vlseg7e8ff_v_i8mf8_m | ( | ... | ) | __riscv_vlseg7e8ff_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlseg7e8ff_v_u8m1 | ( | ... | ) | __riscv_vlseg7e8ff_v_u8m1(__VA_ARGS__) |
| #define vlseg7e8ff_v_u8m1_m | ( | ... | ) | __riscv_vlseg7e8ff_v_u8m1_tumu(__VA_ARGS__) |
| #define vlseg7e8ff_v_u8mf2 | ( | ... | ) | __riscv_vlseg7e8ff_v_u8mf2(__VA_ARGS__) |
| #define vlseg7e8ff_v_u8mf2_m | ( | ... | ) | __riscv_vlseg7e8ff_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlseg7e8ff_v_u8mf4 | ( | ... | ) | __riscv_vlseg7e8ff_v_u8mf4(__VA_ARGS__) |
| #define vlseg7e8ff_v_u8mf4_m | ( | ... | ) | __riscv_vlseg7e8ff_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlseg7e8ff_v_u8mf8 | ( | ... | ) | __riscv_vlseg7e8ff_v_u8mf8(__VA_ARGS__) |
| #define vlseg7e8ff_v_u8mf8_m | ( | ... | ) | __riscv_vlseg7e8ff_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlseg8e16_v_f16m1 | ( | ... | ) | __riscv_vlseg8e16_v_f16m1(__VA_ARGS__) |
| #define vlseg8e16_v_f16m1_m | ( | ... | ) | __riscv_vlseg8e16_v_f16m1_tumu(__VA_ARGS__) |
| #define vlseg8e16_v_f16mf2 | ( | ... | ) | __riscv_vlseg8e16_v_f16mf2(__VA_ARGS__) |
| #define vlseg8e16_v_f16mf2_m | ( | ... | ) | __riscv_vlseg8e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlseg8e16_v_f16mf4 | ( | ... | ) | __riscv_vlseg8e16_v_f16mf4(__VA_ARGS__) |
| #define vlseg8e16_v_f16mf4_m | ( | ... | ) | __riscv_vlseg8e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlseg8e16_v_i16m1 | ( | ... | ) | __riscv_vlseg8e16_v_i16m1(__VA_ARGS__) |
| #define vlseg8e16_v_i16m1_m | ( | ... | ) | __riscv_vlseg8e16_v_i16m1_tumu(__VA_ARGS__) |
| #define vlseg8e16_v_i16mf2 | ( | ... | ) | __riscv_vlseg8e16_v_i16mf2(__VA_ARGS__) |
| #define vlseg8e16_v_i16mf2_m | ( | ... | ) | __riscv_vlseg8e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlseg8e16_v_i16mf4 | ( | ... | ) | __riscv_vlseg8e16_v_i16mf4(__VA_ARGS__) |
| #define vlseg8e16_v_i16mf4_m | ( | ... | ) | __riscv_vlseg8e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlseg8e16_v_u16m1 | ( | ... | ) | __riscv_vlseg8e16_v_u16m1(__VA_ARGS__) |
| #define vlseg8e16_v_u16m1_m | ( | ... | ) | __riscv_vlseg8e16_v_u16m1_tumu(__VA_ARGS__) |
| #define vlseg8e16_v_u16mf2 | ( | ... | ) | __riscv_vlseg8e16_v_u16mf2(__VA_ARGS__) |
| #define vlseg8e16_v_u16mf2_m | ( | ... | ) | __riscv_vlseg8e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlseg8e16_v_u16mf4 | ( | ... | ) | __riscv_vlseg8e16_v_u16mf4(__VA_ARGS__) |
| #define vlseg8e16_v_u16mf4_m | ( | ... | ) | __riscv_vlseg8e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlseg8e16ff_v_f16m1 | ( | ... | ) | __riscv_vlseg8e16ff_v_f16m1(__VA_ARGS__) |
| #define vlseg8e16ff_v_f16m1_m | ( | ... | ) | __riscv_vlseg8e16ff_v_f16m1_tumu(__VA_ARGS__) |
| #define vlseg8e16ff_v_f16mf2 | ( | ... | ) | __riscv_vlseg8e16ff_v_f16mf2(__VA_ARGS__) |
| #define vlseg8e16ff_v_f16mf2_m | ( | ... | ) | __riscv_vlseg8e16ff_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlseg8e16ff_v_f16mf4 | ( | ... | ) | __riscv_vlseg8e16ff_v_f16mf4(__VA_ARGS__) |
| #define vlseg8e16ff_v_f16mf4_m | ( | ... | ) | __riscv_vlseg8e16ff_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlseg8e16ff_v_i16m1 | ( | ... | ) | __riscv_vlseg8e16ff_v_i16m1(__VA_ARGS__) |
| #define vlseg8e16ff_v_i16m1_m | ( | ... | ) | __riscv_vlseg8e16ff_v_i16m1_tumu(__VA_ARGS__) |
| #define vlseg8e16ff_v_i16mf2 | ( | ... | ) | __riscv_vlseg8e16ff_v_i16mf2(__VA_ARGS__) |
| #define vlseg8e16ff_v_i16mf2_m | ( | ... | ) | __riscv_vlseg8e16ff_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlseg8e16ff_v_i16mf4 | ( | ... | ) | __riscv_vlseg8e16ff_v_i16mf4(__VA_ARGS__) |
| #define vlseg8e16ff_v_i16mf4_m | ( | ... | ) | __riscv_vlseg8e16ff_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlseg8e16ff_v_u16m1 | ( | ... | ) | __riscv_vlseg8e16ff_v_u16m1(__VA_ARGS__) |
| #define vlseg8e16ff_v_u16m1_m | ( | ... | ) | __riscv_vlseg8e16ff_v_u16m1_tumu(__VA_ARGS__) |
| #define vlseg8e16ff_v_u16mf2 | ( | ... | ) | __riscv_vlseg8e16ff_v_u16mf2(__VA_ARGS__) |
| #define vlseg8e16ff_v_u16mf2_m | ( | ... | ) | __riscv_vlseg8e16ff_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlseg8e16ff_v_u16mf4 | ( | ... | ) | __riscv_vlseg8e16ff_v_u16mf4(__VA_ARGS__) |
| #define vlseg8e16ff_v_u16mf4_m | ( | ... | ) | __riscv_vlseg8e16ff_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlseg8e32_v_f32m1 | ( | ... | ) | __riscv_vlseg8e32_v_f32m1(__VA_ARGS__) |
| #define vlseg8e32_v_f32m1_m | ( | ... | ) | __riscv_vlseg8e32_v_f32m1_tumu(__VA_ARGS__) |
| #define vlseg8e32_v_f32mf2 | ( | ... | ) | __riscv_vlseg8e32_v_f32mf2(__VA_ARGS__) |
| #define vlseg8e32_v_f32mf2_m | ( | ... | ) | __riscv_vlseg8e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlseg8e32_v_i32m1 | ( | ... | ) | __riscv_vlseg8e32_v_i32m1(__VA_ARGS__) |
| #define vlseg8e32_v_i32m1_m | ( | ... | ) | __riscv_vlseg8e32_v_i32m1_tumu(__VA_ARGS__) |
| #define vlseg8e32_v_i32mf2 | ( | ... | ) | __riscv_vlseg8e32_v_i32mf2(__VA_ARGS__) |
| #define vlseg8e32_v_i32mf2_m | ( | ... | ) | __riscv_vlseg8e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlseg8e32_v_u32m1 | ( | ... | ) | __riscv_vlseg8e32_v_u32m1(__VA_ARGS__) |
| #define vlseg8e32_v_u32m1_m | ( | ... | ) | __riscv_vlseg8e32_v_u32m1_tumu(__VA_ARGS__) |
| #define vlseg8e32_v_u32mf2 | ( | ... | ) | __riscv_vlseg8e32_v_u32mf2(__VA_ARGS__) |
| #define vlseg8e32_v_u32mf2_m | ( | ... | ) | __riscv_vlseg8e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlseg8e32ff_v_f32m1 | ( | ... | ) | __riscv_vlseg8e32ff_v_f32m1(__VA_ARGS__) |
| #define vlseg8e32ff_v_f32m1_m | ( | ... | ) | __riscv_vlseg8e32ff_v_f32m1_tumu(__VA_ARGS__) |
| #define vlseg8e32ff_v_f32mf2 | ( | ... | ) | __riscv_vlseg8e32ff_v_f32mf2(__VA_ARGS__) |
| #define vlseg8e32ff_v_f32mf2_m | ( | ... | ) | __riscv_vlseg8e32ff_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlseg8e32ff_v_i32m1 | ( | ... | ) | __riscv_vlseg8e32ff_v_i32m1(__VA_ARGS__) |
| #define vlseg8e32ff_v_i32m1_m | ( | ... | ) | __riscv_vlseg8e32ff_v_i32m1_tumu(__VA_ARGS__) |
| #define vlseg8e32ff_v_i32mf2 | ( | ... | ) | __riscv_vlseg8e32ff_v_i32mf2(__VA_ARGS__) |
| #define vlseg8e32ff_v_i32mf2_m | ( | ... | ) | __riscv_vlseg8e32ff_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlseg8e32ff_v_u32m1 | ( | ... | ) | __riscv_vlseg8e32ff_v_u32m1(__VA_ARGS__) |
| #define vlseg8e32ff_v_u32m1_m | ( | ... | ) | __riscv_vlseg8e32ff_v_u32m1_tumu(__VA_ARGS__) |
| #define vlseg8e32ff_v_u32mf2 | ( | ... | ) | __riscv_vlseg8e32ff_v_u32mf2(__VA_ARGS__) |
| #define vlseg8e32ff_v_u32mf2_m | ( | ... | ) | __riscv_vlseg8e32ff_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlseg8e64_v_f64m1 | ( | ... | ) | __riscv_vlseg8e64_v_f64m1(__VA_ARGS__) |
| #define vlseg8e64_v_f64m1_m | ( | ... | ) | __riscv_vlseg8e64_v_f64m1_tumu(__VA_ARGS__) |
| #define vlseg8e64_v_i64m1 | ( | ... | ) | __riscv_vlseg8e64_v_i64m1(__VA_ARGS__) |
| #define vlseg8e64_v_i64m1_m | ( | ... | ) | __riscv_vlseg8e64_v_i64m1_tumu(__VA_ARGS__) |
| #define vlseg8e64_v_u64m1 | ( | ... | ) | __riscv_vlseg8e64_v_u64m1(__VA_ARGS__) |
| #define vlseg8e64_v_u64m1_m | ( | ... | ) | __riscv_vlseg8e64_v_u64m1_tumu(__VA_ARGS__) |
| #define vlseg8e64ff_v_f64m1 | ( | ... | ) | __riscv_vlseg8e64ff_v_f64m1(__VA_ARGS__) |
| #define vlseg8e64ff_v_f64m1_m | ( | ... | ) | __riscv_vlseg8e64ff_v_f64m1_tumu(__VA_ARGS__) |
| #define vlseg8e64ff_v_i64m1 | ( | ... | ) | __riscv_vlseg8e64ff_v_i64m1(__VA_ARGS__) |
| #define vlseg8e64ff_v_i64m1_m | ( | ... | ) | __riscv_vlseg8e64ff_v_i64m1_tumu(__VA_ARGS__) |
| #define vlseg8e64ff_v_u64m1 | ( | ... | ) | __riscv_vlseg8e64ff_v_u64m1(__VA_ARGS__) |
| #define vlseg8e64ff_v_u64m1_m | ( | ... | ) | __riscv_vlseg8e64ff_v_u64m1_tumu(__VA_ARGS__) |
| #define vlseg8e8_v_i8m1 | ( | ... | ) | __riscv_vlseg8e8_v_i8m1(__VA_ARGS__) |
| #define vlseg8e8_v_i8m1_m | ( | ... | ) | __riscv_vlseg8e8_v_i8m1_tumu(__VA_ARGS__) |
| #define vlseg8e8_v_i8mf2 | ( | ... | ) | __riscv_vlseg8e8_v_i8mf2(__VA_ARGS__) |
| #define vlseg8e8_v_i8mf2_m | ( | ... | ) | __riscv_vlseg8e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlseg8e8_v_i8mf4 | ( | ... | ) | __riscv_vlseg8e8_v_i8mf4(__VA_ARGS__) |
| #define vlseg8e8_v_i8mf4_m | ( | ... | ) | __riscv_vlseg8e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlseg8e8_v_i8mf8 | ( | ... | ) | __riscv_vlseg8e8_v_i8mf8(__VA_ARGS__) |
| #define vlseg8e8_v_i8mf8_m | ( | ... | ) | __riscv_vlseg8e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlseg8e8_v_u8m1 | ( | ... | ) | __riscv_vlseg8e8_v_u8m1(__VA_ARGS__) |
| #define vlseg8e8_v_u8m1_m | ( | ... | ) | __riscv_vlseg8e8_v_u8m1_tumu(__VA_ARGS__) |
| #define vlseg8e8_v_u8mf2 | ( | ... | ) | __riscv_vlseg8e8_v_u8mf2(__VA_ARGS__) |
| #define vlseg8e8_v_u8mf2_m | ( | ... | ) | __riscv_vlseg8e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlseg8e8_v_u8mf4 | ( | ... | ) | __riscv_vlseg8e8_v_u8mf4(__VA_ARGS__) |
| #define vlseg8e8_v_u8mf4_m | ( | ... | ) | __riscv_vlseg8e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlseg8e8_v_u8mf8 | ( | ... | ) | __riscv_vlseg8e8_v_u8mf8(__VA_ARGS__) |
| #define vlseg8e8_v_u8mf8_m | ( | ... | ) | __riscv_vlseg8e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlseg8e8ff_v_i8m1 | ( | ... | ) | __riscv_vlseg8e8ff_v_i8m1(__VA_ARGS__) |
| #define vlseg8e8ff_v_i8m1_m | ( | ... | ) | __riscv_vlseg8e8ff_v_i8m1_tumu(__VA_ARGS__) |
| #define vlseg8e8ff_v_i8mf2 | ( | ... | ) | __riscv_vlseg8e8ff_v_i8mf2(__VA_ARGS__) |
| #define vlseg8e8ff_v_i8mf2_m | ( | ... | ) | __riscv_vlseg8e8ff_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlseg8e8ff_v_i8mf4 | ( | ... | ) | __riscv_vlseg8e8ff_v_i8mf4(__VA_ARGS__) |
| #define vlseg8e8ff_v_i8mf4_m | ( | ... | ) | __riscv_vlseg8e8ff_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlseg8e8ff_v_i8mf8 | ( | ... | ) | __riscv_vlseg8e8ff_v_i8mf8(__VA_ARGS__) |
| #define vlseg8e8ff_v_i8mf8_m | ( | ... | ) | __riscv_vlseg8e8ff_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlseg8e8ff_v_u8m1 | ( | ... | ) | __riscv_vlseg8e8ff_v_u8m1(__VA_ARGS__) |
| #define vlseg8e8ff_v_u8m1_m | ( | ... | ) | __riscv_vlseg8e8ff_v_u8m1_tumu(__VA_ARGS__) |
| #define vlseg8e8ff_v_u8mf2 | ( | ... | ) | __riscv_vlseg8e8ff_v_u8mf2(__VA_ARGS__) |
| #define vlseg8e8ff_v_u8mf2_m | ( | ... | ) | __riscv_vlseg8e8ff_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlseg8e8ff_v_u8mf4 | ( | ... | ) | __riscv_vlseg8e8ff_v_u8mf4(__VA_ARGS__) |
| #define vlseg8e8ff_v_u8mf4_m | ( | ... | ) | __riscv_vlseg8e8ff_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlseg8e8ff_v_u8mf8 | ( | ... | ) | __riscv_vlseg8e8ff_v_u8mf8(__VA_ARGS__) |
| #define vlseg8e8ff_v_u8mf8_m | ( | ... | ) | __riscv_vlseg8e8ff_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlsseg2e16_v_f16m1 | ( | ... | ) | __riscv_vlsseg2e16_v_f16m1(__VA_ARGS__) |
| #define vlsseg2e16_v_f16m1_m | ( | ... | ) | __riscv_vlsseg2e16_v_f16m1_tumu(__VA_ARGS__) |
| #define vlsseg2e16_v_f16m2 | ( | ... | ) | __riscv_vlsseg2e16_v_f16m2(__VA_ARGS__) |
| #define vlsseg2e16_v_f16m2_m | ( | ... | ) | __riscv_vlsseg2e16_v_f16m2_tumu(__VA_ARGS__) |
| #define vlsseg2e16_v_f16m4 | ( | ... | ) | __riscv_vlsseg2e16_v_f16m4(__VA_ARGS__) |
| #define vlsseg2e16_v_f16m4_m | ( | ... | ) | __riscv_vlsseg2e16_v_f16m4_tumu(__VA_ARGS__) |
| #define vlsseg2e16_v_f16mf2 | ( | ... | ) | __riscv_vlsseg2e16_v_f16mf2(__VA_ARGS__) |
| #define vlsseg2e16_v_f16mf2_m | ( | ... | ) | __riscv_vlsseg2e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlsseg2e16_v_f16mf4 | ( | ... | ) | __riscv_vlsseg2e16_v_f16mf4(__VA_ARGS__) |
| #define vlsseg2e16_v_f16mf4_m | ( | ... | ) | __riscv_vlsseg2e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlsseg2e16_v_i16m1 | ( | ... | ) | __riscv_vlsseg2e16_v_i16m1(__VA_ARGS__) |
| #define vlsseg2e16_v_i16m1_m | ( | ... | ) | __riscv_vlsseg2e16_v_i16m1_tumu(__VA_ARGS__) |
| #define vlsseg2e16_v_i16m2 | ( | ... | ) | __riscv_vlsseg2e16_v_i16m2(__VA_ARGS__) |
| #define vlsseg2e16_v_i16m2_m | ( | ... | ) | __riscv_vlsseg2e16_v_i16m2_tumu(__VA_ARGS__) |
| #define vlsseg2e16_v_i16m4 | ( | ... | ) | __riscv_vlsseg2e16_v_i16m4(__VA_ARGS__) |
| #define vlsseg2e16_v_i16m4_m | ( | ... | ) | __riscv_vlsseg2e16_v_i16m4_tumu(__VA_ARGS__) |
| #define vlsseg2e16_v_i16mf2 | ( | ... | ) | __riscv_vlsseg2e16_v_i16mf2(__VA_ARGS__) |
| #define vlsseg2e16_v_i16mf2_m | ( | ... | ) | __riscv_vlsseg2e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlsseg2e16_v_i16mf4 | ( | ... | ) | __riscv_vlsseg2e16_v_i16mf4(__VA_ARGS__) |
| #define vlsseg2e16_v_i16mf4_m | ( | ... | ) | __riscv_vlsseg2e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlsseg2e16_v_u16m1 | ( | ... | ) | __riscv_vlsseg2e16_v_u16m1(__VA_ARGS__) |
| #define vlsseg2e16_v_u16m1_m | ( | ... | ) | __riscv_vlsseg2e16_v_u16m1_tumu(__VA_ARGS__) |
| #define vlsseg2e16_v_u16m2 | ( | ... | ) | __riscv_vlsseg2e16_v_u16m2(__VA_ARGS__) |
| #define vlsseg2e16_v_u16m2_m | ( | ... | ) | __riscv_vlsseg2e16_v_u16m2_tumu(__VA_ARGS__) |
| #define vlsseg2e16_v_u16m4 | ( | ... | ) | __riscv_vlsseg2e16_v_u16m4(__VA_ARGS__) |
| #define vlsseg2e16_v_u16m4_m | ( | ... | ) | __riscv_vlsseg2e16_v_u16m4_tumu(__VA_ARGS__) |
| #define vlsseg2e16_v_u16mf2 | ( | ... | ) | __riscv_vlsseg2e16_v_u16mf2(__VA_ARGS__) |
| #define vlsseg2e16_v_u16mf2_m | ( | ... | ) | __riscv_vlsseg2e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlsseg2e16_v_u16mf4 | ( | ... | ) | __riscv_vlsseg2e16_v_u16mf4(__VA_ARGS__) |
| #define vlsseg2e16_v_u16mf4_m | ( | ... | ) | __riscv_vlsseg2e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlsseg2e32_v_f32m1 | ( | ... | ) | __riscv_vlsseg2e32_v_f32m1(__VA_ARGS__) |
| #define vlsseg2e32_v_f32m1_m | ( | ... | ) | __riscv_vlsseg2e32_v_f32m1_tumu(__VA_ARGS__) |
| #define vlsseg2e32_v_f32m2 | ( | ... | ) | __riscv_vlsseg2e32_v_f32m2(__VA_ARGS__) |
| #define vlsseg2e32_v_f32m2_m | ( | ... | ) | __riscv_vlsseg2e32_v_f32m2_tumu(__VA_ARGS__) |
| #define vlsseg2e32_v_f32m4 | ( | ... | ) | __riscv_vlsseg2e32_v_f32m4(__VA_ARGS__) |
| #define vlsseg2e32_v_f32m4_m | ( | ... | ) | __riscv_vlsseg2e32_v_f32m4_tumu(__VA_ARGS__) |
| #define vlsseg2e32_v_f32mf2 | ( | ... | ) | __riscv_vlsseg2e32_v_f32mf2(__VA_ARGS__) |
| #define vlsseg2e32_v_f32mf2_m | ( | ... | ) | __riscv_vlsseg2e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlsseg2e32_v_i32m1 | ( | ... | ) | __riscv_vlsseg2e32_v_i32m1(__VA_ARGS__) |
| #define vlsseg2e32_v_i32m1_m | ( | ... | ) | __riscv_vlsseg2e32_v_i32m1_tumu(__VA_ARGS__) |
| #define vlsseg2e32_v_i32m2 | ( | ... | ) | __riscv_vlsseg2e32_v_i32m2(__VA_ARGS__) |
| #define vlsseg2e32_v_i32m2_m | ( | ... | ) | __riscv_vlsseg2e32_v_i32m2_tumu(__VA_ARGS__) |
| #define vlsseg2e32_v_i32m4 | ( | ... | ) | __riscv_vlsseg2e32_v_i32m4(__VA_ARGS__) |
| #define vlsseg2e32_v_i32m4_m | ( | ... | ) | __riscv_vlsseg2e32_v_i32m4_tumu(__VA_ARGS__) |
| #define vlsseg2e32_v_i32mf2 | ( | ... | ) | __riscv_vlsseg2e32_v_i32mf2(__VA_ARGS__) |
| #define vlsseg2e32_v_i32mf2_m | ( | ... | ) | __riscv_vlsseg2e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlsseg2e32_v_u32m1 | ( | ... | ) | __riscv_vlsseg2e32_v_u32m1(__VA_ARGS__) |
| #define vlsseg2e32_v_u32m1_m | ( | ... | ) | __riscv_vlsseg2e32_v_u32m1_tumu(__VA_ARGS__) |
| #define vlsseg2e32_v_u32m2 | ( | ... | ) | __riscv_vlsseg2e32_v_u32m2(__VA_ARGS__) |
| #define vlsseg2e32_v_u32m2_m | ( | ... | ) | __riscv_vlsseg2e32_v_u32m2_tumu(__VA_ARGS__) |
| #define vlsseg2e32_v_u32m4 | ( | ... | ) | __riscv_vlsseg2e32_v_u32m4(__VA_ARGS__) |
| #define vlsseg2e32_v_u32m4_m | ( | ... | ) | __riscv_vlsseg2e32_v_u32m4_tumu(__VA_ARGS__) |
| #define vlsseg2e32_v_u32mf2 | ( | ... | ) | __riscv_vlsseg2e32_v_u32mf2(__VA_ARGS__) |
| #define vlsseg2e32_v_u32mf2_m | ( | ... | ) | __riscv_vlsseg2e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlsseg2e64_v_f64m1 | ( | ... | ) | __riscv_vlsseg2e64_v_f64m1(__VA_ARGS__) |
| #define vlsseg2e64_v_f64m1_m | ( | ... | ) | __riscv_vlsseg2e64_v_f64m1_tumu(__VA_ARGS__) |
| #define vlsseg2e64_v_f64m2 | ( | ... | ) | __riscv_vlsseg2e64_v_f64m2(__VA_ARGS__) |
| #define vlsseg2e64_v_f64m2_m | ( | ... | ) | __riscv_vlsseg2e64_v_f64m2_tumu(__VA_ARGS__) |
| #define vlsseg2e64_v_f64m4 | ( | ... | ) | __riscv_vlsseg2e64_v_f64m4(__VA_ARGS__) |
| #define vlsseg2e64_v_f64m4_m | ( | ... | ) | __riscv_vlsseg2e64_v_f64m4_tumu(__VA_ARGS__) |
| #define vlsseg2e64_v_i64m1 | ( | ... | ) | __riscv_vlsseg2e64_v_i64m1(__VA_ARGS__) |
| #define vlsseg2e64_v_i64m1_m | ( | ... | ) | __riscv_vlsseg2e64_v_i64m1_tumu(__VA_ARGS__) |
| #define vlsseg2e64_v_i64m2 | ( | ... | ) | __riscv_vlsseg2e64_v_i64m2(__VA_ARGS__) |
| #define vlsseg2e64_v_i64m2_m | ( | ... | ) | __riscv_vlsseg2e64_v_i64m2_tumu(__VA_ARGS__) |
| #define vlsseg2e64_v_i64m4 | ( | ... | ) | __riscv_vlsseg2e64_v_i64m4(__VA_ARGS__) |
| #define vlsseg2e64_v_i64m4_m | ( | ... | ) | __riscv_vlsseg2e64_v_i64m4_tumu(__VA_ARGS__) |
| #define vlsseg2e64_v_u64m1 | ( | ... | ) | __riscv_vlsseg2e64_v_u64m1(__VA_ARGS__) |
| #define vlsseg2e64_v_u64m1_m | ( | ... | ) | __riscv_vlsseg2e64_v_u64m1_tumu(__VA_ARGS__) |
| #define vlsseg2e64_v_u64m2 | ( | ... | ) | __riscv_vlsseg2e64_v_u64m2(__VA_ARGS__) |
| #define vlsseg2e64_v_u64m2_m | ( | ... | ) | __riscv_vlsseg2e64_v_u64m2_tumu(__VA_ARGS__) |
| #define vlsseg2e64_v_u64m4 | ( | ... | ) | __riscv_vlsseg2e64_v_u64m4(__VA_ARGS__) |
| #define vlsseg2e64_v_u64m4_m | ( | ... | ) | __riscv_vlsseg2e64_v_u64m4_tumu(__VA_ARGS__) |
| #define vlsseg2e8_v_i8m1 | ( | ... | ) | __riscv_vlsseg2e8_v_i8m1(__VA_ARGS__) |
| #define vlsseg2e8_v_i8m1_m | ( | ... | ) | __riscv_vlsseg2e8_v_i8m1_tumu(__VA_ARGS__) |
| #define vlsseg2e8_v_i8m2 | ( | ... | ) | __riscv_vlsseg2e8_v_i8m2(__VA_ARGS__) |
| #define vlsseg2e8_v_i8m2_m | ( | ... | ) | __riscv_vlsseg2e8_v_i8m2_tumu(__VA_ARGS__) |
| #define vlsseg2e8_v_i8m4 | ( | ... | ) | __riscv_vlsseg2e8_v_i8m4(__VA_ARGS__) |
| #define vlsseg2e8_v_i8m4_m | ( | ... | ) | __riscv_vlsseg2e8_v_i8m4_tumu(__VA_ARGS__) |
| #define vlsseg2e8_v_i8mf2 | ( | ... | ) | __riscv_vlsseg2e8_v_i8mf2(__VA_ARGS__) |
| #define vlsseg2e8_v_i8mf2_m | ( | ... | ) | __riscv_vlsseg2e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlsseg2e8_v_i8mf4 | ( | ... | ) | __riscv_vlsseg2e8_v_i8mf4(__VA_ARGS__) |
| #define vlsseg2e8_v_i8mf4_m | ( | ... | ) | __riscv_vlsseg2e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlsseg2e8_v_i8mf8 | ( | ... | ) | __riscv_vlsseg2e8_v_i8mf8(__VA_ARGS__) |
| #define vlsseg2e8_v_i8mf8_m | ( | ... | ) | __riscv_vlsseg2e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlsseg2e8_v_u8m1 | ( | ... | ) | __riscv_vlsseg2e8_v_u8m1(__VA_ARGS__) |
| #define vlsseg2e8_v_u8m1_m | ( | ... | ) | __riscv_vlsseg2e8_v_u8m1_tumu(__VA_ARGS__) |
| #define vlsseg2e8_v_u8m2 | ( | ... | ) | __riscv_vlsseg2e8_v_u8m2(__VA_ARGS__) |
| #define vlsseg2e8_v_u8m2_m | ( | ... | ) | __riscv_vlsseg2e8_v_u8m2_tumu(__VA_ARGS__) |
| #define vlsseg2e8_v_u8m4 | ( | ... | ) | __riscv_vlsseg2e8_v_u8m4(__VA_ARGS__) |
| #define vlsseg2e8_v_u8m4_m | ( | ... | ) | __riscv_vlsseg2e8_v_u8m4_tumu(__VA_ARGS__) |
| #define vlsseg2e8_v_u8mf2 | ( | ... | ) | __riscv_vlsseg2e8_v_u8mf2(__VA_ARGS__) |
| #define vlsseg2e8_v_u8mf2_m | ( | ... | ) | __riscv_vlsseg2e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlsseg2e8_v_u8mf4 | ( | ... | ) | __riscv_vlsseg2e8_v_u8mf4(__VA_ARGS__) |
| #define vlsseg2e8_v_u8mf4_m | ( | ... | ) | __riscv_vlsseg2e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlsseg2e8_v_u8mf8 | ( | ... | ) | __riscv_vlsseg2e8_v_u8mf8(__VA_ARGS__) |
| #define vlsseg2e8_v_u8mf8_m | ( | ... | ) | __riscv_vlsseg2e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlsseg3e16_v_f16m1 | ( | ... | ) | __riscv_vlsseg3e16_v_f16m1(__VA_ARGS__) |
| #define vlsseg3e16_v_f16m1_m | ( | ... | ) | __riscv_vlsseg3e16_v_f16m1_tumu(__VA_ARGS__) |
| #define vlsseg3e16_v_f16m2 | ( | ... | ) | __riscv_vlsseg3e16_v_f16m2(__VA_ARGS__) |
| #define vlsseg3e16_v_f16m2_m | ( | ... | ) | __riscv_vlsseg3e16_v_f16m2_tumu(__VA_ARGS__) |
| #define vlsseg3e16_v_f16mf2 | ( | ... | ) | __riscv_vlsseg3e16_v_f16mf2(__VA_ARGS__) |
| #define vlsseg3e16_v_f16mf2_m | ( | ... | ) | __riscv_vlsseg3e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlsseg3e16_v_f16mf4 | ( | ... | ) | __riscv_vlsseg3e16_v_f16mf4(__VA_ARGS__) |
| #define vlsseg3e16_v_f16mf4_m | ( | ... | ) | __riscv_vlsseg3e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlsseg3e16_v_i16m1 | ( | ... | ) | __riscv_vlsseg3e16_v_i16m1(__VA_ARGS__) |
| #define vlsseg3e16_v_i16m1_m | ( | ... | ) | __riscv_vlsseg3e16_v_i16m1_tumu(__VA_ARGS__) |
| #define vlsseg3e16_v_i16m2 | ( | ... | ) | __riscv_vlsseg3e16_v_i16m2(__VA_ARGS__) |
| #define vlsseg3e16_v_i16m2_m | ( | ... | ) | __riscv_vlsseg3e16_v_i16m2_tumu(__VA_ARGS__) |
| #define vlsseg3e16_v_i16mf2 | ( | ... | ) | __riscv_vlsseg3e16_v_i16mf2(__VA_ARGS__) |
| #define vlsseg3e16_v_i16mf2_m | ( | ... | ) | __riscv_vlsseg3e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlsseg3e16_v_i16mf4 | ( | ... | ) | __riscv_vlsseg3e16_v_i16mf4(__VA_ARGS__) |
| #define vlsseg3e16_v_i16mf4_m | ( | ... | ) | __riscv_vlsseg3e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlsseg3e16_v_u16m1 | ( | ... | ) | __riscv_vlsseg3e16_v_u16m1(__VA_ARGS__) |
| #define vlsseg3e16_v_u16m1_m | ( | ... | ) | __riscv_vlsseg3e16_v_u16m1_tumu(__VA_ARGS__) |
| #define vlsseg3e16_v_u16m2 | ( | ... | ) | __riscv_vlsseg3e16_v_u16m2(__VA_ARGS__) |
| #define vlsseg3e16_v_u16m2_m | ( | ... | ) | __riscv_vlsseg3e16_v_u16m2_tumu(__VA_ARGS__) |
| #define vlsseg3e16_v_u16mf2 | ( | ... | ) | __riscv_vlsseg3e16_v_u16mf2(__VA_ARGS__) |
| #define vlsseg3e16_v_u16mf2_m | ( | ... | ) | __riscv_vlsseg3e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlsseg3e16_v_u16mf4 | ( | ... | ) | __riscv_vlsseg3e16_v_u16mf4(__VA_ARGS__) |
| #define vlsseg3e16_v_u16mf4_m | ( | ... | ) | __riscv_vlsseg3e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlsseg3e32_v_f32m1 | ( | ... | ) | __riscv_vlsseg3e32_v_f32m1(__VA_ARGS__) |
| #define vlsseg3e32_v_f32m1_m | ( | ... | ) | __riscv_vlsseg3e32_v_f32m1_tumu(__VA_ARGS__) |
| #define vlsseg3e32_v_f32m2 | ( | ... | ) | __riscv_vlsseg3e32_v_f32m2(__VA_ARGS__) |
| #define vlsseg3e32_v_f32m2_m | ( | ... | ) | __riscv_vlsseg3e32_v_f32m2_tumu(__VA_ARGS__) |
| #define vlsseg3e32_v_f32mf2 | ( | ... | ) | __riscv_vlsseg3e32_v_f32mf2(__VA_ARGS__) |
| #define vlsseg3e32_v_f32mf2_m | ( | ... | ) | __riscv_vlsseg3e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlsseg3e32_v_i32m1 | ( | ... | ) | __riscv_vlsseg3e32_v_i32m1(__VA_ARGS__) |
| #define vlsseg3e32_v_i32m1_m | ( | ... | ) | __riscv_vlsseg3e32_v_i32m1_tumu(__VA_ARGS__) |
| #define vlsseg3e32_v_i32m2 | ( | ... | ) | __riscv_vlsseg3e32_v_i32m2(__VA_ARGS__) |
| #define vlsseg3e32_v_i32m2_m | ( | ... | ) | __riscv_vlsseg3e32_v_i32m2_tumu(__VA_ARGS__) |
| #define vlsseg3e32_v_i32mf2 | ( | ... | ) | __riscv_vlsseg3e32_v_i32mf2(__VA_ARGS__) |
| #define vlsseg3e32_v_i32mf2_m | ( | ... | ) | __riscv_vlsseg3e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlsseg3e32_v_u32m1 | ( | ... | ) | __riscv_vlsseg3e32_v_u32m1(__VA_ARGS__) |
| #define vlsseg3e32_v_u32m1_m | ( | ... | ) | __riscv_vlsseg3e32_v_u32m1_tumu(__VA_ARGS__) |
| #define vlsseg3e32_v_u32m2 | ( | ... | ) | __riscv_vlsseg3e32_v_u32m2(__VA_ARGS__) |
| #define vlsseg3e32_v_u32m2_m | ( | ... | ) | __riscv_vlsseg3e32_v_u32m2_tumu(__VA_ARGS__) |
| #define vlsseg3e32_v_u32mf2 | ( | ... | ) | __riscv_vlsseg3e32_v_u32mf2(__VA_ARGS__) |
| #define vlsseg3e32_v_u32mf2_m | ( | ... | ) | __riscv_vlsseg3e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlsseg3e64_v_f64m1 | ( | ... | ) | __riscv_vlsseg3e64_v_f64m1(__VA_ARGS__) |
| #define vlsseg3e64_v_f64m1_m | ( | ... | ) | __riscv_vlsseg3e64_v_f64m1_tumu(__VA_ARGS__) |
| #define vlsseg3e64_v_f64m2 | ( | ... | ) | __riscv_vlsseg3e64_v_f64m2(__VA_ARGS__) |
| #define vlsseg3e64_v_f64m2_m | ( | ... | ) | __riscv_vlsseg3e64_v_f64m2_tumu(__VA_ARGS__) |
| #define vlsseg3e64_v_i64m1 | ( | ... | ) | __riscv_vlsseg3e64_v_i64m1(__VA_ARGS__) |
| #define vlsseg3e64_v_i64m1_m | ( | ... | ) | __riscv_vlsseg3e64_v_i64m1_tumu(__VA_ARGS__) |
| #define vlsseg3e64_v_i64m2 | ( | ... | ) | __riscv_vlsseg3e64_v_i64m2(__VA_ARGS__) |
| #define vlsseg3e64_v_i64m2_m | ( | ... | ) | __riscv_vlsseg3e64_v_i64m2_tumu(__VA_ARGS__) |
| #define vlsseg3e64_v_u64m1 | ( | ... | ) | __riscv_vlsseg3e64_v_u64m1(__VA_ARGS__) |
| #define vlsseg3e64_v_u64m1_m | ( | ... | ) | __riscv_vlsseg3e64_v_u64m1_tumu(__VA_ARGS__) |
| #define vlsseg3e64_v_u64m2 | ( | ... | ) | __riscv_vlsseg3e64_v_u64m2(__VA_ARGS__) |
| #define vlsseg3e64_v_u64m2_m | ( | ... | ) | __riscv_vlsseg3e64_v_u64m2_tumu(__VA_ARGS__) |
| #define vlsseg3e8_v_i8m1 | ( | ... | ) | __riscv_vlsseg3e8_v_i8m1(__VA_ARGS__) |
| #define vlsseg3e8_v_i8m1_m | ( | ... | ) | __riscv_vlsseg3e8_v_i8m1_tumu(__VA_ARGS__) |
| #define vlsseg3e8_v_i8m2 | ( | ... | ) | __riscv_vlsseg3e8_v_i8m2(__VA_ARGS__) |
| #define vlsseg3e8_v_i8m2_m | ( | ... | ) | __riscv_vlsseg3e8_v_i8m2_tumu(__VA_ARGS__) |
| #define vlsseg3e8_v_i8mf2 | ( | ... | ) | __riscv_vlsseg3e8_v_i8mf2(__VA_ARGS__) |
| #define vlsseg3e8_v_i8mf2_m | ( | ... | ) | __riscv_vlsseg3e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlsseg3e8_v_i8mf4 | ( | ... | ) | __riscv_vlsseg3e8_v_i8mf4(__VA_ARGS__) |
| #define vlsseg3e8_v_i8mf4_m | ( | ... | ) | __riscv_vlsseg3e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlsseg3e8_v_i8mf8 | ( | ... | ) | __riscv_vlsseg3e8_v_i8mf8(__VA_ARGS__) |
| #define vlsseg3e8_v_i8mf8_m | ( | ... | ) | __riscv_vlsseg3e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlsseg3e8_v_u8m1 | ( | ... | ) | __riscv_vlsseg3e8_v_u8m1(__VA_ARGS__) |
| #define vlsseg3e8_v_u8m1_m | ( | ... | ) | __riscv_vlsseg3e8_v_u8m1_tumu(__VA_ARGS__) |
| #define vlsseg3e8_v_u8m2 | ( | ... | ) | __riscv_vlsseg3e8_v_u8m2(__VA_ARGS__) |
| #define vlsseg3e8_v_u8m2_m | ( | ... | ) | __riscv_vlsseg3e8_v_u8m2_tumu(__VA_ARGS__) |
| #define vlsseg3e8_v_u8mf2 | ( | ... | ) | __riscv_vlsseg3e8_v_u8mf2(__VA_ARGS__) |
| #define vlsseg3e8_v_u8mf2_m | ( | ... | ) | __riscv_vlsseg3e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlsseg3e8_v_u8mf4 | ( | ... | ) | __riscv_vlsseg3e8_v_u8mf4(__VA_ARGS__) |
| #define vlsseg3e8_v_u8mf4_m | ( | ... | ) | __riscv_vlsseg3e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlsseg3e8_v_u8mf8 | ( | ... | ) | __riscv_vlsseg3e8_v_u8mf8(__VA_ARGS__) |
| #define vlsseg3e8_v_u8mf8_m | ( | ... | ) | __riscv_vlsseg3e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlsseg4e16_v_f16m1 | ( | ... | ) | __riscv_vlsseg4e16_v_f16m1(__VA_ARGS__) |
| #define vlsseg4e16_v_f16m1_m | ( | ... | ) | __riscv_vlsseg4e16_v_f16m1_tumu(__VA_ARGS__) |
| #define vlsseg4e16_v_f16m2 | ( | ... | ) | __riscv_vlsseg4e16_v_f16m2(__VA_ARGS__) |
| #define vlsseg4e16_v_f16m2_m | ( | ... | ) | __riscv_vlsseg4e16_v_f16m2_tumu(__VA_ARGS__) |
| #define vlsseg4e16_v_f16mf2 | ( | ... | ) | __riscv_vlsseg4e16_v_f16mf2(__VA_ARGS__) |
| #define vlsseg4e16_v_f16mf2_m | ( | ... | ) | __riscv_vlsseg4e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlsseg4e16_v_f16mf4 | ( | ... | ) | __riscv_vlsseg4e16_v_f16mf4(__VA_ARGS__) |
| #define vlsseg4e16_v_f16mf4_m | ( | ... | ) | __riscv_vlsseg4e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlsseg4e16_v_i16m1 | ( | ... | ) | __riscv_vlsseg4e16_v_i16m1(__VA_ARGS__) |
| #define vlsseg4e16_v_i16m1_m | ( | ... | ) | __riscv_vlsseg4e16_v_i16m1_tumu(__VA_ARGS__) |
| #define vlsseg4e16_v_i16m2 | ( | ... | ) | __riscv_vlsseg4e16_v_i16m2(__VA_ARGS__) |
| #define vlsseg4e16_v_i16m2_m | ( | ... | ) | __riscv_vlsseg4e16_v_i16m2_tumu(__VA_ARGS__) |
| #define vlsseg4e16_v_i16mf2 | ( | ... | ) | __riscv_vlsseg4e16_v_i16mf2(__VA_ARGS__) |
| #define vlsseg4e16_v_i16mf2_m | ( | ... | ) | __riscv_vlsseg4e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlsseg4e16_v_i16mf4 | ( | ... | ) | __riscv_vlsseg4e16_v_i16mf4(__VA_ARGS__) |
| #define vlsseg4e16_v_i16mf4_m | ( | ... | ) | __riscv_vlsseg4e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlsseg4e16_v_u16m1 | ( | ... | ) | __riscv_vlsseg4e16_v_u16m1(__VA_ARGS__) |
| #define vlsseg4e16_v_u16m1_m | ( | ... | ) | __riscv_vlsseg4e16_v_u16m1_tumu(__VA_ARGS__) |
| #define vlsseg4e16_v_u16m2 | ( | ... | ) | __riscv_vlsseg4e16_v_u16m2(__VA_ARGS__) |
| #define vlsseg4e16_v_u16m2_m | ( | ... | ) | __riscv_vlsseg4e16_v_u16m2_tumu(__VA_ARGS__) |
| #define vlsseg4e16_v_u16mf2 | ( | ... | ) | __riscv_vlsseg4e16_v_u16mf2(__VA_ARGS__) |
| #define vlsseg4e16_v_u16mf2_m | ( | ... | ) | __riscv_vlsseg4e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlsseg4e16_v_u16mf4 | ( | ... | ) | __riscv_vlsseg4e16_v_u16mf4(__VA_ARGS__) |
| #define vlsseg4e16_v_u16mf4_m | ( | ... | ) | __riscv_vlsseg4e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlsseg4e32_v_f32m1 | ( | ... | ) | __riscv_vlsseg4e32_v_f32m1(__VA_ARGS__) |
| #define vlsseg4e32_v_f32m1_m | ( | ... | ) | __riscv_vlsseg4e32_v_f32m1_tumu(__VA_ARGS__) |
| #define vlsseg4e32_v_f32m2 | ( | ... | ) | __riscv_vlsseg4e32_v_f32m2(__VA_ARGS__) |
| #define vlsseg4e32_v_f32m2_m | ( | ... | ) | __riscv_vlsseg4e32_v_f32m2_tumu(__VA_ARGS__) |
| #define vlsseg4e32_v_f32mf2 | ( | ... | ) | __riscv_vlsseg4e32_v_f32mf2(__VA_ARGS__) |
| #define vlsseg4e32_v_f32mf2_m | ( | ... | ) | __riscv_vlsseg4e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlsseg4e32_v_i32m1 | ( | ... | ) | __riscv_vlsseg4e32_v_i32m1(__VA_ARGS__) |
| #define vlsseg4e32_v_i32m1_m | ( | ... | ) | __riscv_vlsseg4e32_v_i32m1_tumu(__VA_ARGS__) |
| #define vlsseg4e32_v_i32m2 | ( | ... | ) | __riscv_vlsseg4e32_v_i32m2(__VA_ARGS__) |
| #define vlsseg4e32_v_i32m2_m | ( | ... | ) | __riscv_vlsseg4e32_v_i32m2_tumu(__VA_ARGS__) |
| #define vlsseg4e32_v_i32mf2 | ( | ... | ) | __riscv_vlsseg4e32_v_i32mf2(__VA_ARGS__) |
| #define vlsseg4e32_v_i32mf2_m | ( | ... | ) | __riscv_vlsseg4e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlsseg4e32_v_u32m1 | ( | ... | ) | __riscv_vlsseg4e32_v_u32m1(__VA_ARGS__) |
| #define vlsseg4e32_v_u32m1_m | ( | ... | ) | __riscv_vlsseg4e32_v_u32m1_tumu(__VA_ARGS__) |
| #define vlsseg4e32_v_u32m2 | ( | ... | ) | __riscv_vlsseg4e32_v_u32m2(__VA_ARGS__) |
| #define vlsseg4e32_v_u32m2_m | ( | ... | ) | __riscv_vlsseg4e32_v_u32m2_tumu(__VA_ARGS__) |
| #define vlsseg4e32_v_u32mf2 | ( | ... | ) | __riscv_vlsseg4e32_v_u32mf2(__VA_ARGS__) |
| #define vlsseg4e32_v_u32mf2_m | ( | ... | ) | __riscv_vlsseg4e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlsseg4e64_v_f64m1 | ( | ... | ) | __riscv_vlsseg4e64_v_f64m1(__VA_ARGS__) |
| #define vlsseg4e64_v_f64m1_m | ( | ... | ) | __riscv_vlsseg4e64_v_f64m1_tumu(__VA_ARGS__) |
| #define vlsseg4e64_v_f64m2 | ( | ... | ) | __riscv_vlsseg4e64_v_f64m2(__VA_ARGS__) |
| #define vlsseg4e64_v_f64m2_m | ( | ... | ) | __riscv_vlsseg4e64_v_f64m2_tumu(__VA_ARGS__) |
| #define vlsseg4e64_v_i64m1 | ( | ... | ) | __riscv_vlsseg4e64_v_i64m1(__VA_ARGS__) |
| #define vlsseg4e64_v_i64m1_m | ( | ... | ) | __riscv_vlsseg4e64_v_i64m1_tumu(__VA_ARGS__) |
| #define vlsseg4e64_v_i64m2 | ( | ... | ) | __riscv_vlsseg4e64_v_i64m2(__VA_ARGS__) |
| #define vlsseg4e64_v_i64m2_m | ( | ... | ) | __riscv_vlsseg4e64_v_i64m2_tumu(__VA_ARGS__) |
| #define vlsseg4e64_v_u64m1 | ( | ... | ) | __riscv_vlsseg4e64_v_u64m1(__VA_ARGS__) |
| #define vlsseg4e64_v_u64m1_m | ( | ... | ) | __riscv_vlsseg4e64_v_u64m1_tumu(__VA_ARGS__) |
| #define vlsseg4e64_v_u64m2 | ( | ... | ) | __riscv_vlsseg4e64_v_u64m2(__VA_ARGS__) |
| #define vlsseg4e64_v_u64m2_m | ( | ... | ) | __riscv_vlsseg4e64_v_u64m2_tumu(__VA_ARGS__) |
| #define vlsseg4e8_v_i8m1 | ( | ... | ) | __riscv_vlsseg4e8_v_i8m1(__VA_ARGS__) |
| #define vlsseg4e8_v_i8m1_m | ( | ... | ) | __riscv_vlsseg4e8_v_i8m1_tumu(__VA_ARGS__) |
| #define vlsseg4e8_v_i8m2 | ( | ... | ) | __riscv_vlsseg4e8_v_i8m2(__VA_ARGS__) |
| #define vlsseg4e8_v_i8m2_m | ( | ... | ) | __riscv_vlsseg4e8_v_i8m2_tumu(__VA_ARGS__) |
| #define vlsseg4e8_v_i8mf2 | ( | ... | ) | __riscv_vlsseg4e8_v_i8mf2(__VA_ARGS__) |
| #define vlsseg4e8_v_i8mf2_m | ( | ... | ) | __riscv_vlsseg4e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlsseg4e8_v_i8mf4 | ( | ... | ) | __riscv_vlsseg4e8_v_i8mf4(__VA_ARGS__) |
| #define vlsseg4e8_v_i8mf4_m | ( | ... | ) | __riscv_vlsseg4e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlsseg4e8_v_i8mf8 | ( | ... | ) | __riscv_vlsseg4e8_v_i8mf8(__VA_ARGS__) |
| #define vlsseg4e8_v_i8mf8_m | ( | ... | ) | __riscv_vlsseg4e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlsseg4e8_v_u8m1 | ( | ... | ) | __riscv_vlsseg4e8_v_u8m1(__VA_ARGS__) |
| #define vlsseg4e8_v_u8m1_m | ( | ... | ) | __riscv_vlsseg4e8_v_u8m1_tumu(__VA_ARGS__) |
| #define vlsseg4e8_v_u8m2 | ( | ... | ) | __riscv_vlsseg4e8_v_u8m2(__VA_ARGS__) |
| #define vlsseg4e8_v_u8m2_m | ( | ... | ) | __riscv_vlsseg4e8_v_u8m2_tumu(__VA_ARGS__) |
| #define vlsseg4e8_v_u8mf2 | ( | ... | ) | __riscv_vlsseg4e8_v_u8mf2(__VA_ARGS__) |
| #define vlsseg4e8_v_u8mf2_m | ( | ... | ) | __riscv_vlsseg4e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlsseg4e8_v_u8mf4 | ( | ... | ) | __riscv_vlsseg4e8_v_u8mf4(__VA_ARGS__) |
| #define vlsseg4e8_v_u8mf4_m | ( | ... | ) | __riscv_vlsseg4e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlsseg4e8_v_u8mf8 | ( | ... | ) | __riscv_vlsseg4e8_v_u8mf8(__VA_ARGS__) |
| #define vlsseg4e8_v_u8mf8_m | ( | ... | ) | __riscv_vlsseg4e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlsseg5e16_v_f16m1 | ( | ... | ) | __riscv_vlsseg5e16_v_f16m1(__VA_ARGS__) |
| #define vlsseg5e16_v_f16m1_m | ( | ... | ) | __riscv_vlsseg5e16_v_f16m1_tumu(__VA_ARGS__) |
| #define vlsseg5e16_v_f16mf2 | ( | ... | ) | __riscv_vlsseg5e16_v_f16mf2(__VA_ARGS__) |
| #define vlsseg5e16_v_f16mf2_m | ( | ... | ) | __riscv_vlsseg5e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlsseg5e16_v_f16mf4 | ( | ... | ) | __riscv_vlsseg5e16_v_f16mf4(__VA_ARGS__) |
| #define vlsseg5e16_v_f16mf4_m | ( | ... | ) | __riscv_vlsseg5e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlsseg5e16_v_i16m1 | ( | ... | ) | __riscv_vlsseg5e16_v_i16m1(__VA_ARGS__) |
| #define vlsseg5e16_v_i16m1_m | ( | ... | ) | __riscv_vlsseg5e16_v_i16m1_tumu(__VA_ARGS__) |
| #define vlsseg5e16_v_i16mf2 | ( | ... | ) | __riscv_vlsseg5e16_v_i16mf2(__VA_ARGS__) |
| #define vlsseg5e16_v_i16mf2_m | ( | ... | ) | __riscv_vlsseg5e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlsseg5e16_v_i16mf4 | ( | ... | ) | __riscv_vlsseg5e16_v_i16mf4(__VA_ARGS__) |
| #define vlsseg5e16_v_i16mf4_m | ( | ... | ) | __riscv_vlsseg5e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlsseg5e16_v_u16m1 | ( | ... | ) | __riscv_vlsseg5e16_v_u16m1(__VA_ARGS__) |
| #define vlsseg5e16_v_u16m1_m | ( | ... | ) | __riscv_vlsseg5e16_v_u16m1_tumu(__VA_ARGS__) |
| #define vlsseg5e16_v_u16mf2 | ( | ... | ) | __riscv_vlsseg5e16_v_u16mf2(__VA_ARGS__) |
| #define vlsseg5e16_v_u16mf2_m | ( | ... | ) | __riscv_vlsseg5e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlsseg5e16_v_u16mf4 | ( | ... | ) | __riscv_vlsseg5e16_v_u16mf4(__VA_ARGS__) |
| #define vlsseg5e16_v_u16mf4_m | ( | ... | ) | __riscv_vlsseg5e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlsseg5e32_v_f32m1 | ( | ... | ) | __riscv_vlsseg5e32_v_f32m1(__VA_ARGS__) |
| #define vlsseg5e32_v_f32m1_m | ( | ... | ) | __riscv_vlsseg5e32_v_f32m1_tumu(__VA_ARGS__) |
| #define vlsseg5e32_v_f32mf2 | ( | ... | ) | __riscv_vlsseg5e32_v_f32mf2(__VA_ARGS__) |
| #define vlsseg5e32_v_f32mf2_m | ( | ... | ) | __riscv_vlsseg5e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlsseg5e32_v_i32m1 | ( | ... | ) | __riscv_vlsseg5e32_v_i32m1(__VA_ARGS__) |
| #define vlsseg5e32_v_i32m1_m | ( | ... | ) | __riscv_vlsseg5e32_v_i32m1_tumu(__VA_ARGS__) |
| #define vlsseg5e32_v_i32mf2 | ( | ... | ) | __riscv_vlsseg5e32_v_i32mf2(__VA_ARGS__) |
| #define vlsseg5e32_v_i32mf2_m | ( | ... | ) | __riscv_vlsseg5e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlsseg5e32_v_u32m1 | ( | ... | ) | __riscv_vlsseg5e32_v_u32m1(__VA_ARGS__) |
| #define vlsseg5e32_v_u32m1_m | ( | ... | ) | __riscv_vlsseg5e32_v_u32m1_tumu(__VA_ARGS__) |
| #define vlsseg5e32_v_u32mf2 | ( | ... | ) | __riscv_vlsseg5e32_v_u32mf2(__VA_ARGS__) |
| #define vlsseg5e32_v_u32mf2_m | ( | ... | ) | __riscv_vlsseg5e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlsseg5e64_v_f64m1 | ( | ... | ) | __riscv_vlsseg5e64_v_f64m1(__VA_ARGS__) |
| #define vlsseg5e64_v_f64m1_m | ( | ... | ) | __riscv_vlsseg5e64_v_f64m1_tumu(__VA_ARGS__) |
| #define vlsseg5e64_v_i64m1 | ( | ... | ) | __riscv_vlsseg5e64_v_i64m1(__VA_ARGS__) |
| #define vlsseg5e64_v_i64m1_m | ( | ... | ) | __riscv_vlsseg5e64_v_i64m1_tumu(__VA_ARGS__) |
| #define vlsseg5e64_v_u64m1 | ( | ... | ) | __riscv_vlsseg5e64_v_u64m1(__VA_ARGS__) |
| #define vlsseg5e64_v_u64m1_m | ( | ... | ) | __riscv_vlsseg5e64_v_u64m1_tumu(__VA_ARGS__) |
| #define vlsseg5e8_v_i8m1 | ( | ... | ) | __riscv_vlsseg5e8_v_i8m1(__VA_ARGS__) |
| #define vlsseg5e8_v_i8m1_m | ( | ... | ) | __riscv_vlsseg5e8_v_i8m1_tumu(__VA_ARGS__) |
| #define vlsseg5e8_v_i8mf2 | ( | ... | ) | __riscv_vlsseg5e8_v_i8mf2(__VA_ARGS__) |
| #define vlsseg5e8_v_i8mf2_m | ( | ... | ) | __riscv_vlsseg5e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlsseg5e8_v_i8mf4 | ( | ... | ) | __riscv_vlsseg5e8_v_i8mf4(__VA_ARGS__) |
| #define vlsseg5e8_v_i8mf4_m | ( | ... | ) | __riscv_vlsseg5e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlsseg5e8_v_i8mf8 | ( | ... | ) | __riscv_vlsseg5e8_v_i8mf8(__VA_ARGS__) |
| #define vlsseg5e8_v_i8mf8_m | ( | ... | ) | __riscv_vlsseg5e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlsseg5e8_v_u8m1 | ( | ... | ) | __riscv_vlsseg5e8_v_u8m1(__VA_ARGS__) |
| #define vlsseg5e8_v_u8m1_m | ( | ... | ) | __riscv_vlsseg5e8_v_u8m1_tumu(__VA_ARGS__) |
| #define vlsseg5e8_v_u8mf2 | ( | ... | ) | __riscv_vlsseg5e8_v_u8mf2(__VA_ARGS__) |
| #define vlsseg5e8_v_u8mf2_m | ( | ... | ) | __riscv_vlsseg5e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlsseg5e8_v_u8mf4 | ( | ... | ) | __riscv_vlsseg5e8_v_u8mf4(__VA_ARGS__) |
| #define vlsseg5e8_v_u8mf4_m | ( | ... | ) | __riscv_vlsseg5e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlsseg5e8_v_u8mf8 | ( | ... | ) | __riscv_vlsseg5e8_v_u8mf8(__VA_ARGS__) |
| #define vlsseg5e8_v_u8mf8_m | ( | ... | ) | __riscv_vlsseg5e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlsseg6e16_v_f16m1 | ( | ... | ) | __riscv_vlsseg6e16_v_f16m1(__VA_ARGS__) |
| #define vlsseg6e16_v_f16m1_m | ( | ... | ) | __riscv_vlsseg6e16_v_f16m1_tumu(__VA_ARGS__) |
| #define vlsseg6e16_v_f16mf2 | ( | ... | ) | __riscv_vlsseg6e16_v_f16mf2(__VA_ARGS__) |
| #define vlsseg6e16_v_f16mf2_m | ( | ... | ) | __riscv_vlsseg6e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlsseg6e16_v_f16mf4 | ( | ... | ) | __riscv_vlsseg6e16_v_f16mf4(__VA_ARGS__) |
| #define vlsseg6e16_v_f16mf4_m | ( | ... | ) | __riscv_vlsseg6e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlsseg6e16_v_i16m1 | ( | ... | ) | __riscv_vlsseg6e16_v_i16m1(__VA_ARGS__) |
| #define vlsseg6e16_v_i16m1_m | ( | ... | ) | __riscv_vlsseg6e16_v_i16m1_tumu(__VA_ARGS__) |
| #define vlsseg6e16_v_i16mf2 | ( | ... | ) | __riscv_vlsseg6e16_v_i16mf2(__VA_ARGS__) |
| #define vlsseg6e16_v_i16mf2_m | ( | ... | ) | __riscv_vlsseg6e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlsseg6e16_v_i16mf4 | ( | ... | ) | __riscv_vlsseg6e16_v_i16mf4(__VA_ARGS__) |
| #define vlsseg6e16_v_i16mf4_m | ( | ... | ) | __riscv_vlsseg6e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlsseg6e16_v_u16m1 | ( | ... | ) | __riscv_vlsseg6e16_v_u16m1(__VA_ARGS__) |
| #define vlsseg6e16_v_u16m1_m | ( | ... | ) | __riscv_vlsseg6e16_v_u16m1_tumu(__VA_ARGS__) |
| #define vlsseg6e16_v_u16mf2 | ( | ... | ) | __riscv_vlsseg6e16_v_u16mf2(__VA_ARGS__) |
| #define vlsseg6e16_v_u16mf2_m | ( | ... | ) | __riscv_vlsseg6e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlsseg6e16_v_u16mf4 | ( | ... | ) | __riscv_vlsseg6e16_v_u16mf4(__VA_ARGS__) |
| #define vlsseg6e16_v_u16mf4_m | ( | ... | ) | __riscv_vlsseg6e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlsseg6e32_v_f32m1 | ( | ... | ) | __riscv_vlsseg6e32_v_f32m1(__VA_ARGS__) |
| #define vlsseg6e32_v_f32m1_m | ( | ... | ) | __riscv_vlsseg6e32_v_f32m1_tumu(__VA_ARGS__) |
| #define vlsseg6e32_v_f32mf2 | ( | ... | ) | __riscv_vlsseg6e32_v_f32mf2(__VA_ARGS__) |
| #define vlsseg6e32_v_f32mf2_m | ( | ... | ) | __riscv_vlsseg6e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlsseg6e32_v_i32m1 | ( | ... | ) | __riscv_vlsseg6e32_v_i32m1(__VA_ARGS__) |
| #define vlsseg6e32_v_i32m1_m | ( | ... | ) | __riscv_vlsseg6e32_v_i32m1_tumu(__VA_ARGS__) |
| #define vlsseg6e32_v_i32mf2 | ( | ... | ) | __riscv_vlsseg6e32_v_i32mf2(__VA_ARGS__) |
| #define vlsseg6e32_v_i32mf2_m | ( | ... | ) | __riscv_vlsseg6e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlsseg6e32_v_u32m1 | ( | ... | ) | __riscv_vlsseg6e32_v_u32m1(__VA_ARGS__) |
| #define vlsseg6e32_v_u32m1_m | ( | ... | ) | __riscv_vlsseg6e32_v_u32m1_tumu(__VA_ARGS__) |
| #define vlsseg6e32_v_u32mf2 | ( | ... | ) | __riscv_vlsseg6e32_v_u32mf2(__VA_ARGS__) |
| #define vlsseg6e32_v_u32mf2_m | ( | ... | ) | __riscv_vlsseg6e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlsseg6e64_v_f64m1 | ( | ... | ) | __riscv_vlsseg6e64_v_f64m1(__VA_ARGS__) |
| #define vlsseg6e64_v_f64m1_m | ( | ... | ) | __riscv_vlsseg6e64_v_f64m1_tumu(__VA_ARGS__) |
| #define vlsseg6e64_v_i64m1 | ( | ... | ) | __riscv_vlsseg6e64_v_i64m1(__VA_ARGS__) |
| #define vlsseg6e64_v_i64m1_m | ( | ... | ) | __riscv_vlsseg6e64_v_i64m1_tumu(__VA_ARGS__) |
| #define vlsseg6e64_v_u64m1 | ( | ... | ) | __riscv_vlsseg6e64_v_u64m1(__VA_ARGS__) |
| #define vlsseg6e64_v_u64m1_m | ( | ... | ) | __riscv_vlsseg6e64_v_u64m1_tumu(__VA_ARGS__) |
| #define vlsseg6e8_v_i8m1 | ( | ... | ) | __riscv_vlsseg6e8_v_i8m1(__VA_ARGS__) |
| #define vlsseg6e8_v_i8m1_m | ( | ... | ) | __riscv_vlsseg6e8_v_i8m1_tumu(__VA_ARGS__) |
| #define vlsseg6e8_v_i8mf2 | ( | ... | ) | __riscv_vlsseg6e8_v_i8mf2(__VA_ARGS__) |
| #define vlsseg6e8_v_i8mf2_m | ( | ... | ) | __riscv_vlsseg6e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlsseg6e8_v_i8mf4 | ( | ... | ) | __riscv_vlsseg6e8_v_i8mf4(__VA_ARGS__) |
| #define vlsseg6e8_v_i8mf4_m | ( | ... | ) | __riscv_vlsseg6e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlsseg6e8_v_i8mf8 | ( | ... | ) | __riscv_vlsseg6e8_v_i8mf8(__VA_ARGS__) |
| #define vlsseg6e8_v_i8mf8_m | ( | ... | ) | __riscv_vlsseg6e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlsseg6e8_v_u8m1 | ( | ... | ) | __riscv_vlsseg6e8_v_u8m1(__VA_ARGS__) |
| #define vlsseg6e8_v_u8m1_m | ( | ... | ) | __riscv_vlsseg6e8_v_u8m1_tumu(__VA_ARGS__) |
| #define vlsseg6e8_v_u8mf2 | ( | ... | ) | __riscv_vlsseg6e8_v_u8mf2(__VA_ARGS__) |
| #define vlsseg6e8_v_u8mf2_m | ( | ... | ) | __riscv_vlsseg6e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlsseg6e8_v_u8mf4 | ( | ... | ) | __riscv_vlsseg6e8_v_u8mf4(__VA_ARGS__) |
| #define vlsseg6e8_v_u8mf4_m | ( | ... | ) | __riscv_vlsseg6e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlsseg6e8_v_u8mf8 | ( | ... | ) | __riscv_vlsseg6e8_v_u8mf8(__VA_ARGS__) |
| #define vlsseg6e8_v_u8mf8_m | ( | ... | ) | __riscv_vlsseg6e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlsseg7e16_v_f16m1 | ( | ... | ) | __riscv_vlsseg7e16_v_f16m1(__VA_ARGS__) |
| #define vlsseg7e16_v_f16m1_m | ( | ... | ) | __riscv_vlsseg7e16_v_f16m1_tumu(__VA_ARGS__) |
| #define vlsseg7e16_v_f16mf2 | ( | ... | ) | __riscv_vlsseg7e16_v_f16mf2(__VA_ARGS__) |
| #define vlsseg7e16_v_f16mf2_m | ( | ... | ) | __riscv_vlsseg7e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlsseg7e16_v_f16mf4 | ( | ... | ) | __riscv_vlsseg7e16_v_f16mf4(__VA_ARGS__) |
| #define vlsseg7e16_v_f16mf4_m | ( | ... | ) | __riscv_vlsseg7e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlsseg7e16_v_i16m1 | ( | ... | ) | __riscv_vlsseg7e16_v_i16m1(__VA_ARGS__) |
| #define vlsseg7e16_v_i16m1_m | ( | ... | ) | __riscv_vlsseg7e16_v_i16m1_tumu(__VA_ARGS__) |
| #define vlsseg7e16_v_i16mf2 | ( | ... | ) | __riscv_vlsseg7e16_v_i16mf2(__VA_ARGS__) |
| #define vlsseg7e16_v_i16mf2_m | ( | ... | ) | __riscv_vlsseg7e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlsseg7e16_v_i16mf4 | ( | ... | ) | __riscv_vlsseg7e16_v_i16mf4(__VA_ARGS__) |
| #define vlsseg7e16_v_i16mf4_m | ( | ... | ) | __riscv_vlsseg7e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlsseg7e16_v_u16m1 | ( | ... | ) | __riscv_vlsseg7e16_v_u16m1(__VA_ARGS__) |
| #define vlsseg7e16_v_u16m1_m | ( | ... | ) | __riscv_vlsseg7e16_v_u16m1_tumu(__VA_ARGS__) |
| #define vlsseg7e16_v_u16mf2 | ( | ... | ) | __riscv_vlsseg7e16_v_u16mf2(__VA_ARGS__) |
| #define vlsseg7e16_v_u16mf2_m | ( | ... | ) | __riscv_vlsseg7e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlsseg7e16_v_u16mf4 | ( | ... | ) | __riscv_vlsseg7e16_v_u16mf4(__VA_ARGS__) |
| #define vlsseg7e16_v_u16mf4_m | ( | ... | ) | __riscv_vlsseg7e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlsseg7e32_v_f32m1 | ( | ... | ) | __riscv_vlsseg7e32_v_f32m1(__VA_ARGS__) |
| #define vlsseg7e32_v_f32m1_m | ( | ... | ) | __riscv_vlsseg7e32_v_f32m1_tumu(__VA_ARGS__) |
| #define vlsseg7e32_v_f32mf2 | ( | ... | ) | __riscv_vlsseg7e32_v_f32mf2(__VA_ARGS__) |
| #define vlsseg7e32_v_f32mf2_m | ( | ... | ) | __riscv_vlsseg7e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlsseg7e32_v_i32m1 | ( | ... | ) | __riscv_vlsseg7e32_v_i32m1(__VA_ARGS__) |
| #define vlsseg7e32_v_i32m1_m | ( | ... | ) | __riscv_vlsseg7e32_v_i32m1_tumu(__VA_ARGS__) |
| #define vlsseg7e32_v_i32mf2 | ( | ... | ) | __riscv_vlsseg7e32_v_i32mf2(__VA_ARGS__) |
| #define vlsseg7e32_v_i32mf2_m | ( | ... | ) | __riscv_vlsseg7e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlsseg7e32_v_u32m1 | ( | ... | ) | __riscv_vlsseg7e32_v_u32m1(__VA_ARGS__) |
| #define vlsseg7e32_v_u32m1_m | ( | ... | ) | __riscv_vlsseg7e32_v_u32m1_tumu(__VA_ARGS__) |
| #define vlsseg7e32_v_u32mf2 | ( | ... | ) | __riscv_vlsseg7e32_v_u32mf2(__VA_ARGS__) |
| #define vlsseg7e32_v_u32mf2_m | ( | ... | ) | __riscv_vlsseg7e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlsseg7e64_v_f64m1 | ( | ... | ) | __riscv_vlsseg7e64_v_f64m1(__VA_ARGS__) |
| #define vlsseg7e64_v_f64m1_m | ( | ... | ) | __riscv_vlsseg7e64_v_f64m1_tumu(__VA_ARGS__) |
| #define vlsseg7e64_v_i64m1 | ( | ... | ) | __riscv_vlsseg7e64_v_i64m1(__VA_ARGS__) |
| #define vlsseg7e64_v_i64m1_m | ( | ... | ) | __riscv_vlsseg7e64_v_i64m1_tumu(__VA_ARGS__) |
| #define vlsseg7e64_v_u64m1 | ( | ... | ) | __riscv_vlsseg7e64_v_u64m1(__VA_ARGS__) |
| #define vlsseg7e64_v_u64m1_m | ( | ... | ) | __riscv_vlsseg7e64_v_u64m1_tumu(__VA_ARGS__) |
| #define vlsseg7e8_v_i8m1 | ( | ... | ) | __riscv_vlsseg7e8_v_i8m1(__VA_ARGS__) |
| #define vlsseg7e8_v_i8m1_m | ( | ... | ) | __riscv_vlsseg7e8_v_i8m1_tumu(__VA_ARGS__) |
| #define vlsseg7e8_v_i8mf2 | ( | ... | ) | __riscv_vlsseg7e8_v_i8mf2(__VA_ARGS__) |
| #define vlsseg7e8_v_i8mf2_m | ( | ... | ) | __riscv_vlsseg7e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlsseg7e8_v_i8mf4 | ( | ... | ) | __riscv_vlsseg7e8_v_i8mf4(__VA_ARGS__) |
| #define vlsseg7e8_v_i8mf4_m | ( | ... | ) | __riscv_vlsseg7e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlsseg7e8_v_i8mf8 | ( | ... | ) | __riscv_vlsseg7e8_v_i8mf8(__VA_ARGS__) |
| #define vlsseg7e8_v_i8mf8_m | ( | ... | ) | __riscv_vlsseg7e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlsseg7e8_v_u8m1 | ( | ... | ) | __riscv_vlsseg7e8_v_u8m1(__VA_ARGS__) |
| #define vlsseg7e8_v_u8m1_m | ( | ... | ) | __riscv_vlsseg7e8_v_u8m1_tumu(__VA_ARGS__) |
| #define vlsseg7e8_v_u8mf2 | ( | ... | ) | __riscv_vlsseg7e8_v_u8mf2(__VA_ARGS__) |
| #define vlsseg7e8_v_u8mf2_m | ( | ... | ) | __riscv_vlsseg7e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlsseg7e8_v_u8mf4 | ( | ... | ) | __riscv_vlsseg7e8_v_u8mf4(__VA_ARGS__) |
| #define vlsseg7e8_v_u8mf4_m | ( | ... | ) | __riscv_vlsseg7e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlsseg7e8_v_u8mf8 | ( | ... | ) | __riscv_vlsseg7e8_v_u8mf8(__VA_ARGS__) |
| #define vlsseg7e8_v_u8mf8_m | ( | ... | ) | __riscv_vlsseg7e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vlsseg8e16_v_f16m1 | ( | ... | ) | __riscv_vlsseg8e16_v_f16m1(__VA_ARGS__) |
| #define vlsseg8e16_v_f16m1_m | ( | ... | ) | __riscv_vlsseg8e16_v_f16m1_tumu(__VA_ARGS__) |
| #define vlsseg8e16_v_f16mf2 | ( | ... | ) | __riscv_vlsseg8e16_v_f16mf2(__VA_ARGS__) |
| #define vlsseg8e16_v_f16mf2_m | ( | ... | ) | __riscv_vlsseg8e16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vlsseg8e16_v_f16mf4 | ( | ... | ) | __riscv_vlsseg8e16_v_f16mf4(__VA_ARGS__) |
| #define vlsseg8e16_v_f16mf4_m | ( | ... | ) | __riscv_vlsseg8e16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vlsseg8e16_v_i16m1 | ( | ... | ) | __riscv_vlsseg8e16_v_i16m1(__VA_ARGS__) |
| #define vlsseg8e16_v_i16m1_m | ( | ... | ) | __riscv_vlsseg8e16_v_i16m1_tumu(__VA_ARGS__) |
| #define vlsseg8e16_v_i16mf2 | ( | ... | ) | __riscv_vlsseg8e16_v_i16mf2(__VA_ARGS__) |
| #define vlsseg8e16_v_i16mf2_m | ( | ... | ) | __riscv_vlsseg8e16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vlsseg8e16_v_i16mf4 | ( | ... | ) | __riscv_vlsseg8e16_v_i16mf4(__VA_ARGS__) |
| #define vlsseg8e16_v_i16mf4_m | ( | ... | ) | __riscv_vlsseg8e16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vlsseg8e16_v_u16m1 | ( | ... | ) | __riscv_vlsseg8e16_v_u16m1(__VA_ARGS__) |
| #define vlsseg8e16_v_u16m1_m | ( | ... | ) | __riscv_vlsseg8e16_v_u16m1_tumu(__VA_ARGS__) |
| #define vlsseg8e16_v_u16mf2 | ( | ... | ) | __riscv_vlsseg8e16_v_u16mf2(__VA_ARGS__) |
| #define vlsseg8e16_v_u16mf2_m | ( | ... | ) | __riscv_vlsseg8e16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vlsseg8e16_v_u16mf4 | ( | ... | ) | __riscv_vlsseg8e16_v_u16mf4(__VA_ARGS__) |
| #define vlsseg8e16_v_u16mf4_m | ( | ... | ) | __riscv_vlsseg8e16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vlsseg8e32_v_f32m1 | ( | ... | ) | __riscv_vlsseg8e32_v_f32m1(__VA_ARGS__) |
| #define vlsseg8e32_v_f32m1_m | ( | ... | ) | __riscv_vlsseg8e32_v_f32m1_tumu(__VA_ARGS__) |
| #define vlsseg8e32_v_f32mf2 | ( | ... | ) | __riscv_vlsseg8e32_v_f32mf2(__VA_ARGS__) |
| #define vlsseg8e32_v_f32mf2_m | ( | ... | ) | __riscv_vlsseg8e32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vlsseg8e32_v_i32m1 | ( | ... | ) | __riscv_vlsseg8e32_v_i32m1(__VA_ARGS__) |
| #define vlsseg8e32_v_i32m1_m | ( | ... | ) | __riscv_vlsseg8e32_v_i32m1_tumu(__VA_ARGS__) |
| #define vlsseg8e32_v_i32mf2 | ( | ... | ) | __riscv_vlsseg8e32_v_i32mf2(__VA_ARGS__) |
| #define vlsseg8e32_v_i32mf2_m | ( | ... | ) | __riscv_vlsseg8e32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vlsseg8e32_v_u32m1 | ( | ... | ) | __riscv_vlsseg8e32_v_u32m1(__VA_ARGS__) |
| #define vlsseg8e32_v_u32m1_m | ( | ... | ) | __riscv_vlsseg8e32_v_u32m1_tumu(__VA_ARGS__) |
| #define vlsseg8e32_v_u32mf2 | ( | ... | ) | __riscv_vlsseg8e32_v_u32mf2(__VA_ARGS__) |
| #define vlsseg8e32_v_u32mf2_m | ( | ... | ) | __riscv_vlsseg8e32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vlsseg8e64_v_f64m1 | ( | ... | ) | __riscv_vlsseg8e64_v_f64m1(__VA_ARGS__) |
| #define vlsseg8e64_v_f64m1_m | ( | ... | ) | __riscv_vlsseg8e64_v_f64m1_tumu(__VA_ARGS__) |
| #define vlsseg8e64_v_i64m1 | ( | ... | ) | __riscv_vlsseg8e64_v_i64m1(__VA_ARGS__) |
| #define vlsseg8e64_v_i64m1_m | ( | ... | ) | __riscv_vlsseg8e64_v_i64m1_tumu(__VA_ARGS__) |
| #define vlsseg8e64_v_u64m1 | ( | ... | ) | __riscv_vlsseg8e64_v_u64m1(__VA_ARGS__) |
| #define vlsseg8e64_v_u64m1_m | ( | ... | ) | __riscv_vlsseg8e64_v_u64m1_tumu(__VA_ARGS__) |
| #define vlsseg8e8_v_i8m1 | ( | ... | ) | __riscv_vlsseg8e8_v_i8m1(__VA_ARGS__) |
| #define vlsseg8e8_v_i8m1_m | ( | ... | ) | __riscv_vlsseg8e8_v_i8m1_tumu(__VA_ARGS__) |
| #define vlsseg8e8_v_i8mf2 | ( | ... | ) | __riscv_vlsseg8e8_v_i8mf2(__VA_ARGS__) |
| #define vlsseg8e8_v_i8mf2_m | ( | ... | ) | __riscv_vlsseg8e8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vlsseg8e8_v_i8mf4 | ( | ... | ) | __riscv_vlsseg8e8_v_i8mf4(__VA_ARGS__) |
| #define vlsseg8e8_v_i8mf4_m | ( | ... | ) | __riscv_vlsseg8e8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vlsseg8e8_v_i8mf8 | ( | ... | ) | __riscv_vlsseg8e8_v_i8mf8(__VA_ARGS__) |
| #define vlsseg8e8_v_i8mf8_m | ( | ... | ) | __riscv_vlsseg8e8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vlsseg8e8_v_u8m1 | ( | ... | ) | __riscv_vlsseg8e8_v_u8m1(__VA_ARGS__) |
| #define vlsseg8e8_v_u8m1_m | ( | ... | ) | __riscv_vlsseg8e8_v_u8m1_tumu(__VA_ARGS__) |
| #define vlsseg8e8_v_u8mf2 | ( | ... | ) | __riscv_vlsseg8e8_v_u8mf2(__VA_ARGS__) |
| #define vlsseg8e8_v_u8mf2_m | ( | ... | ) | __riscv_vlsseg8e8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vlsseg8e8_v_u8mf4 | ( | ... | ) | __riscv_vlsseg8e8_v_u8mf4(__VA_ARGS__) |
| #define vlsseg8e8_v_u8mf4_m | ( | ... | ) | __riscv_vlsseg8e8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vlsseg8e8_v_u8mf8 | ( | ... | ) | __riscv_vlsseg8e8_v_u8mf8(__VA_ARGS__) |
| #define vlsseg8e8_v_u8mf8_m | ( | ... | ) | __riscv_vlsseg8e8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxei16_v_f16m1 | ( | ... | ) | __riscv_vluxei16_v_f16m1(__VA_ARGS__) |
| #define vluxei16_v_f16m1_m | ( | ... | ) | __riscv_vluxei16_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxei16_v_f16m2 | ( | ... | ) | __riscv_vluxei16_v_f16m2(__VA_ARGS__) |
| #define vluxei16_v_f16m2_m | ( | ... | ) | __riscv_vluxei16_v_f16m2_tumu(__VA_ARGS__) |
| #define vluxei16_v_f16m4 | ( | ... | ) | __riscv_vluxei16_v_f16m4(__VA_ARGS__) |
| #define vluxei16_v_f16m4_m | ( | ... | ) | __riscv_vluxei16_v_f16m4_tumu(__VA_ARGS__) |
| #define vluxei16_v_f16m8 | ( | ... | ) | __riscv_vluxei16_v_f16m8(__VA_ARGS__) |
| #define vluxei16_v_f16m8_m | ( | ... | ) | __riscv_vluxei16_v_f16m8_tumu(__VA_ARGS__) |
| #define vluxei16_v_f16mf2 | ( | ... | ) | __riscv_vluxei16_v_f16mf2(__VA_ARGS__) |
| #define vluxei16_v_f16mf2_m | ( | ... | ) | __riscv_vluxei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxei16_v_f16mf4 | ( | ... | ) | __riscv_vluxei16_v_f16mf4(__VA_ARGS__) |
| #define vluxei16_v_f16mf4_m | ( | ... | ) | __riscv_vluxei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxei16_v_f32m1 | ( | ... | ) | __riscv_vluxei16_v_f32m1(__VA_ARGS__) |
| #define vluxei16_v_f32m1_m | ( | ... | ) | __riscv_vluxei16_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxei16_v_f32m2 | ( | ... | ) | __riscv_vluxei16_v_f32m2(__VA_ARGS__) |
| #define vluxei16_v_f32m2_m | ( | ... | ) | __riscv_vluxei16_v_f32m2_tumu(__VA_ARGS__) |
| #define vluxei16_v_f32m4 | ( | ... | ) | __riscv_vluxei16_v_f32m4(__VA_ARGS__) |
| #define vluxei16_v_f32m4_m | ( | ... | ) | __riscv_vluxei16_v_f32m4_tumu(__VA_ARGS__) |
| #define vluxei16_v_f32m8 | ( | ... | ) | __riscv_vluxei16_v_f32m8(__VA_ARGS__) |
| #define vluxei16_v_f32m8_m | ( | ... | ) | __riscv_vluxei16_v_f32m8_tumu(__VA_ARGS__) |
| #define vluxei16_v_f32mf2 | ( | ... | ) | __riscv_vluxei16_v_f32mf2(__VA_ARGS__) |
| #define vluxei16_v_f32mf2_m | ( | ... | ) | __riscv_vluxei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxei16_v_f64m1 | ( | ... | ) | __riscv_vluxei16_v_f64m1(__VA_ARGS__) |
| #define vluxei16_v_f64m1_m | ( | ... | ) | __riscv_vluxei16_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxei16_v_f64m2 | ( | ... | ) | __riscv_vluxei16_v_f64m2(__VA_ARGS__) |
| #define vluxei16_v_f64m2_m | ( | ... | ) | __riscv_vluxei16_v_f64m2_tumu(__VA_ARGS__) |
| #define vluxei16_v_f64m4 | ( | ... | ) | __riscv_vluxei16_v_f64m4(__VA_ARGS__) |
| #define vluxei16_v_f64m4_m | ( | ... | ) | __riscv_vluxei16_v_f64m4_tumu(__VA_ARGS__) |
| #define vluxei16_v_f64m8 | ( | ... | ) | __riscv_vluxei16_v_f64m8(__VA_ARGS__) |
| #define vluxei16_v_f64m8_m | ( | ... | ) | __riscv_vluxei16_v_f64m8_tumu(__VA_ARGS__) |
| #define vluxei16_v_i16m1 | ( | ... | ) | __riscv_vluxei16_v_i16m1(__VA_ARGS__) |
| #define vluxei16_v_i16m1_m | ( | ... | ) | __riscv_vluxei16_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxei16_v_i16m2 | ( | ... | ) | __riscv_vluxei16_v_i16m2(__VA_ARGS__) |
| #define vluxei16_v_i16m2_m | ( | ... | ) | __riscv_vluxei16_v_i16m2_tumu(__VA_ARGS__) |
| #define vluxei16_v_i16m4 | ( | ... | ) | __riscv_vluxei16_v_i16m4(__VA_ARGS__) |
| #define vluxei16_v_i16m4_m | ( | ... | ) | __riscv_vluxei16_v_i16m4_tumu(__VA_ARGS__) |
| #define vluxei16_v_i16m8 | ( | ... | ) | __riscv_vluxei16_v_i16m8(__VA_ARGS__) |
| #define vluxei16_v_i16m8_m | ( | ... | ) | __riscv_vluxei16_v_i16m8_tumu(__VA_ARGS__) |
| #define vluxei16_v_i16mf2 | ( | ... | ) | __riscv_vluxei16_v_i16mf2(__VA_ARGS__) |
| #define vluxei16_v_i16mf2_m | ( | ... | ) | __riscv_vluxei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxei16_v_i16mf4 | ( | ... | ) | __riscv_vluxei16_v_i16mf4(__VA_ARGS__) |
| #define vluxei16_v_i16mf4_m | ( | ... | ) | __riscv_vluxei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxei16_v_i32m1 | ( | ... | ) | __riscv_vluxei16_v_i32m1(__VA_ARGS__) |
| #define vluxei16_v_i32m1_m | ( | ... | ) | __riscv_vluxei16_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxei16_v_i32m2 | ( | ... | ) | __riscv_vluxei16_v_i32m2(__VA_ARGS__) |
| #define vluxei16_v_i32m2_m | ( | ... | ) | __riscv_vluxei16_v_i32m2_tumu(__VA_ARGS__) |
| #define vluxei16_v_i32m4 | ( | ... | ) | __riscv_vluxei16_v_i32m4(__VA_ARGS__) |
| #define vluxei16_v_i32m4_m | ( | ... | ) | __riscv_vluxei16_v_i32m4_tumu(__VA_ARGS__) |
| #define vluxei16_v_i32m8 | ( | ... | ) | __riscv_vluxei16_v_i32m8(__VA_ARGS__) |
| #define vluxei16_v_i32m8_m | ( | ... | ) | __riscv_vluxei16_v_i32m8_tumu(__VA_ARGS__) |
| #define vluxei16_v_i32mf2 | ( | ... | ) | __riscv_vluxei16_v_i32mf2(__VA_ARGS__) |
| #define vluxei16_v_i32mf2_m | ( | ... | ) | __riscv_vluxei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxei16_v_i64m1 | ( | ... | ) | __riscv_vluxei16_v_i64m1(__VA_ARGS__) |
| #define vluxei16_v_i64m1_m | ( | ... | ) | __riscv_vluxei16_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxei16_v_i64m2 | ( | ... | ) | __riscv_vluxei16_v_i64m2(__VA_ARGS__) |
| #define vluxei16_v_i64m2_m | ( | ... | ) | __riscv_vluxei16_v_i64m2_tumu(__VA_ARGS__) |
| #define vluxei16_v_i64m4 | ( | ... | ) | __riscv_vluxei16_v_i64m4(__VA_ARGS__) |
| #define vluxei16_v_i64m4_m | ( | ... | ) | __riscv_vluxei16_v_i64m4_tumu(__VA_ARGS__) |
| #define vluxei16_v_i64m8 | ( | ... | ) | __riscv_vluxei16_v_i64m8(__VA_ARGS__) |
| #define vluxei16_v_i64m8_m | ( | ... | ) | __riscv_vluxei16_v_i64m8_tumu(__VA_ARGS__) |
| #define vluxei16_v_i8m1 | ( | ... | ) | __riscv_vluxei16_v_i8m1(__VA_ARGS__) |
| #define vluxei16_v_i8m1_m | ( | ... | ) | __riscv_vluxei16_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxei16_v_i8m2 | ( | ... | ) | __riscv_vluxei16_v_i8m2(__VA_ARGS__) |
| #define vluxei16_v_i8m2_m | ( | ... | ) | __riscv_vluxei16_v_i8m2_tumu(__VA_ARGS__) |
| #define vluxei16_v_i8m4 | ( | ... | ) | __riscv_vluxei16_v_i8m4(__VA_ARGS__) |
| #define vluxei16_v_i8m4_m | ( | ... | ) | __riscv_vluxei16_v_i8m4_tumu(__VA_ARGS__) |
| #define vluxei16_v_i8mf2 | ( | ... | ) | __riscv_vluxei16_v_i8mf2(__VA_ARGS__) |
| #define vluxei16_v_i8mf2_m | ( | ... | ) | __riscv_vluxei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxei16_v_i8mf4 | ( | ... | ) | __riscv_vluxei16_v_i8mf4(__VA_ARGS__) |
| #define vluxei16_v_i8mf4_m | ( | ... | ) | __riscv_vluxei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxei16_v_i8mf8 | ( | ... | ) | __riscv_vluxei16_v_i8mf8(__VA_ARGS__) |
| #define vluxei16_v_i8mf8_m | ( | ... | ) | __riscv_vluxei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxei16_v_u16m1 | ( | ... | ) | __riscv_vluxei16_v_u16m1(__VA_ARGS__) |
| #define vluxei16_v_u16m1_m | ( | ... | ) | __riscv_vluxei16_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxei16_v_u16m2 | ( | ... | ) | __riscv_vluxei16_v_u16m2(__VA_ARGS__) |
| #define vluxei16_v_u16m2_m | ( | ... | ) | __riscv_vluxei16_v_u16m2_tumu(__VA_ARGS__) |
| #define vluxei16_v_u16m4 | ( | ... | ) | __riscv_vluxei16_v_u16m4(__VA_ARGS__) |
| #define vluxei16_v_u16m4_m | ( | ... | ) | __riscv_vluxei16_v_u16m4_tumu(__VA_ARGS__) |
| #define vluxei16_v_u16m8 | ( | ... | ) | __riscv_vluxei16_v_u16m8(__VA_ARGS__) |
| #define vluxei16_v_u16m8_m | ( | ... | ) | __riscv_vluxei16_v_u16m8_tumu(__VA_ARGS__) |
| #define vluxei16_v_u16mf2 | ( | ... | ) | __riscv_vluxei16_v_u16mf2(__VA_ARGS__) |
| #define vluxei16_v_u16mf2_m | ( | ... | ) | __riscv_vluxei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxei16_v_u16mf4 | ( | ... | ) | __riscv_vluxei16_v_u16mf4(__VA_ARGS__) |
| #define vluxei16_v_u16mf4_m | ( | ... | ) | __riscv_vluxei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxei16_v_u32m1 | ( | ... | ) | __riscv_vluxei16_v_u32m1(__VA_ARGS__) |
| #define vluxei16_v_u32m1_m | ( | ... | ) | __riscv_vluxei16_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxei16_v_u32m2 | ( | ... | ) | __riscv_vluxei16_v_u32m2(__VA_ARGS__) |
| #define vluxei16_v_u32m2_m | ( | ... | ) | __riscv_vluxei16_v_u32m2_tumu(__VA_ARGS__) |
| #define vluxei16_v_u32m4 | ( | ... | ) | __riscv_vluxei16_v_u32m4(__VA_ARGS__) |
| #define vluxei16_v_u32m4_m | ( | ... | ) | __riscv_vluxei16_v_u32m4_tumu(__VA_ARGS__) |
| #define vluxei16_v_u32m8 | ( | ... | ) | __riscv_vluxei16_v_u32m8(__VA_ARGS__) |
| #define vluxei16_v_u32m8_m | ( | ... | ) | __riscv_vluxei16_v_u32m8_tumu(__VA_ARGS__) |
| #define vluxei16_v_u32mf2 | ( | ... | ) | __riscv_vluxei16_v_u32mf2(__VA_ARGS__) |
| #define vluxei16_v_u32mf2_m | ( | ... | ) | __riscv_vluxei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxei16_v_u64m1 | ( | ... | ) | __riscv_vluxei16_v_u64m1(__VA_ARGS__) |
| #define vluxei16_v_u64m1_m | ( | ... | ) | __riscv_vluxei16_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxei16_v_u64m2 | ( | ... | ) | __riscv_vluxei16_v_u64m2(__VA_ARGS__) |
| #define vluxei16_v_u64m2_m | ( | ... | ) | __riscv_vluxei16_v_u64m2_tumu(__VA_ARGS__) |
| #define vluxei16_v_u64m4 | ( | ... | ) | __riscv_vluxei16_v_u64m4(__VA_ARGS__) |
| #define vluxei16_v_u64m4_m | ( | ... | ) | __riscv_vluxei16_v_u64m4_tumu(__VA_ARGS__) |
| #define vluxei16_v_u64m8 | ( | ... | ) | __riscv_vluxei16_v_u64m8(__VA_ARGS__) |
| #define vluxei16_v_u64m8_m | ( | ... | ) | __riscv_vluxei16_v_u64m8_tumu(__VA_ARGS__) |
| #define vluxei16_v_u8m1 | ( | ... | ) | __riscv_vluxei16_v_u8m1(__VA_ARGS__) |
| #define vluxei16_v_u8m1_m | ( | ... | ) | __riscv_vluxei16_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxei16_v_u8m2 | ( | ... | ) | __riscv_vluxei16_v_u8m2(__VA_ARGS__) |
| #define vluxei16_v_u8m2_m | ( | ... | ) | __riscv_vluxei16_v_u8m2_tumu(__VA_ARGS__) |
| #define vluxei16_v_u8m4 | ( | ... | ) | __riscv_vluxei16_v_u8m4(__VA_ARGS__) |
| #define vluxei16_v_u8m4_m | ( | ... | ) | __riscv_vluxei16_v_u8m4_tumu(__VA_ARGS__) |
| #define vluxei16_v_u8mf2 | ( | ... | ) | __riscv_vluxei16_v_u8mf2(__VA_ARGS__) |
| #define vluxei16_v_u8mf2_m | ( | ... | ) | __riscv_vluxei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxei16_v_u8mf4 | ( | ... | ) | __riscv_vluxei16_v_u8mf4(__VA_ARGS__) |
| #define vluxei16_v_u8mf4_m | ( | ... | ) | __riscv_vluxei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxei16_v_u8mf8 | ( | ... | ) | __riscv_vluxei16_v_u8mf8(__VA_ARGS__) |
| #define vluxei16_v_u8mf8_m | ( | ... | ) | __riscv_vluxei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxei32_v_f16m1 | ( | ... | ) | __riscv_vluxei32_v_f16m1(__VA_ARGS__) |
| #define vluxei32_v_f16m1_m | ( | ... | ) | __riscv_vluxei32_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxei32_v_f16m2 | ( | ... | ) | __riscv_vluxei32_v_f16m2(__VA_ARGS__) |
| #define vluxei32_v_f16m2_m | ( | ... | ) | __riscv_vluxei32_v_f16m2_tumu(__VA_ARGS__) |
| #define vluxei32_v_f16m4 | ( | ... | ) | __riscv_vluxei32_v_f16m4(__VA_ARGS__) |
| #define vluxei32_v_f16m4_m | ( | ... | ) | __riscv_vluxei32_v_f16m4_tumu(__VA_ARGS__) |
| #define vluxei32_v_f16mf2 | ( | ... | ) | __riscv_vluxei32_v_f16mf2(__VA_ARGS__) |
| #define vluxei32_v_f16mf2_m | ( | ... | ) | __riscv_vluxei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxei32_v_f16mf4 | ( | ... | ) | __riscv_vluxei32_v_f16mf4(__VA_ARGS__) |
| #define vluxei32_v_f16mf4_m | ( | ... | ) | __riscv_vluxei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxei32_v_f32m1 | ( | ... | ) | __riscv_vluxei32_v_f32m1(__VA_ARGS__) |
| #define vluxei32_v_f32m1_m | ( | ... | ) | __riscv_vluxei32_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxei32_v_f32m2 | ( | ... | ) | __riscv_vluxei32_v_f32m2(__VA_ARGS__) |
| #define vluxei32_v_f32m2_m | ( | ... | ) | __riscv_vluxei32_v_f32m2_tumu(__VA_ARGS__) |
| #define vluxei32_v_f32m4 | ( | ... | ) | __riscv_vluxei32_v_f32m4(__VA_ARGS__) |
| #define vluxei32_v_f32m4_m | ( | ... | ) | __riscv_vluxei32_v_f32m4_tumu(__VA_ARGS__) |
| #define vluxei32_v_f32m8 | ( | ... | ) | __riscv_vluxei32_v_f32m8(__VA_ARGS__) |
| #define vluxei32_v_f32m8_m | ( | ... | ) | __riscv_vluxei32_v_f32m8_tumu(__VA_ARGS__) |
| #define vluxei32_v_f32mf2 | ( | ... | ) | __riscv_vluxei32_v_f32mf2(__VA_ARGS__) |
| #define vluxei32_v_f32mf2_m | ( | ... | ) | __riscv_vluxei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxei32_v_f64m1 | ( | ... | ) | __riscv_vluxei32_v_f64m1(__VA_ARGS__) |
| #define vluxei32_v_f64m1_m | ( | ... | ) | __riscv_vluxei32_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxei32_v_f64m2 | ( | ... | ) | __riscv_vluxei32_v_f64m2(__VA_ARGS__) |
| #define vluxei32_v_f64m2_m | ( | ... | ) | __riscv_vluxei32_v_f64m2_tumu(__VA_ARGS__) |
| #define vluxei32_v_f64m4 | ( | ... | ) | __riscv_vluxei32_v_f64m4(__VA_ARGS__) |
| #define vluxei32_v_f64m4_m | ( | ... | ) | __riscv_vluxei32_v_f64m4_tumu(__VA_ARGS__) |
| #define vluxei32_v_f64m8 | ( | ... | ) | __riscv_vluxei32_v_f64m8(__VA_ARGS__) |
| #define vluxei32_v_f64m8_m | ( | ... | ) | __riscv_vluxei32_v_f64m8_tumu(__VA_ARGS__) |
| #define vluxei32_v_i16m1 | ( | ... | ) | __riscv_vluxei32_v_i16m1(__VA_ARGS__) |
| #define vluxei32_v_i16m1_m | ( | ... | ) | __riscv_vluxei32_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxei32_v_i16m2 | ( | ... | ) | __riscv_vluxei32_v_i16m2(__VA_ARGS__) |
| #define vluxei32_v_i16m2_m | ( | ... | ) | __riscv_vluxei32_v_i16m2_tumu(__VA_ARGS__) |
| #define vluxei32_v_i16m4 | ( | ... | ) | __riscv_vluxei32_v_i16m4(__VA_ARGS__) |
| #define vluxei32_v_i16m4_m | ( | ... | ) | __riscv_vluxei32_v_i16m4_tumu(__VA_ARGS__) |
| #define vluxei32_v_i16mf2 | ( | ... | ) | __riscv_vluxei32_v_i16mf2(__VA_ARGS__) |
| #define vluxei32_v_i16mf2_m | ( | ... | ) | __riscv_vluxei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxei32_v_i16mf4 | ( | ... | ) | __riscv_vluxei32_v_i16mf4(__VA_ARGS__) |
| #define vluxei32_v_i16mf4_m | ( | ... | ) | __riscv_vluxei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxei32_v_i32m1 | ( | ... | ) | __riscv_vluxei32_v_i32m1(__VA_ARGS__) |
| #define vluxei32_v_i32m1_m | ( | ... | ) | __riscv_vluxei32_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxei32_v_i32m2 | ( | ... | ) | __riscv_vluxei32_v_i32m2(__VA_ARGS__) |
| #define vluxei32_v_i32m2_m | ( | ... | ) | __riscv_vluxei32_v_i32m2_tumu(__VA_ARGS__) |
| #define vluxei32_v_i32m4 | ( | ... | ) | __riscv_vluxei32_v_i32m4(__VA_ARGS__) |
| #define vluxei32_v_i32m4_m | ( | ... | ) | __riscv_vluxei32_v_i32m4_tumu(__VA_ARGS__) |
| #define vluxei32_v_i32m8 | ( | ... | ) | __riscv_vluxei32_v_i32m8(__VA_ARGS__) |
| #define vluxei32_v_i32m8_m | ( | ... | ) | __riscv_vluxei32_v_i32m8_tumu(__VA_ARGS__) |
| #define vluxei32_v_i32mf2 | ( | ... | ) | __riscv_vluxei32_v_i32mf2(__VA_ARGS__) |
| #define vluxei32_v_i32mf2_m | ( | ... | ) | __riscv_vluxei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxei32_v_i64m1 | ( | ... | ) | __riscv_vluxei32_v_i64m1(__VA_ARGS__) |
| #define vluxei32_v_i64m1_m | ( | ... | ) | __riscv_vluxei32_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxei32_v_i64m2 | ( | ... | ) | __riscv_vluxei32_v_i64m2(__VA_ARGS__) |
| #define vluxei32_v_i64m2_m | ( | ... | ) | __riscv_vluxei32_v_i64m2_tumu(__VA_ARGS__) |
| #define vluxei32_v_i64m4 | ( | ... | ) | __riscv_vluxei32_v_i64m4(__VA_ARGS__) |
| #define vluxei32_v_i64m4_m | ( | ... | ) | __riscv_vluxei32_v_i64m4_tumu(__VA_ARGS__) |
| #define vluxei32_v_i64m8 | ( | ... | ) | __riscv_vluxei32_v_i64m8(__VA_ARGS__) |
| #define vluxei32_v_i64m8_m | ( | ... | ) | __riscv_vluxei32_v_i64m8_tumu(__VA_ARGS__) |
| #define vluxei32_v_i8m1 | ( | ... | ) | __riscv_vluxei32_v_i8m1(__VA_ARGS__) |
| #define vluxei32_v_i8m1_m | ( | ... | ) | __riscv_vluxei32_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxei32_v_i8m2 | ( | ... | ) | __riscv_vluxei32_v_i8m2(__VA_ARGS__) |
| #define vluxei32_v_i8m2_m | ( | ... | ) | __riscv_vluxei32_v_i8m2_tumu(__VA_ARGS__) |
| #define vluxei32_v_i8mf2 | ( | ... | ) | __riscv_vluxei32_v_i8mf2(__VA_ARGS__) |
| #define vluxei32_v_i8mf2_m | ( | ... | ) | __riscv_vluxei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxei32_v_i8mf4 | ( | ... | ) | __riscv_vluxei32_v_i8mf4(__VA_ARGS__) |
| #define vluxei32_v_i8mf4_m | ( | ... | ) | __riscv_vluxei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxei32_v_i8mf8 | ( | ... | ) | __riscv_vluxei32_v_i8mf8(__VA_ARGS__) |
| #define vluxei32_v_i8mf8_m | ( | ... | ) | __riscv_vluxei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxei32_v_u16m1 | ( | ... | ) | __riscv_vluxei32_v_u16m1(__VA_ARGS__) |
| #define vluxei32_v_u16m1_m | ( | ... | ) | __riscv_vluxei32_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxei32_v_u16m2 | ( | ... | ) | __riscv_vluxei32_v_u16m2(__VA_ARGS__) |
| #define vluxei32_v_u16m2_m | ( | ... | ) | __riscv_vluxei32_v_u16m2_tumu(__VA_ARGS__) |
| #define vluxei32_v_u16m4 | ( | ... | ) | __riscv_vluxei32_v_u16m4(__VA_ARGS__) |
| #define vluxei32_v_u16m4_m | ( | ... | ) | __riscv_vluxei32_v_u16m4_tumu(__VA_ARGS__) |
| #define vluxei32_v_u16mf2 | ( | ... | ) | __riscv_vluxei32_v_u16mf2(__VA_ARGS__) |
| #define vluxei32_v_u16mf2_m | ( | ... | ) | __riscv_vluxei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxei32_v_u16mf4 | ( | ... | ) | __riscv_vluxei32_v_u16mf4(__VA_ARGS__) |
| #define vluxei32_v_u16mf4_m | ( | ... | ) | __riscv_vluxei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxei32_v_u32m1 | ( | ... | ) | __riscv_vluxei32_v_u32m1(__VA_ARGS__) |
| #define vluxei32_v_u32m1_m | ( | ... | ) | __riscv_vluxei32_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxei32_v_u32m2 | ( | ... | ) | __riscv_vluxei32_v_u32m2(__VA_ARGS__) |
| #define vluxei32_v_u32m2_m | ( | ... | ) | __riscv_vluxei32_v_u32m2_tumu(__VA_ARGS__) |
| #define vluxei32_v_u32m4 | ( | ... | ) | __riscv_vluxei32_v_u32m4(__VA_ARGS__) |
| #define vluxei32_v_u32m4_m | ( | ... | ) | __riscv_vluxei32_v_u32m4_tumu(__VA_ARGS__) |
| #define vluxei32_v_u32m8 | ( | ... | ) | __riscv_vluxei32_v_u32m8(__VA_ARGS__) |
| #define vluxei32_v_u32m8_m | ( | ... | ) | __riscv_vluxei32_v_u32m8_tumu(__VA_ARGS__) |
| #define vluxei32_v_u32mf2 | ( | ... | ) | __riscv_vluxei32_v_u32mf2(__VA_ARGS__) |
| #define vluxei32_v_u32mf2_m | ( | ... | ) | __riscv_vluxei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxei32_v_u64m1 | ( | ... | ) | __riscv_vluxei32_v_u64m1(__VA_ARGS__) |
| #define vluxei32_v_u64m1_m | ( | ... | ) | __riscv_vluxei32_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxei32_v_u64m2 | ( | ... | ) | __riscv_vluxei32_v_u64m2(__VA_ARGS__) |
| #define vluxei32_v_u64m2_m | ( | ... | ) | __riscv_vluxei32_v_u64m2_tumu(__VA_ARGS__) |
| #define vluxei32_v_u64m4 | ( | ... | ) | __riscv_vluxei32_v_u64m4(__VA_ARGS__) |
| #define vluxei32_v_u64m4_m | ( | ... | ) | __riscv_vluxei32_v_u64m4_tumu(__VA_ARGS__) |
| #define vluxei32_v_u64m8 | ( | ... | ) | __riscv_vluxei32_v_u64m8(__VA_ARGS__) |
| #define vluxei32_v_u64m8_m | ( | ... | ) | __riscv_vluxei32_v_u64m8_tumu(__VA_ARGS__) |
| #define vluxei32_v_u8m1 | ( | ... | ) | __riscv_vluxei32_v_u8m1(__VA_ARGS__) |
| #define vluxei32_v_u8m1_m | ( | ... | ) | __riscv_vluxei32_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxei32_v_u8m2 | ( | ... | ) | __riscv_vluxei32_v_u8m2(__VA_ARGS__) |
| #define vluxei32_v_u8m2_m | ( | ... | ) | __riscv_vluxei32_v_u8m2_tumu(__VA_ARGS__) |
| #define vluxei32_v_u8mf2 | ( | ... | ) | __riscv_vluxei32_v_u8mf2(__VA_ARGS__) |
| #define vluxei32_v_u8mf2_m | ( | ... | ) | __riscv_vluxei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxei32_v_u8mf4 | ( | ... | ) | __riscv_vluxei32_v_u8mf4(__VA_ARGS__) |
| #define vluxei32_v_u8mf4_m | ( | ... | ) | __riscv_vluxei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxei32_v_u8mf8 | ( | ... | ) | __riscv_vluxei32_v_u8mf8(__VA_ARGS__) |
| #define vluxei32_v_u8mf8_m | ( | ... | ) | __riscv_vluxei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxei64_v_f16m1 | ( | ... | ) | __riscv_vluxei64_v_f16m1(__VA_ARGS__) |
| #define vluxei64_v_f16m1_m | ( | ... | ) | __riscv_vluxei64_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxei64_v_f16m2 | ( | ... | ) | __riscv_vluxei64_v_f16m2(__VA_ARGS__) |
| #define vluxei64_v_f16m2_m | ( | ... | ) | __riscv_vluxei64_v_f16m2_tumu(__VA_ARGS__) |
| #define vluxei64_v_f16mf2 | ( | ... | ) | __riscv_vluxei64_v_f16mf2(__VA_ARGS__) |
| #define vluxei64_v_f16mf2_m | ( | ... | ) | __riscv_vluxei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxei64_v_f16mf4 | ( | ... | ) | __riscv_vluxei64_v_f16mf4(__VA_ARGS__) |
| #define vluxei64_v_f16mf4_m | ( | ... | ) | __riscv_vluxei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxei64_v_f32m1 | ( | ... | ) | __riscv_vluxei64_v_f32m1(__VA_ARGS__) |
| #define vluxei64_v_f32m1_m | ( | ... | ) | __riscv_vluxei64_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxei64_v_f32m2 | ( | ... | ) | __riscv_vluxei64_v_f32m2(__VA_ARGS__) |
| #define vluxei64_v_f32m2_m | ( | ... | ) | __riscv_vluxei64_v_f32m2_tumu(__VA_ARGS__) |
| #define vluxei64_v_f32m4 | ( | ... | ) | __riscv_vluxei64_v_f32m4(__VA_ARGS__) |
| #define vluxei64_v_f32m4_m | ( | ... | ) | __riscv_vluxei64_v_f32m4_tumu(__VA_ARGS__) |
| #define vluxei64_v_f32mf2 | ( | ... | ) | __riscv_vluxei64_v_f32mf2(__VA_ARGS__) |
| #define vluxei64_v_f32mf2_m | ( | ... | ) | __riscv_vluxei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxei64_v_f64m1 | ( | ... | ) | __riscv_vluxei64_v_f64m1(__VA_ARGS__) |
| #define vluxei64_v_f64m1_m | ( | ... | ) | __riscv_vluxei64_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxei64_v_f64m2 | ( | ... | ) | __riscv_vluxei64_v_f64m2(__VA_ARGS__) |
| #define vluxei64_v_f64m2_m | ( | ... | ) | __riscv_vluxei64_v_f64m2_tumu(__VA_ARGS__) |
| #define vluxei64_v_f64m4 | ( | ... | ) | __riscv_vluxei64_v_f64m4(__VA_ARGS__) |
| #define vluxei64_v_f64m4_m | ( | ... | ) | __riscv_vluxei64_v_f64m4_tumu(__VA_ARGS__) |
| #define vluxei64_v_f64m8 | ( | ... | ) | __riscv_vluxei64_v_f64m8(__VA_ARGS__) |
| #define vluxei64_v_f64m8_m | ( | ... | ) | __riscv_vluxei64_v_f64m8_tumu(__VA_ARGS__) |
| #define vluxei64_v_i16m1 | ( | ... | ) | __riscv_vluxei64_v_i16m1(__VA_ARGS__) |
| #define vluxei64_v_i16m1_m | ( | ... | ) | __riscv_vluxei64_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxei64_v_i16m2 | ( | ... | ) | __riscv_vluxei64_v_i16m2(__VA_ARGS__) |
| #define vluxei64_v_i16m2_m | ( | ... | ) | __riscv_vluxei64_v_i16m2_tumu(__VA_ARGS__) |
| #define vluxei64_v_i16mf2 | ( | ... | ) | __riscv_vluxei64_v_i16mf2(__VA_ARGS__) |
| #define vluxei64_v_i16mf2_m | ( | ... | ) | __riscv_vluxei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxei64_v_i16mf4 | ( | ... | ) | __riscv_vluxei64_v_i16mf4(__VA_ARGS__) |
| #define vluxei64_v_i16mf4_m | ( | ... | ) | __riscv_vluxei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxei64_v_i32m1 | ( | ... | ) | __riscv_vluxei64_v_i32m1(__VA_ARGS__) |
| #define vluxei64_v_i32m1_m | ( | ... | ) | __riscv_vluxei64_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxei64_v_i32m2 | ( | ... | ) | __riscv_vluxei64_v_i32m2(__VA_ARGS__) |
| #define vluxei64_v_i32m2_m | ( | ... | ) | __riscv_vluxei64_v_i32m2_tumu(__VA_ARGS__) |
| #define vluxei64_v_i32m4 | ( | ... | ) | __riscv_vluxei64_v_i32m4(__VA_ARGS__) |
| #define vluxei64_v_i32m4_m | ( | ... | ) | __riscv_vluxei64_v_i32m4_tumu(__VA_ARGS__) |
| #define vluxei64_v_i32mf2 | ( | ... | ) | __riscv_vluxei64_v_i32mf2(__VA_ARGS__) |
| #define vluxei64_v_i32mf2_m | ( | ... | ) | __riscv_vluxei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxei64_v_i64m1 | ( | ... | ) | __riscv_vluxei64_v_i64m1(__VA_ARGS__) |
| #define vluxei64_v_i64m1_m | ( | ... | ) | __riscv_vluxei64_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxei64_v_i64m2 | ( | ... | ) | __riscv_vluxei64_v_i64m2(__VA_ARGS__) |
| #define vluxei64_v_i64m2_m | ( | ... | ) | __riscv_vluxei64_v_i64m2_tumu(__VA_ARGS__) |
| #define vluxei64_v_i64m4 | ( | ... | ) | __riscv_vluxei64_v_i64m4(__VA_ARGS__) |
| #define vluxei64_v_i64m4_m | ( | ... | ) | __riscv_vluxei64_v_i64m4_tumu(__VA_ARGS__) |
| #define vluxei64_v_i64m8 | ( | ... | ) | __riscv_vluxei64_v_i64m8(__VA_ARGS__) |
| #define vluxei64_v_i64m8_m | ( | ... | ) | __riscv_vluxei64_v_i64m8_tumu(__VA_ARGS__) |
| #define vluxei64_v_i8m1 | ( | ... | ) | __riscv_vluxei64_v_i8m1(__VA_ARGS__) |
| #define vluxei64_v_i8m1_m | ( | ... | ) | __riscv_vluxei64_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxei64_v_i8mf2 | ( | ... | ) | __riscv_vluxei64_v_i8mf2(__VA_ARGS__) |
| #define vluxei64_v_i8mf2_m | ( | ... | ) | __riscv_vluxei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxei64_v_i8mf4 | ( | ... | ) | __riscv_vluxei64_v_i8mf4(__VA_ARGS__) |
| #define vluxei64_v_i8mf4_m | ( | ... | ) | __riscv_vluxei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxei64_v_i8mf8 | ( | ... | ) | __riscv_vluxei64_v_i8mf8(__VA_ARGS__) |
| #define vluxei64_v_i8mf8_m | ( | ... | ) | __riscv_vluxei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxei64_v_u16m1 | ( | ... | ) | __riscv_vluxei64_v_u16m1(__VA_ARGS__) |
| #define vluxei64_v_u16m1_m | ( | ... | ) | __riscv_vluxei64_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxei64_v_u16m2 | ( | ... | ) | __riscv_vluxei64_v_u16m2(__VA_ARGS__) |
| #define vluxei64_v_u16m2_m | ( | ... | ) | __riscv_vluxei64_v_u16m2_tumu(__VA_ARGS__) |
| #define vluxei64_v_u16mf2 | ( | ... | ) | __riscv_vluxei64_v_u16mf2(__VA_ARGS__) |
| #define vluxei64_v_u16mf2_m | ( | ... | ) | __riscv_vluxei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxei64_v_u16mf4 | ( | ... | ) | __riscv_vluxei64_v_u16mf4(__VA_ARGS__) |
| #define vluxei64_v_u16mf4_m | ( | ... | ) | __riscv_vluxei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxei64_v_u32m1 | ( | ... | ) | __riscv_vluxei64_v_u32m1(__VA_ARGS__) |
| #define vluxei64_v_u32m1_m | ( | ... | ) | __riscv_vluxei64_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxei64_v_u32m2 | ( | ... | ) | __riscv_vluxei64_v_u32m2(__VA_ARGS__) |
| #define vluxei64_v_u32m2_m | ( | ... | ) | __riscv_vluxei64_v_u32m2_tumu(__VA_ARGS__) |
| #define vluxei64_v_u32m4 | ( | ... | ) | __riscv_vluxei64_v_u32m4(__VA_ARGS__) |
| #define vluxei64_v_u32m4_m | ( | ... | ) | __riscv_vluxei64_v_u32m4_tumu(__VA_ARGS__) |
| #define vluxei64_v_u32mf2 | ( | ... | ) | __riscv_vluxei64_v_u32mf2(__VA_ARGS__) |
| #define vluxei64_v_u32mf2_m | ( | ... | ) | __riscv_vluxei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxei64_v_u64m1 | ( | ... | ) | __riscv_vluxei64_v_u64m1(__VA_ARGS__) |
| #define vluxei64_v_u64m1_m | ( | ... | ) | __riscv_vluxei64_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxei64_v_u64m2 | ( | ... | ) | __riscv_vluxei64_v_u64m2(__VA_ARGS__) |
| #define vluxei64_v_u64m2_m | ( | ... | ) | __riscv_vluxei64_v_u64m2_tumu(__VA_ARGS__) |
| #define vluxei64_v_u64m4 | ( | ... | ) | __riscv_vluxei64_v_u64m4(__VA_ARGS__) |
| #define vluxei64_v_u64m4_m | ( | ... | ) | __riscv_vluxei64_v_u64m4_tumu(__VA_ARGS__) |
| #define vluxei64_v_u64m8 | ( | ... | ) | __riscv_vluxei64_v_u64m8(__VA_ARGS__) |
| #define vluxei64_v_u64m8_m | ( | ... | ) | __riscv_vluxei64_v_u64m8_tumu(__VA_ARGS__) |
| #define vluxei64_v_u8m1 | ( | ... | ) | __riscv_vluxei64_v_u8m1(__VA_ARGS__) |
| #define vluxei64_v_u8m1_m | ( | ... | ) | __riscv_vluxei64_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxei64_v_u8mf2 | ( | ... | ) | __riscv_vluxei64_v_u8mf2(__VA_ARGS__) |
| #define vluxei64_v_u8mf2_m | ( | ... | ) | __riscv_vluxei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxei64_v_u8mf4 | ( | ... | ) | __riscv_vluxei64_v_u8mf4(__VA_ARGS__) |
| #define vluxei64_v_u8mf4_m | ( | ... | ) | __riscv_vluxei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxei64_v_u8mf8 | ( | ... | ) | __riscv_vluxei64_v_u8mf8(__VA_ARGS__) |
| #define vluxei64_v_u8mf8_m | ( | ... | ) | __riscv_vluxei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxei8_v_f16m1 | ( | ... | ) | __riscv_vluxei8_v_f16m1(__VA_ARGS__) |
| #define vluxei8_v_f16m1_m | ( | ... | ) | __riscv_vluxei8_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxei8_v_f16m2 | ( | ... | ) | __riscv_vluxei8_v_f16m2(__VA_ARGS__) |
| #define vluxei8_v_f16m2_m | ( | ... | ) | __riscv_vluxei8_v_f16m2_tumu(__VA_ARGS__) |
| #define vluxei8_v_f16m4 | ( | ... | ) | __riscv_vluxei8_v_f16m4(__VA_ARGS__) |
| #define vluxei8_v_f16m4_m | ( | ... | ) | __riscv_vluxei8_v_f16m4_tumu(__VA_ARGS__) |
| #define vluxei8_v_f16m8 | ( | ... | ) | __riscv_vluxei8_v_f16m8(__VA_ARGS__) |
| #define vluxei8_v_f16m8_m | ( | ... | ) | __riscv_vluxei8_v_f16m8_tumu(__VA_ARGS__) |
| #define vluxei8_v_f16mf2 | ( | ... | ) | __riscv_vluxei8_v_f16mf2(__VA_ARGS__) |
| #define vluxei8_v_f16mf2_m | ( | ... | ) | __riscv_vluxei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxei8_v_f16mf4 | ( | ... | ) | __riscv_vluxei8_v_f16mf4(__VA_ARGS__) |
| #define vluxei8_v_f16mf4_m | ( | ... | ) | __riscv_vluxei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxei8_v_f32m1 | ( | ... | ) | __riscv_vluxei8_v_f32m1(__VA_ARGS__) |
| #define vluxei8_v_f32m1_m | ( | ... | ) | __riscv_vluxei8_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxei8_v_f32m2 | ( | ... | ) | __riscv_vluxei8_v_f32m2(__VA_ARGS__) |
| #define vluxei8_v_f32m2_m | ( | ... | ) | __riscv_vluxei8_v_f32m2_tumu(__VA_ARGS__) |
| #define vluxei8_v_f32m4 | ( | ... | ) | __riscv_vluxei8_v_f32m4(__VA_ARGS__) |
| #define vluxei8_v_f32m4_m | ( | ... | ) | __riscv_vluxei8_v_f32m4_tumu(__VA_ARGS__) |
| #define vluxei8_v_f32m8 | ( | ... | ) | __riscv_vluxei8_v_f32m8(__VA_ARGS__) |
| #define vluxei8_v_f32m8_m | ( | ... | ) | __riscv_vluxei8_v_f32m8_tumu(__VA_ARGS__) |
| #define vluxei8_v_f32mf2 | ( | ... | ) | __riscv_vluxei8_v_f32mf2(__VA_ARGS__) |
| #define vluxei8_v_f32mf2_m | ( | ... | ) | __riscv_vluxei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxei8_v_f64m1 | ( | ... | ) | __riscv_vluxei8_v_f64m1(__VA_ARGS__) |
| #define vluxei8_v_f64m1_m | ( | ... | ) | __riscv_vluxei8_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxei8_v_f64m2 | ( | ... | ) | __riscv_vluxei8_v_f64m2(__VA_ARGS__) |
| #define vluxei8_v_f64m2_m | ( | ... | ) | __riscv_vluxei8_v_f64m2_tumu(__VA_ARGS__) |
| #define vluxei8_v_f64m4 | ( | ... | ) | __riscv_vluxei8_v_f64m4(__VA_ARGS__) |
| #define vluxei8_v_f64m4_m | ( | ... | ) | __riscv_vluxei8_v_f64m4_tumu(__VA_ARGS__) |
| #define vluxei8_v_f64m8 | ( | ... | ) | __riscv_vluxei8_v_f64m8(__VA_ARGS__) |
| #define vluxei8_v_f64m8_m | ( | ... | ) | __riscv_vluxei8_v_f64m8_tumu(__VA_ARGS__) |
| #define vluxei8_v_i16m1 | ( | ... | ) | __riscv_vluxei8_v_i16m1(__VA_ARGS__) |
| #define vluxei8_v_i16m1_m | ( | ... | ) | __riscv_vluxei8_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxei8_v_i16m2 | ( | ... | ) | __riscv_vluxei8_v_i16m2(__VA_ARGS__) |
| #define vluxei8_v_i16m2_m | ( | ... | ) | __riscv_vluxei8_v_i16m2_tumu(__VA_ARGS__) |
| #define vluxei8_v_i16m4 | ( | ... | ) | __riscv_vluxei8_v_i16m4(__VA_ARGS__) |
| #define vluxei8_v_i16m4_m | ( | ... | ) | __riscv_vluxei8_v_i16m4_tumu(__VA_ARGS__) |
| #define vluxei8_v_i16m8 | ( | ... | ) | __riscv_vluxei8_v_i16m8(__VA_ARGS__) |
| #define vluxei8_v_i16m8_m | ( | ... | ) | __riscv_vluxei8_v_i16m8_tumu(__VA_ARGS__) |
| #define vluxei8_v_i16mf2 | ( | ... | ) | __riscv_vluxei8_v_i16mf2(__VA_ARGS__) |
| #define vluxei8_v_i16mf2_m | ( | ... | ) | __riscv_vluxei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxei8_v_i16mf4 | ( | ... | ) | __riscv_vluxei8_v_i16mf4(__VA_ARGS__) |
| #define vluxei8_v_i16mf4_m | ( | ... | ) | __riscv_vluxei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxei8_v_i32m1 | ( | ... | ) | __riscv_vluxei8_v_i32m1(__VA_ARGS__) |
| #define vluxei8_v_i32m1_m | ( | ... | ) | __riscv_vluxei8_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxei8_v_i32m2 | ( | ... | ) | __riscv_vluxei8_v_i32m2(__VA_ARGS__) |
| #define vluxei8_v_i32m2_m | ( | ... | ) | __riscv_vluxei8_v_i32m2_tumu(__VA_ARGS__) |
| #define vluxei8_v_i32m4 | ( | ... | ) | __riscv_vluxei8_v_i32m4(__VA_ARGS__) |
| #define vluxei8_v_i32m4_m | ( | ... | ) | __riscv_vluxei8_v_i32m4_tumu(__VA_ARGS__) |
| #define vluxei8_v_i32m8 | ( | ... | ) | __riscv_vluxei8_v_i32m8(__VA_ARGS__) |
| #define vluxei8_v_i32m8_m | ( | ... | ) | __riscv_vluxei8_v_i32m8_tumu(__VA_ARGS__) |
| #define vluxei8_v_i32mf2 | ( | ... | ) | __riscv_vluxei8_v_i32mf2(__VA_ARGS__) |
| #define vluxei8_v_i32mf2_m | ( | ... | ) | __riscv_vluxei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxei8_v_i64m1 | ( | ... | ) | __riscv_vluxei8_v_i64m1(__VA_ARGS__) |
| #define vluxei8_v_i64m1_m | ( | ... | ) | __riscv_vluxei8_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxei8_v_i64m2 | ( | ... | ) | __riscv_vluxei8_v_i64m2(__VA_ARGS__) |
| #define vluxei8_v_i64m2_m | ( | ... | ) | __riscv_vluxei8_v_i64m2_tumu(__VA_ARGS__) |
| #define vluxei8_v_i64m4 | ( | ... | ) | __riscv_vluxei8_v_i64m4(__VA_ARGS__) |
| #define vluxei8_v_i64m4_m | ( | ... | ) | __riscv_vluxei8_v_i64m4_tumu(__VA_ARGS__) |
| #define vluxei8_v_i64m8 | ( | ... | ) | __riscv_vluxei8_v_i64m8(__VA_ARGS__) |
| #define vluxei8_v_i64m8_m | ( | ... | ) | __riscv_vluxei8_v_i64m8_tumu(__VA_ARGS__) |
| #define vluxei8_v_i8m1 | ( | ... | ) | __riscv_vluxei8_v_i8m1(__VA_ARGS__) |
| #define vluxei8_v_i8m1_m | ( | ... | ) | __riscv_vluxei8_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxei8_v_i8m2 | ( | ... | ) | __riscv_vluxei8_v_i8m2(__VA_ARGS__) |
| #define vluxei8_v_i8m2_m | ( | ... | ) | __riscv_vluxei8_v_i8m2_tumu(__VA_ARGS__) |
| #define vluxei8_v_i8m4 | ( | ... | ) | __riscv_vluxei8_v_i8m4(__VA_ARGS__) |
| #define vluxei8_v_i8m4_m | ( | ... | ) | __riscv_vluxei8_v_i8m4_tumu(__VA_ARGS__) |
| #define vluxei8_v_i8m8 | ( | ... | ) | __riscv_vluxei8_v_i8m8(__VA_ARGS__) |
| #define vluxei8_v_i8m8_m | ( | ... | ) | __riscv_vluxei8_v_i8m8_tumu(__VA_ARGS__) |
| #define vluxei8_v_i8mf2 | ( | ... | ) | __riscv_vluxei8_v_i8mf2(__VA_ARGS__) |
| #define vluxei8_v_i8mf2_m | ( | ... | ) | __riscv_vluxei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxei8_v_i8mf4 | ( | ... | ) | __riscv_vluxei8_v_i8mf4(__VA_ARGS__) |
| #define vluxei8_v_i8mf4_m | ( | ... | ) | __riscv_vluxei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxei8_v_i8mf8 | ( | ... | ) | __riscv_vluxei8_v_i8mf8(__VA_ARGS__) |
| #define vluxei8_v_i8mf8_m | ( | ... | ) | __riscv_vluxei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxei8_v_u16m1 | ( | ... | ) | __riscv_vluxei8_v_u16m1(__VA_ARGS__) |
| #define vluxei8_v_u16m1_m | ( | ... | ) | __riscv_vluxei8_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxei8_v_u16m2 | ( | ... | ) | __riscv_vluxei8_v_u16m2(__VA_ARGS__) |
| #define vluxei8_v_u16m2_m | ( | ... | ) | __riscv_vluxei8_v_u16m2_tumu(__VA_ARGS__) |
| #define vluxei8_v_u16m4 | ( | ... | ) | __riscv_vluxei8_v_u16m4(__VA_ARGS__) |
| #define vluxei8_v_u16m4_m | ( | ... | ) | __riscv_vluxei8_v_u16m4_tumu(__VA_ARGS__) |
| #define vluxei8_v_u16m8 | ( | ... | ) | __riscv_vluxei8_v_u16m8(__VA_ARGS__) |
| #define vluxei8_v_u16m8_m | ( | ... | ) | __riscv_vluxei8_v_u16m8_tumu(__VA_ARGS__) |
| #define vluxei8_v_u16mf2 | ( | ... | ) | __riscv_vluxei8_v_u16mf2(__VA_ARGS__) |
| #define vluxei8_v_u16mf2_m | ( | ... | ) | __riscv_vluxei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxei8_v_u16mf4 | ( | ... | ) | __riscv_vluxei8_v_u16mf4(__VA_ARGS__) |
| #define vluxei8_v_u16mf4_m | ( | ... | ) | __riscv_vluxei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxei8_v_u32m1 | ( | ... | ) | __riscv_vluxei8_v_u32m1(__VA_ARGS__) |
| #define vluxei8_v_u32m1_m | ( | ... | ) | __riscv_vluxei8_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxei8_v_u32m2 | ( | ... | ) | __riscv_vluxei8_v_u32m2(__VA_ARGS__) |
| #define vluxei8_v_u32m2_m | ( | ... | ) | __riscv_vluxei8_v_u32m2_tumu(__VA_ARGS__) |
| #define vluxei8_v_u32m4 | ( | ... | ) | __riscv_vluxei8_v_u32m4(__VA_ARGS__) |
| #define vluxei8_v_u32m4_m | ( | ... | ) | __riscv_vluxei8_v_u32m4_tumu(__VA_ARGS__) |
| #define vluxei8_v_u32m8 | ( | ... | ) | __riscv_vluxei8_v_u32m8(__VA_ARGS__) |
| #define vluxei8_v_u32m8_m | ( | ... | ) | __riscv_vluxei8_v_u32m8_tumu(__VA_ARGS__) |
| #define vluxei8_v_u32mf2 | ( | ... | ) | __riscv_vluxei8_v_u32mf2(__VA_ARGS__) |
| #define vluxei8_v_u32mf2_m | ( | ... | ) | __riscv_vluxei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxei8_v_u64m1 | ( | ... | ) | __riscv_vluxei8_v_u64m1(__VA_ARGS__) |
| #define vluxei8_v_u64m1_m | ( | ... | ) | __riscv_vluxei8_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxei8_v_u64m2 | ( | ... | ) | __riscv_vluxei8_v_u64m2(__VA_ARGS__) |
| #define vluxei8_v_u64m2_m | ( | ... | ) | __riscv_vluxei8_v_u64m2_tumu(__VA_ARGS__) |
| #define vluxei8_v_u64m4 | ( | ... | ) | __riscv_vluxei8_v_u64m4(__VA_ARGS__) |
| #define vluxei8_v_u64m4_m | ( | ... | ) | __riscv_vluxei8_v_u64m4_tumu(__VA_ARGS__) |
| #define vluxei8_v_u64m8 | ( | ... | ) | __riscv_vluxei8_v_u64m8(__VA_ARGS__) |
| #define vluxei8_v_u64m8_m | ( | ... | ) | __riscv_vluxei8_v_u64m8_tumu(__VA_ARGS__) |
| #define vluxei8_v_u8m1 | ( | ... | ) | __riscv_vluxei8_v_u8m1(__VA_ARGS__) |
| #define vluxei8_v_u8m1_m | ( | ... | ) | __riscv_vluxei8_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxei8_v_u8m2 | ( | ... | ) | __riscv_vluxei8_v_u8m2(__VA_ARGS__) |
| #define vluxei8_v_u8m2_m | ( | ... | ) | __riscv_vluxei8_v_u8m2_tumu(__VA_ARGS__) |
| #define vluxei8_v_u8m4 | ( | ... | ) | __riscv_vluxei8_v_u8m4(__VA_ARGS__) |
| #define vluxei8_v_u8m4_m | ( | ... | ) | __riscv_vluxei8_v_u8m4_tumu(__VA_ARGS__) |
| #define vluxei8_v_u8m8 | ( | ... | ) | __riscv_vluxei8_v_u8m8(__VA_ARGS__) |
| #define vluxei8_v_u8m8_m | ( | ... | ) | __riscv_vluxei8_v_u8m8_tumu(__VA_ARGS__) |
| #define vluxei8_v_u8mf2 | ( | ... | ) | __riscv_vluxei8_v_u8mf2(__VA_ARGS__) |
| #define vluxei8_v_u8mf2_m | ( | ... | ) | __riscv_vluxei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxei8_v_u8mf4 | ( | ... | ) | __riscv_vluxei8_v_u8mf4(__VA_ARGS__) |
| #define vluxei8_v_u8mf4_m | ( | ... | ) | __riscv_vluxei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxei8_v_u8mf8 | ( | ... | ) | __riscv_vluxei8_v_u8mf8(__VA_ARGS__) |
| #define vluxei8_v_u8mf8_m | ( | ... | ) | __riscv_vluxei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_f16m1 | ( | ... | ) | __riscv_vluxseg2ei16_v_f16m1(__VA_ARGS__) |
| #define vluxseg2ei16_v_f16m1_m | ( | ... | ) | __riscv_vluxseg2ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_f16m2 | ( | ... | ) | __riscv_vluxseg2ei16_v_f16m2(__VA_ARGS__) |
| #define vluxseg2ei16_v_f16m2_m | ( | ... | ) | __riscv_vluxseg2ei16_v_f16m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_f16m4 | ( | ... | ) | __riscv_vluxseg2ei16_v_f16m4(__VA_ARGS__) |
| #define vluxseg2ei16_v_f16m4_m | ( | ... | ) | __riscv_vluxseg2ei16_v_f16m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_f16mf2 | ( | ... | ) | __riscv_vluxseg2ei16_v_f16mf2(__VA_ARGS__) |
| #define vluxseg2ei16_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg2ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_f16mf4 | ( | ... | ) | __riscv_vluxseg2ei16_v_f16mf4(__VA_ARGS__) |
| #define vluxseg2ei16_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg2ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_f32m1 | ( | ... | ) | __riscv_vluxseg2ei16_v_f32m1(__VA_ARGS__) |
| #define vluxseg2ei16_v_f32m1_m | ( | ... | ) | __riscv_vluxseg2ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_f32m2 | ( | ... | ) | __riscv_vluxseg2ei16_v_f32m2(__VA_ARGS__) |
| #define vluxseg2ei16_v_f32m2_m | ( | ... | ) | __riscv_vluxseg2ei16_v_f32m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_f32m4 | ( | ... | ) | __riscv_vluxseg2ei16_v_f32m4(__VA_ARGS__) |
| #define vluxseg2ei16_v_f32m4_m | ( | ... | ) | __riscv_vluxseg2ei16_v_f32m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_f32mf2 | ( | ... | ) | __riscv_vluxseg2ei16_v_f32mf2(__VA_ARGS__) |
| #define vluxseg2ei16_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg2ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_f64m1 | ( | ... | ) | __riscv_vluxseg2ei16_v_f64m1(__VA_ARGS__) |
| #define vluxseg2ei16_v_f64m1_m | ( | ... | ) | __riscv_vluxseg2ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_f64m2 | ( | ... | ) | __riscv_vluxseg2ei16_v_f64m2(__VA_ARGS__) |
| #define vluxseg2ei16_v_f64m2_m | ( | ... | ) | __riscv_vluxseg2ei16_v_f64m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_f64m4 | ( | ... | ) | __riscv_vluxseg2ei16_v_f64m4(__VA_ARGS__) |
| #define vluxseg2ei16_v_f64m4_m | ( | ... | ) | __riscv_vluxseg2ei16_v_f64m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_i16m1 | ( | ... | ) | __riscv_vluxseg2ei16_v_i16m1(__VA_ARGS__) |
| #define vluxseg2ei16_v_i16m1_m | ( | ... | ) | __riscv_vluxseg2ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_i16m2 | ( | ... | ) | __riscv_vluxseg2ei16_v_i16m2(__VA_ARGS__) |
| #define vluxseg2ei16_v_i16m2_m | ( | ... | ) | __riscv_vluxseg2ei16_v_i16m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_i16m4 | ( | ... | ) | __riscv_vluxseg2ei16_v_i16m4(__VA_ARGS__) |
| #define vluxseg2ei16_v_i16m4_m | ( | ... | ) | __riscv_vluxseg2ei16_v_i16m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_i16mf2 | ( | ... | ) | __riscv_vluxseg2ei16_v_i16mf2(__VA_ARGS__) |
| #define vluxseg2ei16_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg2ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_i16mf4 | ( | ... | ) | __riscv_vluxseg2ei16_v_i16mf4(__VA_ARGS__) |
| #define vluxseg2ei16_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg2ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_i32m1 | ( | ... | ) | __riscv_vluxseg2ei16_v_i32m1(__VA_ARGS__) |
| #define vluxseg2ei16_v_i32m1_m | ( | ... | ) | __riscv_vluxseg2ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_i32m2 | ( | ... | ) | __riscv_vluxseg2ei16_v_i32m2(__VA_ARGS__) |
| #define vluxseg2ei16_v_i32m2_m | ( | ... | ) | __riscv_vluxseg2ei16_v_i32m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_i32m4 | ( | ... | ) | __riscv_vluxseg2ei16_v_i32m4(__VA_ARGS__) |
| #define vluxseg2ei16_v_i32m4_m | ( | ... | ) | __riscv_vluxseg2ei16_v_i32m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_i32mf2 | ( | ... | ) | __riscv_vluxseg2ei16_v_i32mf2(__VA_ARGS__) |
| #define vluxseg2ei16_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg2ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_i64m1 | ( | ... | ) | __riscv_vluxseg2ei16_v_i64m1(__VA_ARGS__) |
| #define vluxseg2ei16_v_i64m1_m | ( | ... | ) | __riscv_vluxseg2ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_i64m2 | ( | ... | ) | __riscv_vluxseg2ei16_v_i64m2(__VA_ARGS__) |
| #define vluxseg2ei16_v_i64m2_m | ( | ... | ) | __riscv_vluxseg2ei16_v_i64m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_i64m4 | ( | ... | ) | __riscv_vluxseg2ei16_v_i64m4(__VA_ARGS__) |
| #define vluxseg2ei16_v_i64m4_m | ( | ... | ) | __riscv_vluxseg2ei16_v_i64m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_i8m1 | ( | ... | ) | __riscv_vluxseg2ei16_v_i8m1(__VA_ARGS__) |
| #define vluxseg2ei16_v_i8m1_m | ( | ... | ) | __riscv_vluxseg2ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_i8m2 | ( | ... | ) | __riscv_vluxseg2ei16_v_i8m2(__VA_ARGS__) |
| #define vluxseg2ei16_v_i8m2_m | ( | ... | ) | __riscv_vluxseg2ei16_v_i8m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_i8m4 | ( | ... | ) | __riscv_vluxseg2ei16_v_i8m4(__VA_ARGS__) |
| #define vluxseg2ei16_v_i8m4_m | ( | ... | ) | __riscv_vluxseg2ei16_v_i8m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_i8mf2 | ( | ... | ) | __riscv_vluxseg2ei16_v_i8mf2(__VA_ARGS__) |
| #define vluxseg2ei16_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg2ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_i8mf4 | ( | ... | ) | __riscv_vluxseg2ei16_v_i8mf4(__VA_ARGS__) |
| #define vluxseg2ei16_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg2ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_i8mf8 | ( | ... | ) | __riscv_vluxseg2ei16_v_i8mf8(__VA_ARGS__) |
| #define vluxseg2ei16_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg2ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_u16m1 | ( | ... | ) | __riscv_vluxseg2ei16_v_u16m1(__VA_ARGS__) |
| #define vluxseg2ei16_v_u16m1_m | ( | ... | ) | __riscv_vluxseg2ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_u16m2 | ( | ... | ) | __riscv_vluxseg2ei16_v_u16m2(__VA_ARGS__) |
| #define vluxseg2ei16_v_u16m2_m | ( | ... | ) | __riscv_vluxseg2ei16_v_u16m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_u16m4 | ( | ... | ) | __riscv_vluxseg2ei16_v_u16m4(__VA_ARGS__) |
| #define vluxseg2ei16_v_u16m4_m | ( | ... | ) | __riscv_vluxseg2ei16_v_u16m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_u16mf2 | ( | ... | ) | __riscv_vluxseg2ei16_v_u16mf2(__VA_ARGS__) |
| #define vluxseg2ei16_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg2ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_u16mf4 | ( | ... | ) | __riscv_vluxseg2ei16_v_u16mf4(__VA_ARGS__) |
| #define vluxseg2ei16_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg2ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_u32m1 | ( | ... | ) | __riscv_vluxseg2ei16_v_u32m1(__VA_ARGS__) |
| #define vluxseg2ei16_v_u32m1_m | ( | ... | ) | __riscv_vluxseg2ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_u32m2 | ( | ... | ) | __riscv_vluxseg2ei16_v_u32m2(__VA_ARGS__) |
| #define vluxseg2ei16_v_u32m2_m | ( | ... | ) | __riscv_vluxseg2ei16_v_u32m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_u32m4 | ( | ... | ) | __riscv_vluxseg2ei16_v_u32m4(__VA_ARGS__) |
| #define vluxseg2ei16_v_u32m4_m | ( | ... | ) | __riscv_vluxseg2ei16_v_u32m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_u32mf2 | ( | ... | ) | __riscv_vluxseg2ei16_v_u32mf2(__VA_ARGS__) |
| #define vluxseg2ei16_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg2ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_u64m1 | ( | ... | ) | __riscv_vluxseg2ei16_v_u64m1(__VA_ARGS__) |
| #define vluxseg2ei16_v_u64m1_m | ( | ... | ) | __riscv_vluxseg2ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_u64m2 | ( | ... | ) | __riscv_vluxseg2ei16_v_u64m2(__VA_ARGS__) |
| #define vluxseg2ei16_v_u64m2_m | ( | ... | ) | __riscv_vluxseg2ei16_v_u64m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_u64m4 | ( | ... | ) | __riscv_vluxseg2ei16_v_u64m4(__VA_ARGS__) |
| #define vluxseg2ei16_v_u64m4_m | ( | ... | ) | __riscv_vluxseg2ei16_v_u64m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_u8m1 | ( | ... | ) | __riscv_vluxseg2ei16_v_u8m1(__VA_ARGS__) |
| #define vluxseg2ei16_v_u8m1_m | ( | ... | ) | __riscv_vluxseg2ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_u8m2 | ( | ... | ) | __riscv_vluxseg2ei16_v_u8m2(__VA_ARGS__) |
| #define vluxseg2ei16_v_u8m2_m | ( | ... | ) | __riscv_vluxseg2ei16_v_u8m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_u8m4 | ( | ... | ) | __riscv_vluxseg2ei16_v_u8m4(__VA_ARGS__) |
| #define vluxseg2ei16_v_u8m4_m | ( | ... | ) | __riscv_vluxseg2ei16_v_u8m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_u8mf2 | ( | ... | ) | __riscv_vluxseg2ei16_v_u8mf2(__VA_ARGS__) |
| #define vluxseg2ei16_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg2ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_u8mf4 | ( | ... | ) | __riscv_vluxseg2ei16_v_u8mf4(__VA_ARGS__) |
| #define vluxseg2ei16_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg2ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg2ei16_v_u8mf8 | ( | ... | ) | __riscv_vluxseg2ei16_v_u8mf8(__VA_ARGS__) |
| #define vluxseg2ei16_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg2ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_f16m1 | ( | ... | ) | __riscv_vluxseg2ei32_v_f16m1(__VA_ARGS__) |
| #define vluxseg2ei32_v_f16m1_m | ( | ... | ) | __riscv_vluxseg2ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_f16m2 | ( | ... | ) | __riscv_vluxseg2ei32_v_f16m2(__VA_ARGS__) |
| #define vluxseg2ei32_v_f16m2_m | ( | ... | ) | __riscv_vluxseg2ei32_v_f16m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_f16m4 | ( | ... | ) | __riscv_vluxseg2ei32_v_f16m4(__VA_ARGS__) |
| #define vluxseg2ei32_v_f16m4_m | ( | ... | ) | __riscv_vluxseg2ei32_v_f16m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_f16mf2 | ( | ... | ) | __riscv_vluxseg2ei32_v_f16mf2(__VA_ARGS__) |
| #define vluxseg2ei32_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg2ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_f16mf4 | ( | ... | ) | __riscv_vluxseg2ei32_v_f16mf4(__VA_ARGS__) |
| #define vluxseg2ei32_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg2ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_f32m1 | ( | ... | ) | __riscv_vluxseg2ei32_v_f32m1(__VA_ARGS__) |
| #define vluxseg2ei32_v_f32m1_m | ( | ... | ) | __riscv_vluxseg2ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_f32m2 | ( | ... | ) | __riscv_vluxseg2ei32_v_f32m2(__VA_ARGS__) |
| #define vluxseg2ei32_v_f32m2_m | ( | ... | ) | __riscv_vluxseg2ei32_v_f32m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_f32m4 | ( | ... | ) | __riscv_vluxseg2ei32_v_f32m4(__VA_ARGS__) |
| #define vluxseg2ei32_v_f32m4_m | ( | ... | ) | __riscv_vluxseg2ei32_v_f32m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_f32mf2 | ( | ... | ) | __riscv_vluxseg2ei32_v_f32mf2(__VA_ARGS__) |
| #define vluxseg2ei32_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg2ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_f64m1 | ( | ... | ) | __riscv_vluxseg2ei32_v_f64m1(__VA_ARGS__) |
| #define vluxseg2ei32_v_f64m1_m | ( | ... | ) | __riscv_vluxseg2ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_f64m2 | ( | ... | ) | __riscv_vluxseg2ei32_v_f64m2(__VA_ARGS__) |
| #define vluxseg2ei32_v_f64m2_m | ( | ... | ) | __riscv_vluxseg2ei32_v_f64m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_f64m4 | ( | ... | ) | __riscv_vluxseg2ei32_v_f64m4(__VA_ARGS__) |
| #define vluxseg2ei32_v_f64m4_m | ( | ... | ) | __riscv_vluxseg2ei32_v_f64m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_i16m1 | ( | ... | ) | __riscv_vluxseg2ei32_v_i16m1(__VA_ARGS__) |
| #define vluxseg2ei32_v_i16m1_m | ( | ... | ) | __riscv_vluxseg2ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_i16m2 | ( | ... | ) | __riscv_vluxseg2ei32_v_i16m2(__VA_ARGS__) |
| #define vluxseg2ei32_v_i16m2_m | ( | ... | ) | __riscv_vluxseg2ei32_v_i16m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_i16m4 | ( | ... | ) | __riscv_vluxseg2ei32_v_i16m4(__VA_ARGS__) |
| #define vluxseg2ei32_v_i16m4_m | ( | ... | ) | __riscv_vluxseg2ei32_v_i16m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_i16mf2 | ( | ... | ) | __riscv_vluxseg2ei32_v_i16mf2(__VA_ARGS__) |
| #define vluxseg2ei32_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg2ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_i16mf4 | ( | ... | ) | __riscv_vluxseg2ei32_v_i16mf4(__VA_ARGS__) |
| #define vluxseg2ei32_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg2ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_i32m1 | ( | ... | ) | __riscv_vluxseg2ei32_v_i32m1(__VA_ARGS__) |
| #define vluxseg2ei32_v_i32m1_m | ( | ... | ) | __riscv_vluxseg2ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_i32m2 | ( | ... | ) | __riscv_vluxseg2ei32_v_i32m2(__VA_ARGS__) |
| #define vluxseg2ei32_v_i32m2_m | ( | ... | ) | __riscv_vluxseg2ei32_v_i32m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_i32m4 | ( | ... | ) | __riscv_vluxseg2ei32_v_i32m4(__VA_ARGS__) |
| #define vluxseg2ei32_v_i32m4_m | ( | ... | ) | __riscv_vluxseg2ei32_v_i32m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_i32mf2 | ( | ... | ) | __riscv_vluxseg2ei32_v_i32mf2(__VA_ARGS__) |
| #define vluxseg2ei32_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg2ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_i64m1 | ( | ... | ) | __riscv_vluxseg2ei32_v_i64m1(__VA_ARGS__) |
| #define vluxseg2ei32_v_i64m1_m | ( | ... | ) | __riscv_vluxseg2ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_i64m2 | ( | ... | ) | __riscv_vluxseg2ei32_v_i64m2(__VA_ARGS__) |
| #define vluxseg2ei32_v_i64m2_m | ( | ... | ) | __riscv_vluxseg2ei32_v_i64m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_i64m4 | ( | ... | ) | __riscv_vluxseg2ei32_v_i64m4(__VA_ARGS__) |
| #define vluxseg2ei32_v_i64m4_m | ( | ... | ) | __riscv_vluxseg2ei32_v_i64m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_i8m1 | ( | ... | ) | __riscv_vluxseg2ei32_v_i8m1(__VA_ARGS__) |
| #define vluxseg2ei32_v_i8m1_m | ( | ... | ) | __riscv_vluxseg2ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_i8m2 | ( | ... | ) | __riscv_vluxseg2ei32_v_i8m2(__VA_ARGS__) |
| #define vluxseg2ei32_v_i8m2_m | ( | ... | ) | __riscv_vluxseg2ei32_v_i8m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_i8mf2 | ( | ... | ) | __riscv_vluxseg2ei32_v_i8mf2(__VA_ARGS__) |
| #define vluxseg2ei32_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg2ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_i8mf4 | ( | ... | ) | __riscv_vluxseg2ei32_v_i8mf4(__VA_ARGS__) |
| #define vluxseg2ei32_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg2ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_i8mf8 | ( | ... | ) | __riscv_vluxseg2ei32_v_i8mf8(__VA_ARGS__) |
| #define vluxseg2ei32_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg2ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_u16m1 | ( | ... | ) | __riscv_vluxseg2ei32_v_u16m1(__VA_ARGS__) |
| #define vluxseg2ei32_v_u16m1_m | ( | ... | ) | __riscv_vluxseg2ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_u16m2 | ( | ... | ) | __riscv_vluxseg2ei32_v_u16m2(__VA_ARGS__) |
| #define vluxseg2ei32_v_u16m2_m | ( | ... | ) | __riscv_vluxseg2ei32_v_u16m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_u16m4 | ( | ... | ) | __riscv_vluxseg2ei32_v_u16m4(__VA_ARGS__) |
| #define vluxseg2ei32_v_u16m4_m | ( | ... | ) | __riscv_vluxseg2ei32_v_u16m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_u16mf2 | ( | ... | ) | __riscv_vluxseg2ei32_v_u16mf2(__VA_ARGS__) |
| #define vluxseg2ei32_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg2ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_u16mf4 | ( | ... | ) | __riscv_vluxseg2ei32_v_u16mf4(__VA_ARGS__) |
| #define vluxseg2ei32_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg2ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_u32m1 | ( | ... | ) | __riscv_vluxseg2ei32_v_u32m1(__VA_ARGS__) |
| #define vluxseg2ei32_v_u32m1_m | ( | ... | ) | __riscv_vluxseg2ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_u32m2 | ( | ... | ) | __riscv_vluxseg2ei32_v_u32m2(__VA_ARGS__) |
| #define vluxseg2ei32_v_u32m2_m | ( | ... | ) | __riscv_vluxseg2ei32_v_u32m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_u32m4 | ( | ... | ) | __riscv_vluxseg2ei32_v_u32m4(__VA_ARGS__) |
| #define vluxseg2ei32_v_u32m4_m | ( | ... | ) | __riscv_vluxseg2ei32_v_u32m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_u32mf2 | ( | ... | ) | __riscv_vluxseg2ei32_v_u32mf2(__VA_ARGS__) |
| #define vluxseg2ei32_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg2ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_u64m1 | ( | ... | ) | __riscv_vluxseg2ei32_v_u64m1(__VA_ARGS__) |
| #define vluxseg2ei32_v_u64m1_m | ( | ... | ) | __riscv_vluxseg2ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_u64m2 | ( | ... | ) | __riscv_vluxseg2ei32_v_u64m2(__VA_ARGS__) |
| #define vluxseg2ei32_v_u64m2_m | ( | ... | ) | __riscv_vluxseg2ei32_v_u64m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_u64m4 | ( | ... | ) | __riscv_vluxseg2ei32_v_u64m4(__VA_ARGS__) |
| #define vluxseg2ei32_v_u64m4_m | ( | ... | ) | __riscv_vluxseg2ei32_v_u64m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_u8m1 | ( | ... | ) | __riscv_vluxseg2ei32_v_u8m1(__VA_ARGS__) |
| #define vluxseg2ei32_v_u8m1_m | ( | ... | ) | __riscv_vluxseg2ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_u8m2 | ( | ... | ) | __riscv_vluxseg2ei32_v_u8m2(__VA_ARGS__) |
| #define vluxseg2ei32_v_u8m2_m | ( | ... | ) | __riscv_vluxseg2ei32_v_u8m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_u8mf2 | ( | ... | ) | __riscv_vluxseg2ei32_v_u8mf2(__VA_ARGS__) |
| #define vluxseg2ei32_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg2ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_u8mf4 | ( | ... | ) | __riscv_vluxseg2ei32_v_u8mf4(__VA_ARGS__) |
| #define vluxseg2ei32_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg2ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg2ei32_v_u8mf8 | ( | ... | ) | __riscv_vluxseg2ei32_v_u8mf8(__VA_ARGS__) |
| #define vluxseg2ei32_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg2ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_f16m1 | ( | ... | ) | __riscv_vluxseg2ei64_v_f16m1(__VA_ARGS__) |
| #define vluxseg2ei64_v_f16m1_m | ( | ... | ) | __riscv_vluxseg2ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_f16m2 | ( | ... | ) | __riscv_vluxseg2ei64_v_f16m2(__VA_ARGS__) |
| #define vluxseg2ei64_v_f16m2_m | ( | ... | ) | __riscv_vluxseg2ei64_v_f16m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_f16mf2 | ( | ... | ) | __riscv_vluxseg2ei64_v_f16mf2(__VA_ARGS__) |
| #define vluxseg2ei64_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg2ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_f16mf4 | ( | ... | ) | __riscv_vluxseg2ei64_v_f16mf4(__VA_ARGS__) |
| #define vluxseg2ei64_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg2ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_f32m1 | ( | ... | ) | __riscv_vluxseg2ei64_v_f32m1(__VA_ARGS__) |
| #define vluxseg2ei64_v_f32m1_m | ( | ... | ) | __riscv_vluxseg2ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_f32m2 | ( | ... | ) | __riscv_vluxseg2ei64_v_f32m2(__VA_ARGS__) |
| #define vluxseg2ei64_v_f32m2_m | ( | ... | ) | __riscv_vluxseg2ei64_v_f32m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_f32m4 | ( | ... | ) | __riscv_vluxseg2ei64_v_f32m4(__VA_ARGS__) |
| #define vluxseg2ei64_v_f32m4_m | ( | ... | ) | __riscv_vluxseg2ei64_v_f32m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_f32mf2 | ( | ... | ) | __riscv_vluxseg2ei64_v_f32mf2(__VA_ARGS__) |
| #define vluxseg2ei64_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg2ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_f64m1 | ( | ... | ) | __riscv_vluxseg2ei64_v_f64m1(__VA_ARGS__) |
| #define vluxseg2ei64_v_f64m1_m | ( | ... | ) | __riscv_vluxseg2ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_f64m2 | ( | ... | ) | __riscv_vluxseg2ei64_v_f64m2(__VA_ARGS__) |
| #define vluxseg2ei64_v_f64m2_m | ( | ... | ) | __riscv_vluxseg2ei64_v_f64m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_f64m4 | ( | ... | ) | __riscv_vluxseg2ei64_v_f64m4(__VA_ARGS__) |
| #define vluxseg2ei64_v_f64m4_m | ( | ... | ) | __riscv_vluxseg2ei64_v_f64m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_i16m1 | ( | ... | ) | __riscv_vluxseg2ei64_v_i16m1(__VA_ARGS__) |
| #define vluxseg2ei64_v_i16m1_m | ( | ... | ) | __riscv_vluxseg2ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_i16m2 | ( | ... | ) | __riscv_vluxseg2ei64_v_i16m2(__VA_ARGS__) |
| #define vluxseg2ei64_v_i16m2_m | ( | ... | ) | __riscv_vluxseg2ei64_v_i16m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_i16mf2 | ( | ... | ) | __riscv_vluxseg2ei64_v_i16mf2(__VA_ARGS__) |
| #define vluxseg2ei64_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg2ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_i16mf4 | ( | ... | ) | __riscv_vluxseg2ei64_v_i16mf4(__VA_ARGS__) |
| #define vluxseg2ei64_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg2ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_i32m1 | ( | ... | ) | __riscv_vluxseg2ei64_v_i32m1(__VA_ARGS__) |
| #define vluxseg2ei64_v_i32m1_m | ( | ... | ) | __riscv_vluxseg2ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_i32m2 | ( | ... | ) | __riscv_vluxseg2ei64_v_i32m2(__VA_ARGS__) |
| #define vluxseg2ei64_v_i32m2_m | ( | ... | ) | __riscv_vluxseg2ei64_v_i32m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_i32m4 | ( | ... | ) | __riscv_vluxseg2ei64_v_i32m4(__VA_ARGS__) |
| #define vluxseg2ei64_v_i32m4_m | ( | ... | ) | __riscv_vluxseg2ei64_v_i32m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_i32mf2 | ( | ... | ) | __riscv_vluxseg2ei64_v_i32mf2(__VA_ARGS__) |
| #define vluxseg2ei64_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg2ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_i64m1 | ( | ... | ) | __riscv_vluxseg2ei64_v_i64m1(__VA_ARGS__) |
| #define vluxseg2ei64_v_i64m1_m | ( | ... | ) | __riscv_vluxseg2ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_i64m2 | ( | ... | ) | __riscv_vluxseg2ei64_v_i64m2(__VA_ARGS__) |
| #define vluxseg2ei64_v_i64m2_m | ( | ... | ) | __riscv_vluxseg2ei64_v_i64m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_i64m4 | ( | ... | ) | __riscv_vluxseg2ei64_v_i64m4(__VA_ARGS__) |
| #define vluxseg2ei64_v_i64m4_m | ( | ... | ) | __riscv_vluxseg2ei64_v_i64m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_i8m1 | ( | ... | ) | __riscv_vluxseg2ei64_v_i8m1(__VA_ARGS__) |
| #define vluxseg2ei64_v_i8m1_m | ( | ... | ) | __riscv_vluxseg2ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_i8mf2 | ( | ... | ) | __riscv_vluxseg2ei64_v_i8mf2(__VA_ARGS__) |
| #define vluxseg2ei64_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg2ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_i8mf4 | ( | ... | ) | __riscv_vluxseg2ei64_v_i8mf4(__VA_ARGS__) |
| #define vluxseg2ei64_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg2ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_i8mf8 | ( | ... | ) | __riscv_vluxseg2ei64_v_i8mf8(__VA_ARGS__) |
| #define vluxseg2ei64_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg2ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_u16m1 | ( | ... | ) | __riscv_vluxseg2ei64_v_u16m1(__VA_ARGS__) |
| #define vluxseg2ei64_v_u16m1_m | ( | ... | ) | __riscv_vluxseg2ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_u16m2 | ( | ... | ) | __riscv_vluxseg2ei64_v_u16m2(__VA_ARGS__) |
| #define vluxseg2ei64_v_u16m2_m | ( | ... | ) | __riscv_vluxseg2ei64_v_u16m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_u16mf2 | ( | ... | ) | __riscv_vluxseg2ei64_v_u16mf2(__VA_ARGS__) |
| #define vluxseg2ei64_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg2ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_u16mf4 | ( | ... | ) | __riscv_vluxseg2ei64_v_u16mf4(__VA_ARGS__) |
| #define vluxseg2ei64_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg2ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_u32m1 | ( | ... | ) | __riscv_vluxseg2ei64_v_u32m1(__VA_ARGS__) |
| #define vluxseg2ei64_v_u32m1_m | ( | ... | ) | __riscv_vluxseg2ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_u32m2 | ( | ... | ) | __riscv_vluxseg2ei64_v_u32m2(__VA_ARGS__) |
| #define vluxseg2ei64_v_u32m2_m | ( | ... | ) | __riscv_vluxseg2ei64_v_u32m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_u32m4 | ( | ... | ) | __riscv_vluxseg2ei64_v_u32m4(__VA_ARGS__) |
| #define vluxseg2ei64_v_u32m4_m | ( | ... | ) | __riscv_vluxseg2ei64_v_u32m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_u32mf2 | ( | ... | ) | __riscv_vluxseg2ei64_v_u32mf2(__VA_ARGS__) |
| #define vluxseg2ei64_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg2ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_u64m1 | ( | ... | ) | __riscv_vluxseg2ei64_v_u64m1(__VA_ARGS__) |
| #define vluxseg2ei64_v_u64m1_m | ( | ... | ) | __riscv_vluxseg2ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_u64m2 | ( | ... | ) | __riscv_vluxseg2ei64_v_u64m2(__VA_ARGS__) |
| #define vluxseg2ei64_v_u64m2_m | ( | ... | ) | __riscv_vluxseg2ei64_v_u64m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_u64m4 | ( | ... | ) | __riscv_vluxseg2ei64_v_u64m4(__VA_ARGS__) |
| #define vluxseg2ei64_v_u64m4_m | ( | ... | ) | __riscv_vluxseg2ei64_v_u64m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_u8m1 | ( | ... | ) | __riscv_vluxseg2ei64_v_u8m1(__VA_ARGS__) |
| #define vluxseg2ei64_v_u8m1_m | ( | ... | ) | __riscv_vluxseg2ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_u8mf2 | ( | ... | ) | __riscv_vluxseg2ei64_v_u8mf2(__VA_ARGS__) |
| #define vluxseg2ei64_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg2ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_u8mf4 | ( | ... | ) | __riscv_vluxseg2ei64_v_u8mf4(__VA_ARGS__) |
| #define vluxseg2ei64_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg2ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg2ei64_v_u8mf8 | ( | ... | ) | __riscv_vluxseg2ei64_v_u8mf8(__VA_ARGS__) |
| #define vluxseg2ei64_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg2ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_f16m1 | ( | ... | ) | __riscv_vluxseg2ei8_v_f16m1(__VA_ARGS__) |
| #define vluxseg2ei8_v_f16m1_m | ( | ... | ) | __riscv_vluxseg2ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_f16m2 | ( | ... | ) | __riscv_vluxseg2ei8_v_f16m2(__VA_ARGS__) |
| #define vluxseg2ei8_v_f16m2_m | ( | ... | ) | __riscv_vluxseg2ei8_v_f16m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_f16m4 | ( | ... | ) | __riscv_vluxseg2ei8_v_f16m4(__VA_ARGS__) |
| #define vluxseg2ei8_v_f16m4_m | ( | ... | ) | __riscv_vluxseg2ei8_v_f16m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_f16mf2 | ( | ... | ) | __riscv_vluxseg2ei8_v_f16mf2(__VA_ARGS__) |
| #define vluxseg2ei8_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg2ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_f16mf4 | ( | ... | ) | __riscv_vluxseg2ei8_v_f16mf4(__VA_ARGS__) |
| #define vluxseg2ei8_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg2ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_f32m1 | ( | ... | ) | __riscv_vluxseg2ei8_v_f32m1(__VA_ARGS__) |
| #define vluxseg2ei8_v_f32m1_m | ( | ... | ) | __riscv_vluxseg2ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_f32m2 | ( | ... | ) | __riscv_vluxseg2ei8_v_f32m2(__VA_ARGS__) |
| #define vluxseg2ei8_v_f32m2_m | ( | ... | ) | __riscv_vluxseg2ei8_v_f32m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_f32m4 | ( | ... | ) | __riscv_vluxseg2ei8_v_f32m4(__VA_ARGS__) |
| #define vluxseg2ei8_v_f32m4_m | ( | ... | ) | __riscv_vluxseg2ei8_v_f32m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_f32mf2 | ( | ... | ) | __riscv_vluxseg2ei8_v_f32mf2(__VA_ARGS__) |
| #define vluxseg2ei8_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg2ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_f64m1 | ( | ... | ) | __riscv_vluxseg2ei8_v_f64m1(__VA_ARGS__) |
| #define vluxseg2ei8_v_f64m1_m | ( | ... | ) | __riscv_vluxseg2ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_f64m2 | ( | ... | ) | __riscv_vluxseg2ei8_v_f64m2(__VA_ARGS__) |
| #define vluxseg2ei8_v_f64m2_m | ( | ... | ) | __riscv_vluxseg2ei8_v_f64m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_f64m4 | ( | ... | ) | __riscv_vluxseg2ei8_v_f64m4(__VA_ARGS__) |
| #define vluxseg2ei8_v_f64m4_m | ( | ... | ) | __riscv_vluxseg2ei8_v_f64m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_i16m1 | ( | ... | ) | __riscv_vluxseg2ei8_v_i16m1(__VA_ARGS__) |
| #define vluxseg2ei8_v_i16m1_m | ( | ... | ) | __riscv_vluxseg2ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_i16m2 | ( | ... | ) | __riscv_vluxseg2ei8_v_i16m2(__VA_ARGS__) |
| #define vluxseg2ei8_v_i16m2_m | ( | ... | ) | __riscv_vluxseg2ei8_v_i16m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_i16m4 | ( | ... | ) | __riscv_vluxseg2ei8_v_i16m4(__VA_ARGS__) |
| #define vluxseg2ei8_v_i16m4_m | ( | ... | ) | __riscv_vluxseg2ei8_v_i16m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_i16mf2 | ( | ... | ) | __riscv_vluxseg2ei8_v_i16mf2(__VA_ARGS__) |
| #define vluxseg2ei8_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg2ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_i16mf4 | ( | ... | ) | __riscv_vluxseg2ei8_v_i16mf4(__VA_ARGS__) |
| #define vluxseg2ei8_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg2ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_i32m1 | ( | ... | ) | __riscv_vluxseg2ei8_v_i32m1(__VA_ARGS__) |
| #define vluxseg2ei8_v_i32m1_m | ( | ... | ) | __riscv_vluxseg2ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_i32m2 | ( | ... | ) | __riscv_vluxseg2ei8_v_i32m2(__VA_ARGS__) |
| #define vluxseg2ei8_v_i32m2_m | ( | ... | ) | __riscv_vluxseg2ei8_v_i32m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_i32m4 | ( | ... | ) | __riscv_vluxseg2ei8_v_i32m4(__VA_ARGS__) |
| #define vluxseg2ei8_v_i32m4_m | ( | ... | ) | __riscv_vluxseg2ei8_v_i32m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_i32mf2 | ( | ... | ) | __riscv_vluxseg2ei8_v_i32mf2(__VA_ARGS__) |
| #define vluxseg2ei8_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg2ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_i64m1 | ( | ... | ) | __riscv_vluxseg2ei8_v_i64m1(__VA_ARGS__) |
| #define vluxseg2ei8_v_i64m1_m | ( | ... | ) | __riscv_vluxseg2ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_i64m2 | ( | ... | ) | __riscv_vluxseg2ei8_v_i64m2(__VA_ARGS__) |
| #define vluxseg2ei8_v_i64m2_m | ( | ... | ) | __riscv_vluxseg2ei8_v_i64m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_i64m4 | ( | ... | ) | __riscv_vluxseg2ei8_v_i64m4(__VA_ARGS__) |
| #define vluxseg2ei8_v_i64m4_m | ( | ... | ) | __riscv_vluxseg2ei8_v_i64m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_i8m1 | ( | ... | ) | __riscv_vluxseg2ei8_v_i8m1(__VA_ARGS__) |
| #define vluxseg2ei8_v_i8m1_m | ( | ... | ) | __riscv_vluxseg2ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_i8m2 | ( | ... | ) | __riscv_vluxseg2ei8_v_i8m2(__VA_ARGS__) |
| #define vluxseg2ei8_v_i8m2_m | ( | ... | ) | __riscv_vluxseg2ei8_v_i8m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_i8m4 | ( | ... | ) | __riscv_vluxseg2ei8_v_i8m4(__VA_ARGS__) |
| #define vluxseg2ei8_v_i8m4_m | ( | ... | ) | __riscv_vluxseg2ei8_v_i8m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_i8mf2 | ( | ... | ) | __riscv_vluxseg2ei8_v_i8mf2(__VA_ARGS__) |
| #define vluxseg2ei8_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg2ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_i8mf4 | ( | ... | ) | __riscv_vluxseg2ei8_v_i8mf4(__VA_ARGS__) |
| #define vluxseg2ei8_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg2ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_i8mf8 | ( | ... | ) | __riscv_vluxseg2ei8_v_i8mf8(__VA_ARGS__) |
| #define vluxseg2ei8_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg2ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_u16m1 | ( | ... | ) | __riscv_vluxseg2ei8_v_u16m1(__VA_ARGS__) |
| #define vluxseg2ei8_v_u16m1_m | ( | ... | ) | __riscv_vluxseg2ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_u16m2 | ( | ... | ) | __riscv_vluxseg2ei8_v_u16m2(__VA_ARGS__) |
| #define vluxseg2ei8_v_u16m2_m | ( | ... | ) | __riscv_vluxseg2ei8_v_u16m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_u16m4 | ( | ... | ) | __riscv_vluxseg2ei8_v_u16m4(__VA_ARGS__) |
| #define vluxseg2ei8_v_u16m4_m | ( | ... | ) | __riscv_vluxseg2ei8_v_u16m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_u16mf2 | ( | ... | ) | __riscv_vluxseg2ei8_v_u16mf2(__VA_ARGS__) |
| #define vluxseg2ei8_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg2ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_u16mf4 | ( | ... | ) | __riscv_vluxseg2ei8_v_u16mf4(__VA_ARGS__) |
| #define vluxseg2ei8_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg2ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_u32m1 | ( | ... | ) | __riscv_vluxseg2ei8_v_u32m1(__VA_ARGS__) |
| #define vluxseg2ei8_v_u32m1_m | ( | ... | ) | __riscv_vluxseg2ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_u32m2 | ( | ... | ) | __riscv_vluxseg2ei8_v_u32m2(__VA_ARGS__) |
| #define vluxseg2ei8_v_u32m2_m | ( | ... | ) | __riscv_vluxseg2ei8_v_u32m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_u32m4 | ( | ... | ) | __riscv_vluxseg2ei8_v_u32m4(__VA_ARGS__) |
| #define vluxseg2ei8_v_u32m4_m | ( | ... | ) | __riscv_vluxseg2ei8_v_u32m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_u32mf2 | ( | ... | ) | __riscv_vluxseg2ei8_v_u32mf2(__VA_ARGS__) |
| #define vluxseg2ei8_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg2ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_u64m1 | ( | ... | ) | __riscv_vluxseg2ei8_v_u64m1(__VA_ARGS__) |
| #define vluxseg2ei8_v_u64m1_m | ( | ... | ) | __riscv_vluxseg2ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_u64m2 | ( | ... | ) | __riscv_vluxseg2ei8_v_u64m2(__VA_ARGS__) |
| #define vluxseg2ei8_v_u64m2_m | ( | ... | ) | __riscv_vluxseg2ei8_v_u64m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_u64m4 | ( | ... | ) | __riscv_vluxseg2ei8_v_u64m4(__VA_ARGS__) |
| #define vluxseg2ei8_v_u64m4_m | ( | ... | ) | __riscv_vluxseg2ei8_v_u64m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_u8m1 | ( | ... | ) | __riscv_vluxseg2ei8_v_u8m1(__VA_ARGS__) |
| #define vluxseg2ei8_v_u8m1_m | ( | ... | ) | __riscv_vluxseg2ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_u8m2 | ( | ... | ) | __riscv_vluxseg2ei8_v_u8m2(__VA_ARGS__) |
| #define vluxseg2ei8_v_u8m2_m | ( | ... | ) | __riscv_vluxseg2ei8_v_u8m2_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_u8m4 | ( | ... | ) | __riscv_vluxseg2ei8_v_u8m4(__VA_ARGS__) |
| #define vluxseg2ei8_v_u8m4_m | ( | ... | ) | __riscv_vluxseg2ei8_v_u8m4_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_u8mf2 | ( | ... | ) | __riscv_vluxseg2ei8_v_u8mf2(__VA_ARGS__) |
| #define vluxseg2ei8_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg2ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_u8mf4 | ( | ... | ) | __riscv_vluxseg2ei8_v_u8mf4(__VA_ARGS__) |
| #define vluxseg2ei8_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg2ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg2ei8_v_u8mf8 | ( | ... | ) | __riscv_vluxseg2ei8_v_u8mf8(__VA_ARGS__) |
| #define vluxseg2ei8_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg2ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_f16m1 | ( | ... | ) | __riscv_vluxseg3ei16_v_f16m1(__VA_ARGS__) |
| #define vluxseg3ei16_v_f16m1_m | ( | ... | ) | __riscv_vluxseg3ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_f16m2 | ( | ... | ) | __riscv_vluxseg3ei16_v_f16m2(__VA_ARGS__) |
| #define vluxseg3ei16_v_f16m2_m | ( | ... | ) | __riscv_vluxseg3ei16_v_f16m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_f16mf2 | ( | ... | ) | __riscv_vluxseg3ei16_v_f16mf2(__VA_ARGS__) |
| #define vluxseg3ei16_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg3ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_f16mf4 | ( | ... | ) | __riscv_vluxseg3ei16_v_f16mf4(__VA_ARGS__) |
| #define vluxseg3ei16_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg3ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_f32m1 | ( | ... | ) | __riscv_vluxseg3ei16_v_f32m1(__VA_ARGS__) |
| #define vluxseg3ei16_v_f32m1_m | ( | ... | ) | __riscv_vluxseg3ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_f32m2 | ( | ... | ) | __riscv_vluxseg3ei16_v_f32m2(__VA_ARGS__) |
| #define vluxseg3ei16_v_f32m2_m | ( | ... | ) | __riscv_vluxseg3ei16_v_f32m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_f32mf2 | ( | ... | ) | __riscv_vluxseg3ei16_v_f32mf2(__VA_ARGS__) |
| #define vluxseg3ei16_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg3ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_f64m1 | ( | ... | ) | __riscv_vluxseg3ei16_v_f64m1(__VA_ARGS__) |
| #define vluxseg3ei16_v_f64m1_m | ( | ... | ) | __riscv_vluxseg3ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_f64m2 | ( | ... | ) | __riscv_vluxseg3ei16_v_f64m2(__VA_ARGS__) |
| #define vluxseg3ei16_v_f64m2_m | ( | ... | ) | __riscv_vluxseg3ei16_v_f64m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_i16m1 | ( | ... | ) | __riscv_vluxseg3ei16_v_i16m1(__VA_ARGS__) |
| #define vluxseg3ei16_v_i16m1_m | ( | ... | ) | __riscv_vluxseg3ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_i16m2 | ( | ... | ) | __riscv_vluxseg3ei16_v_i16m2(__VA_ARGS__) |
| #define vluxseg3ei16_v_i16m2_m | ( | ... | ) | __riscv_vluxseg3ei16_v_i16m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_i16mf2 | ( | ... | ) | __riscv_vluxseg3ei16_v_i16mf2(__VA_ARGS__) |
| #define vluxseg3ei16_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg3ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_i16mf4 | ( | ... | ) | __riscv_vluxseg3ei16_v_i16mf4(__VA_ARGS__) |
| #define vluxseg3ei16_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg3ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_i32m1 | ( | ... | ) | __riscv_vluxseg3ei16_v_i32m1(__VA_ARGS__) |
| #define vluxseg3ei16_v_i32m1_m | ( | ... | ) | __riscv_vluxseg3ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_i32m2 | ( | ... | ) | __riscv_vluxseg3ei16_v_i32m2(__VA_ARGS__) |
| #define vluxseg3ei16_v_i32m2_m | ( | ... | ) | __riscv_vluxseg3ei16_v_i32m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_i32mf2 | ( | ... | ) | __riscv_vluxseg3ei16_v_i32mf2(__VA_ARGS__) |
| #define vluxseg3ei16_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg3ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_i64m1 | ( | ... | ) | __riscv_vluxseg3ei16_v_i64m1(__VA_ARGS__) |
| #define vluxseg3ei16_v_i64m1_m | ( | ... | ) | __riscv_vluxseg3ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_i64m2 | ( | ... | ) | __riscv_vluxseg3ei16_v_i64m2(__VA_ARGS__) |
| #define vluxseg3ei16_v_i64m2_m | ( | ... | ) | __riscv_vluxseg3ei16_v_i64m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_i8m1 | ( | ... | ) | __riscv_vluxseg3ei16_v_i8m1(__VA_ARGS__) |
| #define vluxseg3ei16_v_i8m1_m | ( | ... | ) | __riscv_vluxseg3ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_i8m2 | ( | ... | ) | __riscv_vluxseg3ei16_v_i8m2(__VA_ARGS__) |
| #define vluxseg3ei16_v_i8m2_m | ( | ... | ) | __riscv_vluxseg3ei16_v_i8m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_i8mf2 | ( | ... | ) | __riscv_vluxseg3ei16_v_i8mf2(__VA_ARGS__) |
| #define vluxseg3ei16_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg3ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_i8mf4 | ( | ... | ) | __riscv_vluxseg3ei16_v_i8mf4(__VA_ARGS__) |
| #define vluxseg3ei16_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg3ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_i8mf8 | ( | ... | ) | __riscv_vluxseg3ei16_v_i8mf8(__VA_ARGS__) |
| #define vluxseg3ei16_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg3ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_u16m1 | ( | ... | ) | __riscv_vluxseg3ei16_v_u16m1(__VA_ARGS__) |
| #define vluxseg3ei16_v_u16m1_m | ( | ... | ) | __riscv_vluxseg3ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_u16m2 | ( | ... | ) | __riscv_vluxseg3ei16_v_u16m2(__VA_ARGS__) |
| #define vluxseg3ei16_v_u16m2_m | ( | ... | ) | __riscv_vluxseg3ei16_v_u16m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_u16mf2 | ( | ... | ) | __riscv_vluxseg3ei16_v_u16mf2(__VA_ARGS__) |
| #define vluxseg3ei16_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg3ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_u16mf4 | ( | ... | ) | __riscv_vluxseg3ei16_v_u16mf4(__VA_ARGS__) |
| #define vluxseg3ei16_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg3ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_u32m1 | ( | ... | ) | __riscv_vluxseg3ei16_v_u32m1(__VA_ARGS__) |
| #define vluxseg3ei16_v_u32m1_m | ( | ... | ) | __riscv_vluxseg3ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_u32m2 | ( | ... | ) | __riscv_vluxseg3ei16_v_u32m2(__VA_ARGS__) |
| #define vluxseg3ei16_v_u32m2_m | ( | ... | ) | __riscv_vluxseg3ei16_v_u32m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_u32mf2 | ( | ... | ) | __riscv_vluxseg3ei16_v_u32mf2(__VA_ARGS__) |
| #define vluxseg3ei16_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg3ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_u64m1 | ( | ... | ) | __riscv_vluxseg3ei16_v_u64m1(__VA_ARGS__) |
| #define vluxseg3ei16_v_u64m1_m | ( | ... | ) | __riscv_vluxseg3ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_u64m2 | ( | ... | ) | __riscv_vluxseg3ei16_v_u64m2(__VA_ARGS__) |
| #define vluxseg3ei16_v_u64m2_m | ( | ... | ) | __riscv_vluxseg3ei16_v_u64m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_u8m1 | ( | ... | ) | __riscv_vluxseg3ei16_v_u8m1(__VA_ARGS__) |
| #define vluxseg3ei16_v_u8m1_m | ( | ... | ) | __riscv_vluxseg3ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_u8m2 | ( | ... | ) | __riscv_vluxseg3ei16_v_u8m2(__VA_ARGS__) |
| #define vluxseg3ei16_v_u8m2_m | ( | ... | ) | __riscv_vluxseg3ei16_v_u8m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_u8mf2 | ( | ... | ) | __riscv_vluxseg3ei16_v_u8mf2(__VA_ARGS__) |
| #define vluxseg3ei16_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg3ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_u8mf4 | ( | ... | ) | __riscv_vluxseg3ei16_v_u8mf4(__VA_ARGS__) |
| #define vluxseg3ei16_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg3ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg3ei16_v_u8mf8 | ( | ... | ) | __riscv_vluxseg3ei16_v_u8mf8(__VA_ARGS__) |
| #define vluxseg3ei16_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg3ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_f16m1 | ( | ... | ) | __riscv_vluxseg3ei32_v_f16m1(__VA_ARGS__) |
| #define vluxseg3ei32_v_f16m1_m | ( | ... | ) | __riscv_vluxseg3ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_f16m2 | ( | ... | ) | __riscv_vluxseg3ei32_v_f16m2(__VA_ARGS__) |
| #define vluxseg3ei32_v_f16m2_m | ( | ... | ) | __riscv_vluxseg3ei32_v_f16m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_f16mf2 | ( | ... | ) | __riscv_vluxseg3ei32_v_f16mf2(__VA_ARGS__) |
| #define vluxseg3ei32_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg3ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_f16mf4 | ( | ... | ) | __riscv_vluxseg3ei32_v_f16mf4(__VA_ARGS__) |
| #define vluxseg3ei32_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg3ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_f32m1 | ( | ... | ) | __riscv_vluxseg3ei32_v_f32m1(__VA_ARGS__) |
| #define vluxseg3ei32_v_f32m1_m | ( | ... | ) | __riscv_vluxseg3ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_f32m2 | ( | ... | ) | __riscv_vluxseg3ei32_v_f32m2(__VA_ARGS__) |
| #define vluxseg3ei32_v_f32m2_m | ( | ... | ) | __riscv_vluxseg3ei32_v_f32m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_f32mf2 | ( | ... | ) | __riscv_vluxseg3ei32_v_f32mf2(__VA_ARGS__) |
| #define vluxseg3ei32_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg3ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_f64m1 | ( | ... | ) | __riscv_vluxseg3ei32_v_f64m1(__VA_ARGS__) |
| #define vluxseg3ei32_v_f64m1_m | ( | ... | ) | __riscv_vluxseg3ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_f64m2 | ( | ... | ) | __riscv_vluxseg3ei32_v_f64m2(__VA_ARGS__) |
| #define vluxseg3ei32_v_f64m2_m | ( | ... | ) | __riscv_vluxseg3ei32_v_f64m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_i16m1 | ( | ... | ) | __riscv_vluxseg3ei32_v_i16m1(__VA_ARGS__) |
| #define vluxseg3ei32_v_i16m1_m | ( | ... | ) | __riscv_vluxseg3ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_i16m2 | ( | ... | ) | __riscv_vluxseg3ei32_v_i16m2(__VA_ARGS__) |
| #define vluxseg3ei32_v_i16m2_m | ( | ... | ) | __riscv_vluxseg3ei32_v_i16m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_i16mf2 | ( | ... | ) | __riscv_vluxseg3ei32_v_i16mf2(__VA_ARGS__) |
| #define vluxseg3ei32_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg3ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_i16mf4 | ( | ... | ) | __riscv_vluxseg3ei32_v_i16mf4(__VA_ARGS__) |
| #define vluxseg3ei32_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg3ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_i32m1 | ( | ... | ) | __riscv_vluxseg3ei32_v_i32m1(__VA_ARGS__) |
| #define vluxseg3ei32_v_i32m1_m | ( | ... | ) | __riscv_vluxseg3ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_i32m2 | ( | ... | ) | __riscv_vluxseg3ei32_v_i32m2(__VA_ARGS__) |
| #define vluxseg3ei32_v_i32m2_m | ( | ... | ) | __riscv_vluxseg3ei32_v_i32m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_i32mf2 | ( | ... | ) | __riscv_vluxseg3ei32_v_i32mf2(__VA_ARGS__) |
| #define vluxseg3ei32_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg3ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_i64m1 | ( | ... | ) | __riscv_vluxseg3ei32_v_i64m1(__VA_ARGS__) |
| #define vluxseg3ei32_v_i64m1_m | ( | ... | ) | __riscv_vluxseg3ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_i64m2 | ( | ... | ) | __riscv_vluxseg3ei32_v_i64m2(__VA_ARGS__) |
| #define vluxseg3ei32_v_i64m2_m | ( | ... | ) | __riscv_vluxseg3ei32_v_i64m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_i8m1 | ( | ... | ) | __riscv_vluxseg3ei32_v_i8m1(__VA_ARGS__) |
| #define vluxseg3ei32_v_i8m1_m | ( | ... | ) | __riscv_vluxseg3ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_i8m2 | ( | ... | ) | __riscv_vluxseg3ei32_v_i8m2(__VA_ARGS__) |
| #define vluxseg3ei32_v_i8m2_m | ( | ... | ) | __riscv_vluxseg3ei32_v_i8m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_i8mf2 | ( | ... | ) | __riscv_vluxseg3ei32_v_i8mf2(__VA_ARGS__) |
| #define vluxseg3ei32_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg3ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_i8mf4 | ( | ... | ) | __riscv_vluxseg3ei32_v_i8mf4(__VA_ARGS__) |
| #define vluxseg3ei32_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg3ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_i8mf8 | ( | ... | ) | __riscv_vluxseg3ei32_v_i8mf8(__VA_ARGS__) |
| #define vluxseg3ei32_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg3ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_u16m1 | ( | ... | ) | __riscv_vluxseg3ei32_v_u16m1(__VA_ARGS__) |
| #define vluxseg3ei32_v_u16m1_m | ( | ... | ) | __riscv_vluxseg3ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_u16m2 | ( | ... | ) | __riscv_vluxseg3ei32_v_u16m2(__VA_ARGS__) |
| #define vluxseg3ei32_v_u16m2_m | ( | ... | ) | __riscv_vluxseg3ei32_v_u16m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_u16mf2 | ( | ... | ) | __riscv_vluxseg3ei32_v_u16mf2(__VA_ARGS__) |
| #define vluxseg3ei32_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg3ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_u16mf4 | ( | ... | ) | __riscv_vluxseg3ei32_v_u16mf4(__VA_ARGS__) |
| #define vluxseg3ei32_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg3ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_u32m1 | ( | ... | ) | __riscv_vluxseg3ei32_v_u32m1(__VA_ARGS__) |
| #define vluxseg3ei32_v_u32m1_m | ( | ... | ) | __riscv_vluxseg3ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_u32m2 | ( | ... | ) | __riscv_vluxseg3ei32_v_u32m2(__VA_ARGS__) |
| #define vluxseg3ei32_v_u32m2_m | ( | ... | ) | __riscv_vluxseg3ei32_v_u32m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_u32mf2 | ( | ... | ) | __riscv_vluxseg3ei32_v_u32mf2(__VA_ARGS__) |
| #define vluxseg3ei32_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg3ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_u64m1 | ( | ... | ) | __riscv_vluxseg3ei32_v_u64m1(__VA_ARGS__) |
| #define vluxseg3ei32_v_u64m1_m | ( | ... | ) | __riscv_vluxseg3ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_u64m2 | ( | ... | ) | __riscv_vluxseg3ei32_v_u64m2(__VA_ARGS__) |
| #define vluxseg3ei32_v_u64m2_m | ( | ... | ) | __riscv_vluxseg3ei32_v_u64m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_u8m1 | ( | ... | ) | __riscv_vluxseg3ei32_v_u8m1(__VA_ARGS__) |
| #define vluxseg3ei32_v_u8m1_m | ( | ... | ) | __riscv_vluxseg3ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_u8m2 | ( | ... | ) | __riscv_vluxseg3ei32_v_u8m2(__VA_ARGS__) |
| #define vluxseg3ei32_v_u8m2_m | ( | ... | ) | __riscv_vluxseg3ei32_v_u8m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_u8mf2 | ( | ... | ) | __riscv_vluxseg3ei32_v_u8mf2(__VA_ARGS__) |
| #define vluxseg3ei32_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg3ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_u8mf4 | ( | ... | ) | __riscv_vluxseg3ei32_v_u8mf4(__VA_ARGS__) |
| #define vluxseg3ei32_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg3ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg3ei32_v_u8mf8 | ( | ... | ) | __riscv_vluxseg3ei32_v_u8mf8(__VA_ARGS__) |
| #define vluxseg3ei32_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg3ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_f16m1 | ( | ... | ) | __riscv_vluxseg3ei64_v_f16m1(__VA_ARGS__) |
| #define vluxseg3ei64_v_f16m1_m | ( | ... | ) | __riscv_vluxseg3ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_f16m2 | ( | ... | ) | __riscv_vluxseg3ei64_v_f16m2(__VA_ARGS__) |
| #define vluxseg3ei64_v_f16m2_m | ( | ... | ) | __riscv_vluxseg3ei64_v_f16m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_f16mf2 | ( | ... | ) | __riscv_vluxseg3ei64_v_f16mf2(__VA_ARGS__) |
| #define vluxseg3ei64_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg3ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_f16mf4 | ( | ... | ) | __riscv_vluxseg3ei64_v_f16mf4(__VA_ARGS__) |
| #define vluxseg3ei64_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg3ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_f32m1 | ( | ... | ) | __riscv_vluxseg3ei64_v_f32m1(__VA_ARGS__) |
| #define vluxseg3ei64_v_f32m1_m | ( | ... | ) | __riscv_vluxseg3ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_f32m2 | ( | ... | ) | __riscv_vluxseg3ei64_v_f32m2(__VA_ARGS__) |
| #define vluxseg3ei64_v_f32m2_m | ( | ... | ) | __riscv_vluxseg3ei64_v_f32m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_f32mf2 | ( | ... | ) | __riscv_vluxseg3ei64_v_f32mf2(__VA_ARGS__) |
| #define vluxseg3ei64_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg3ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_f64m1 | ( | ... | ) | __riscv_vluxseg3ei64_v_f64m1(__VA_ARGS__) |
| #define vluxseg3ei64_v_f64m1_m | ( | ... | ) | __riscv_vluxseg3ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_f64m2 | ( | ... | ) | __riscv_vluxseg3ei64_v_f64m2(__VA_ARGS__) |
| #define vluxseg3ei64_v_f64m2_m | ( | ... | ) | __riscv_vluxseg3ei64_v_f64m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_i16m1 | ( | ... | ) | __riscv_vluxseg3ei64_v_i16m1(__VA_ARGS__) |
| #define vluxseg3ei64_v_i16m1_m | ( | ... | ) | __riscv_vluxseg3ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_i16m2 | ( | ... | ) | __riscv_vluxseg3ei64_v_i16m2(__VA_ARGS__) |
| #define vluxseg3ei64_v_i16m2_m | ( | ... | ) | __riscv_vluxseg3ei64_v_i16m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_i16mf2 | ( | ... | ) | __riscv_vluxseg3ei64_v_i16mf2(__VA_ARGS__) |
| #define vluxseg3ei64_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg3ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_i16mf4 | ( | ... | ) | __riscv_vluxseg3ei64_v_i16mf4(__VA_ARGS__) |
| #define vluxseg3ei64_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg3ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_i32m1 | ( | ... | ) | __riscv_vluxseg3ei64_v_i32m1(__VA_ARGS__) |
| #define vluxseg3ei64_v_i32m1_m | ( | ... | ) | __riscv_vluxseg3ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_i32m2 | ( | ... | ) | __riscv_vluxseg3ei64_v_i32m2(__VA_ARGS__) |
| #define vluxseg3ei64_v_i32m2_m | ( | ... | ) | __riscv_vluxseg3ei64_v_i32m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_i32mf2 | ( | ... | ) | __riscv_vluxseg3ei64_v_i32mf2(__VA_ARGS__) |
| #define vluxseg3ei64_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg3ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_i64m1 | ( | ... | ) | __riscv_vluxseg3ei64_v_i64m1(__VA_ARGS__) |
| #define vluxseg3ei64_v_i64m1_m | ( | ... | ) | __riscv_vluxseg3ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_i64m2 | ( | ... | ) | __riscv_vluxseg3ei64_v_i64m2(__VA_ARGS__) |
| #define vluxseg3ei64_v_i64m2_m | ( | ... | ) | __riscv_vluxseg3ei64_v_i64m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_i8m1 | ( | ... | ) | __riscv_vluxseg3ei64_v_i8m1(__VA_ARGS__) |
| #define vluxseg3ei64_v_i8m1_m | ( | ... | ) | __riscv_vluxseg3ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_i8mf2 | ( | ... | ) | __riscv_vluxseg3ei64_v_i8mf2(__VA_ARGS__) |
| #define vluxseg3ei64_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg3ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_i8mf4 | ( | ... | ) | __riscv_vluxseg3ei64_v_i8mf4(__VA_ARGS__) |
| #define vluxseg3ei64_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg3ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_i8mf8 | ( | ... | ) | __riscv_vluxseg3ei64_v_i8mf8(__VA_ARGS__) |
| #define vluxseg3ei64_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg3ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_u16m1 | ( | ... | ) | __riscv_vluxseg3ei64_v_u16m1(__VA_ARGS__) |
| #define vluxseg3ei64_v_u16m1_m | ( | ... | ) | __riscv_vluxseg3ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_u16m2 | ( | ... | ) | __riscv_vluxseg3ei64_v_u16m2(__VA_ARGS__) |
| #define vluxseg3ei64_v_u16m2_m | ( | ... | ) | __riscv_vluxseg3ei64_v_u16m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_u16mf2 | ( | ... | ) | __riscv_vluxseg3ei64_v_u16mf2(__VA_ARGS__) |
| #define vluxseg3ei64_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg3ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_u16mf4 | ( | ... | ) | __riscv_vluxseg3ei64_v_u16mf4(__VA_ARGS__) |
| #define vluxseg3ei64_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg3ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_u32m1 | ( | ... | ) | __riscv_vluxseg3ei64_v_u32m1(__VA_ARGS__) |
| #define vluxseg3ei64_v_u32m1_m | ( | ... | ) | __riscv_vluxseg3ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_u32m2 | ( | ... | ) | __riscv_vluxseg3ei64_v_u32m2(__VA_ARGS__) |
| #define vluxseg3ei64_v_u32m2_m | ( | ... | ) | __riscv_vluxseg3ei64_v_u32m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_u32mf2 | ( | ... | ) | __riscv_vluxseg3ei64_v_u32mf2(__VA_ARGS__) |
| #define vluxseg3ei64_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg3ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_u64m1 | ( | ... | ) | __riscv_vluxseg3ei64_v_u64m1(__VA_ARGS__) |
| #define vluxseg3ei64_v_u64m1_m | ( | ... | ) | __riscv_vluxseg3ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_u64m2 | ( | ... | ) | __riscv_vluxseg3ei64_v_u64m2(__VA_ARGS__) |
| #define vluxseg3ei64_v_u64m2_m | ( | ... | ) | __riscv_vluxseg3ei64_v_u64m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_u8m1 | ( | ... | ) | __riscv_vluxseg3ei64_v_u8m1(__VA_ARGS__) |
| #define vluxseg3ei64_v_u8m1_m | ( | ... | ) | __riscv_vluxseg3ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_u8mf2 | ( | ... | ) | __riscv_vluxseg3ei64_v_u8mf2(__VA_ARGS__) |
| #define vluxseg3ei64_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg3ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_u8mf4 | ( | ... | ) | __riscv_vluxseg3ei64_v_u8mf4(__VA_ARGS__) |
| #define vluxseg3ei64_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg3ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg3ei64_v_u8mf8 | ( | ... | ) | __riscv_vluxseg3ei64_v_u8mf8(__VA_ARGS__) |
| #define vluxseg3ei64_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg3ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_f16m1 | ( | ... | ) | __riscv_vluxseg3ei8_v_f16m1(__VA_ARGS__) |
| #define vluxseg3ei8_v_f16m1_m | ( | ... | ) | __riscv_vluxseg3ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_f16m2 | ( | ... | ) | __riscv_vluxseg3ei8_v_f16m2(__VA_ARGS__) |
| #define vluxseg3ei8_v_f16m2_m | ( | ... | ) | __riscv_vluxseg3ei8_v_f16m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_f16mf2 | ( | ... | ) | __riscv_vluxseg3ei8_v_f16mf2(__VA_ARGS__) |
| #define vluxseg3ei8_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg3ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_f16mf4 | ( | ... | ) | __riscv_vluxseg3ei8_v_f16mf4(__VA_ARGS__) |
| #define vluxseg3ei8_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg3ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_f32m1 | ( | ... | ) | __riscv_vluxseg3ei8_v_f32m1(__VA_ARGS__) |
| #define vluxseg3ei8_v_f32m1_m | ( | ... | ) | __riscv_vluxseg3ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_f32m2 | ( | ... | ) | __riscv_vluxseg3ei8_v_f32m2(__VA_ARGS__) |
| #define vluxseg3ei8_v_f32m2_m | ( | ... | ) | __riscv_vluxseg3ei8_v_f32m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_f32mf2 | ( | ... | ) | __riscv_vluxseg3ei8_v_f32mf2(__VA_ARGS__) |
| #define vluxseg3ei8_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg3ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_f64m1 | ( | ... | ) | __riscv_vluxseg3ei8_v_f64m1(__VA_ARGS__) |
| #define vluxseg3ei8_v_f64m1_m | ( | ... | ) | __riscv_vluxseg3ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_f64m2 | ( | ... | ) | __riscv_vluxseg3ei8_v_f64m2(__VA_ARGS__) |
| #define vluxseg3ei8_v_f64m2_m | ( | ... | ) | __riscv_vluxseg3ei8_v_f64m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_i16m1 | ( | ... | ) | __riscv_vluxseg3ei8_v_i16m1(__VA_ARGS__) |
| #define vluxseg3ei8_v_i16m1_m | ( | ... | ) | __riscv_vluxseg3ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_i16m2 | ( | ... | ) | __riscv_vluxseg3ei8_v_i16m2(__VA_ARGS__) |
| #define vluxseg3ei8_v_i16m2_m | ( | ... | ) | __riscv_vluxseg3ei8_v_i16m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_i16mf2 | ( | ... | ) | __riscv_vluxseg3ei8_v_i16mf2(__VA_ARGS__) |
| #define vluxseg3ei8_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg3ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_i16mf4 | ( | ... | ) | __riscv_vluxseg3ei8_v_i16mf4(__VA_ARGS__) |
| #define vluxseg3ei8_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg3ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_i32m1 | ( | ... | ) | __riscv_vluxseg3ei8_v_i32m1(__VA_ARGS__) |
| #define vluxseg3ei8_v_i32m1_m | ( | ... | ) | __riscv_vluxseg3ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_i32m2 | ( | ... | ) | __riscv_vluxseg3ei8_v_i32m2(__VA_ARGS__) |
| #define vluxseg3ei8_v_i32m2_m | ( | ... | ) | __riscv_vluxseg3ei8_v_i32m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_i32mf2 | ( | ... | ) | __riscv_vluxseg3ei8_v_i32mf2(__VA_ARGS__) |
| #define vluxseg3ei8_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg3ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_i64m1 | ( | ... | ) | __riscv_vluxseg3ei8_v_i64m1(__VA_ARGS__) |
| #define vluxseg3ei8_v_i64m1_m | ( | ... | ) | __riscv_vluxseg3ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_i64m2 | ( | ... | ) | __riscv_vluxseg3ei8_v_i64m2(__VA_ARGS__) |
| #define vluxseg3ei8_v_i64m2_m | ( | ... | ) | __riscv_vluxseg3ei8_v_i64m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_i8m1 | ( | ... | ) | __riscv_vluxseg3ei8_v_i8m1(__VA_ARGS__) |
| #define vluxseg3ei8_v_i8m1_m | ( | ... | ) | __riscv_vluxseg3ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_i8m2 | ( | ... | ) | __riscv_vluxseg3ei8_v_i8m2(__VA_ARGS__) |
| #define vluxseg3ei8_v_i8m2_m | ( | ... | ) | __riscv_vluxseg3ei8_v_i8m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_i8mf2 | ( | ... | ) | __riscv_vluxseg3ei8_v_i8mf2(__VA_ARGS__) |
| #define vluxseg3ei8_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg3ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_i8mf4 | ( | ... | ) | __riscv_vluxseg3ei8_v_i8mf4(__VA_ARGS__) |
| #define vluxseg3ei8_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg3ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_i8mf8 | ( | ... | ) | __riscv_vluxseg3ei8_v_i8mf8(__VA_ARGS__) |
| #define vluxseg3ei8_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg3ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_u16m1 | ( | ... | ) | __riscv_vluxseg3ei8_v_u16m1(__VA_ARGS__) |
| #define vluxseg3ei8_v_u16m1_m | ( | ... | ) | __riscv_vluxseg3ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_u16m2 | ( | ... | ) | __riscv_vluxseg3ei8_v_u16m2(__VA_ARGS__) |
| #define vluxseg3ei8_v_u16m2_m | ( | ... | ) | __riscv_vluxseg3ei8_v_u16m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_u16mf2 | ( | ... | ) | __riscv_vluxseg3ei8_v_u16mf2(__VA_ARGS__) |
| #define vluxseg3ei8_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg3ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_u16mf4 | ( | ... | ) | __riscv_vluxseg3ei8_v_u16mf4(__VA_ARGS__) |
| #define vluxseg3ei8_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg3ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_u32m1 | ( | ... | ) | __riscv_vluxseg3ei8_v_u32m1(__VA_ARGS__) |
| #define vluxseg3ei8_v_u32m1_m | ( | ... | ) | __riscv_vluxseg3ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_u32m2 | ( | ... | ) | __riscv_vluxseg3ei8_v_u32m2(__VA_ARGS__) |
| #define vluxseg3ei8_v_u32m2_m | ( | ... | ) | __riscv_vluxseg3ei8_v_u32m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_u32mf2 | ( | ... | ) | __riscv_vluxseg3ei8_v_u32mf2(__VA_ARGS__) |
| #define vluxseg3ei8_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg3ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_u64m1 | ( | ... | ) | __riscv_vluxseg3ei8_v_u64m1(__VA_ARGS__) |
| #define vluxseg3ei8_v_u64m1_m | ( | ... | ) | __riscv_vluxseg3ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_u64m2 | ( | ... | ) | __riscv_vluxseg3ei8_v_u64m2(__VA_ARGS__) |
| #define vluxseg3ei8_v_u64m2_m | ( | ... | ) | __riscv_vluxseg3ei8_v_u64m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_u8m1 | ( | ... | ) | __riscv_vluxseg3ei8_v_u8m1(__VA_ARGS__) |
| #define vluxseg3ei8_v_u8m1_m | ( | ... | ) | __riscv_vluxseg3ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_u8m2 | ( | ... | ) | __riscv_vluxseg3ei8_v_u8m2(__VA_ARGS__) |
| #define vluxseg3ei8_v_u8m2_m | ( | ... | ) | __riscv_vluxseg3ei8_v_u8m2_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_u8mf2 | ( | ... | ) | __riscv_vluxseg3ei8_v_u8mf2(__VA_ARGS__) |
| #define vluxseg3ei8_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg3ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_u8mf4 | ( | ... | ) | __riscv_vluxseg3ei8_v_u8mf4(__VA_ARGS__) |
| #define vluxseg3ei8_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg3ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg3ei8_v_u8mf8 | ( | ... | ) | __riscv_vluxseg3ei8_v_u8mf8(__VA_ARGS__) |
| #define vluxseg3ei8_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg3ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_f16m1 | ( | ... | ) | __riscv_vluxseg4ei16_v_f16m1(__VA_ARGS__) |
| #define vluxseg4ei16_v_f16m1_m | ( | ... | ) | __riscv_vluxseg4ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_f16m2 | ( | ... | ) | __riscv_vluxseg4ei16_v_f16m2(__VA_ARGS__) |
| #define vluxseg4ei16_v_f16m2_m | ( | ... | ) | __riscv_vluxseg4ei16_v_f16m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_f16mf2 | ( | ... | ) | __riscv_vluxseg4ei16_v_f16mf2(__VA_ARGS__) |
| #define vluxseg4ei16_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg4ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_f16mf4 | ( | ... | ) | __riscv_vluxseg4ei16_v_f16mf4(__VA_ARGS__) |
| #define vluxseg4ei16_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg4ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_f32m1 | ( | ... | ) | __riscv_vluxseg4ei16_v_f32m1(__VA_ARGS__) |
| #define vluxseg4ei16_v_f32m1_m | ( | ... | ) | __riscv_vluxseg4ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_f32m2 | ( | ... | ) | __riscv_vluxseg4ei16_v_f32m2(__VA_ARGS__) |
| #define vluxseg4ei16_v_f32m2_m | ( | ... | ) | __riscv_vluxseg4ei16_v_f32m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_f32mf2 | ( | ... | ) | __riscv_vluxseg4ei16_v_f32mf2(__VA_ARGS__) |
| #define vluxseg4ei16_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg4ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_f64m1 | ( | ... | ) | __riscv_vluxseg4ei16_v_f64m1(__VA_ARGS__) |
| #define vluxseg4ei16_v_f64m1_m | ( | ... | ) | __riscv_vluxseg4ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_f64m2 | ( | ... | ) | __riscv_vluxseg4ei16_v_f64m2(__VA_ARGS__) |
| #define vluxseg4ei16_v_f64m2_m | ( | ... | ) | __riscv_vluxseg4ei16_v_f64m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_i16m1 | ( | ... | ) | __riscv_vluxseg4ei16_v_i16m1(__VA_ARGS__) |
| #define vluxseg4ei16_v_i16m1_m | ( | ... | ) | __riscv_vluxseg4ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_i16m2 | ( | ... | ) | __riscv_vluxseg4ei16_v_i16m2(__VA_ARGS__) |
| #define vluxseg4ei16_v_i16m2_m | ( | ... | ) | __riscv_vluxseg4ei16_v_i16m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_i16mf2 | ( | ... | ) | __riscv_vluxseg4ei16_v_i16mf2(__VA_ARGS__) |
| #define vluxseg4ei16_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg4ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_i16mf4 | ( | ... | ) | __riscv_vluxseg4ei16_v_i16mf4(__VA_ARGS__) |
| #define vluxseg4ei16_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg4ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_i32m1 | ( | ... | ) | __riscv_vluxseg4ei16_v_i32m1(__VA_ARGS__) |
| #define vluxseg4ei16_v_i32m1_m | ( | ... | ) | __riscv_vluxseg4ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_i32m2 | ( | ... | ) | __riscv_vluxseg4ei16_v_i32m2(__VA_ARGS__) |
| #define vluxseg4ei16_v_i32m2_m | ( | ... | ) | __riscv_vluxseg4ei16_v_i32m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_i32mf2 | ( | ... | ) | __riscv_vluxseg4ei16_v_i32mf2(__VA_ARGS__) |
| #define vluxseg4ei16_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg4ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_i64m1 | ( | ... | ) | __riscv_vluxseg4ei16_v_i64m1(__VA_ARGS__) |
| #define vluxseg4ei16_v_i64m1_m | ( | ... | ) | __riscv_vluxseg4ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_i64m2 | ( | ... | ) | __riscv_vluxseg4ei16_v_i64m2(__VA_ARGS__) |
| #define vluxseg4ei16_v_i64m2_m | ( | ... | ) | __riscv_vluxseg4ei16_v_i64m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_i8m1 | ( | ... | ) | __riscv_vluxseg4ei16_v_i8m1(__VA_ARGS__) |
| #define vluxseg4ei16_v_i8m1_m | ( | ... | ) | __riscv_vluxseg4ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_i8m2 | ( | ... | ) | __riscv_vluxseg4ei16_v_i8m2(__VA_ARGS__) |
| #define vluxseg4ei16_v_i8m2_m | ( | ... | ) | __riscv_vluxseg4ei16_v_i8m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_i8mf2 | ( | ... | ) | __riscv_vluxseg4ei16_v_i8mf2(__VA_ARGS__) |
| #define vluxseg4ei16_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg4ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_i8mf4 | ( | ... | ) | __riscv_vluxseg4ei16_v_i8mf4(__VA_ARGS__) |
| #define vluxseg4ei16_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg4ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_i8mf8 | ( | ... | ) | __riscv_vluxseg4ei16_v_i8mf8(__VA_ARGS__) |
| #define vluxseg4ei16_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg4ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_u16m1 | ( | ... | ) | __riscv_vluxseg4ei16_v_u16m1(__VA_ARGS__) |
| #define vluxseg4ei16_v_u16m1_m | ( | ... | ) | __riscv_vluxseg4ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_u16m2 | ( | ... | ) | __riscv_vluxseg4ei16_v_u16m2(__VA_ARGS__) |
| #define vluxseg4ei16_v_u16m2_m | ( | ... | ) | __riscv_vluxseg4ei16_v_u16m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_u16mf2 | ( | ... | ) | __riscv_vluxseg4ei16_v_u16mf2(__VA_ARGS__) |
| #define vluxseg4ei16_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg4ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_u16mf4 | ( | ... | ) | __riscv_vluxseg4ei16_v_u16mf4(__VA_ARGS__) |
| #define vluxseg4ei16_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg4ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_u32m1 | ( | ... | ) | __riscv_vluxseg4ei16_v_u32m1(__VA_ARGS__) |
| #define vluxseg4ei16_v_u32m1_m | ( | ... | ) | __riscv_vluxseg4ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_u32m2 | ( | ... | ) | __riscv_vluxseg4ei16_v_u32m2(__VA_ARGS__) |
| #define vluxseg4ei16_v_u32m2_m | ( | ... | ) | __riscv_vluxseg4ei16_v_u32m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_u32mf2 | ( | ... | ) | __riscv_vluxseg4ei16_v_u32mf2(__VA_ARGS__) |
| #define vluxseg4ei16_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg4ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_u64m1 | ( | ... | ) | __riscv_vluxseg4ei16_v_u64m1(__VA_ARGS__) |
| #define vluxseg4ei16_v_u64m1_m | ( | ... | ) | __riscv_vluxseg4ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_u64m2 | ( | ... | ) | __riscv_vluxseg4ei16_v_u64m2(__VA_ARGS__) |
| #define vluxseg4ei16_v_u64m2_m | ( | ... | ) | __riscv_vluxseg4ei16_v_u64m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_u8m1 | ( | ... | ) | __riscv_vluxseg4ei16_v_u8m1(__VA_ARGS__) |
| #define vluxseg4ei16_v_u8m1_m | ( | ... | ) | __riscv_vluxseg4ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_u8m2 | ( | ... | ) | __riscv_vluxseg4ei16_v_u8m2(__VA_ARGS__) |
| #define vluxseg4ei16_v_u8m2_m | ( | ... | ) | __riscv_vluxseg4ei16_v_u8m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_u8mf2 | ( | ... | ) | __riscv_vluxseg4ei16_v_u8mf2(__VA_ARGS__) |
| #define vluxseg4ei16_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg4ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_u8mf4 | ( | ... | ) | __riscv_vluxseg4ei16_v_u8mf4(__VA_ARGS__) |
| #define vluxseg4ei16_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg4ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg4ei16_v_u8mf8 | ( | ... | ) | __riscv_vluxseg4ei16_v_u8mf8(__VA_ARGS__) |
| #define vluxseg4ei16_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg4ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_f16m1 | ( | ... | ) | __riscv_vluxseg4ei32_v_f16m1(__VA_ARGS__) |
| #define vluxseg4ei32_v_f16m1_m | ( | ... | ) | __riscv_vluxseg4ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_f16m2 | ( | ... | ) | __riscv_vluxseg4ei32_v_f16m2(__VA_ARGS__) |
| #define vluxseg4ei32_v_f16m2_m | ( | ... | ) | __riscv_vluxseg4ei32_v_f16m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_f16mf2 | ( | ... | ) | __riscv_vluxseg4ei32_v_f16mf2(__VA_ARGS__) |
| #define vluxseg4ei32_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg4ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_f16mf4 | ( | ... | ) | __riscv_vluxseg4ei32_v_f16mf4(__VA_ARGS__) |
| #define vluxseg4ei32_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg4ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_f32m1 | ( | ... | ) | __riscv_vluxseg4ei32_v_f32m1(__VA_ARGS__) |
| #define vluxseg4ei32_v_f32m1_m | ( | ... | ) | __riscv_vluxseg4ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_f32m2 | ( | ... | ) | __riscv_vluxseg4ei32_v_f32m2(__VA_ARGS__) |
| #define vluxseg4ei32_v_f32m2_m | ( | ... | ) | __riscv_vluxseg4ei32_v_f32m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_f32mf2 | ( | ... | ) | __riscv_vluxseg4ei32_v_f32mf2(__VA_ARGS__) |
| #define vluxseg4ei32_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg4ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_f64m1 | ( | ... | ) | __riscv_vluxseg4ei32_v_f64m1(__VA_ARGS__) |
| #define vluxseg4ei32_v_f64m1_m | ( | ... | ) | __riscv_vluxseg4ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_f64m2 | ( | ... | ) | __riscv_vluxseg4ei32_v_f64m2(__VA_ARGS__) |
| #define vluxseg4ei32_v_f64m2_m | ( | ... | ) | __riscv_vluxseg4ei32_v_f64m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_i16m1 | ( | ... | ) | __riscv_vluxseg4ei32_v_i16m1(__VA_ARGS__) |
| #define vluxseg4ei32_v_i16m1_m | ( | ... | ) | __riscv_vluxseg4ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_i16m2 | ( | ... | ) | __riscv_vluxseg4ei32_v_i16m2(__VA_ARGS__) |
| #define vluxseg4ei32_v_i16m2_m | ( | ... | ) | __riscv_vluxseg4ei32_v_i16m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_i16mf2 | ( | ... | ) | __riscv_vluxseg4ei32_v_i16mf2(__VA_ARGS__) |
| #define vluxseg4ei32_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg4ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_i16mf4 | ( | ... | ) | __riscv_vluxseg4ei32_v_i16mf4(__VA_ARGS__) |
| #define vluxseg4ei32_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg4ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_i32m1 | ( | ... | ) | __riscv_vluxseg4ei32_v_i32m1(__VA_ARGS__) |
| #define vluxseg4ei32_v_i32m1_m | ( | ... | ) | __riscv_vluxseg4ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_i32m2 | ( | ... | ) | __riscv_vluxseg4ei32_v_i32m2(__VA_ARGS__) |
| #define vluxseg4ei32_v_i32m2_m | ( | ... | ) | __riscv_vluxseg4ei32_v_i32m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_i32mf2 | ( | ... | ) | __riscv_vluxseg4ei32_v_i32mf2(__VA_ARGS__) |
| #define vluxseg4ei32_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg4ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_i64m1 | ( | ... | ) | __riscv_vluxseg4ei32_v_i64m1(__VA_ARGS__) |
| #define vluxseg4ei32_v_i64m1_m | ( | ... | ) | __riscv_vluxseg4ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_i64m2 | ( | ... | ) | __riscv_vluxseg4ei32_v_i64m2(__VA_ARGS__) |
| #define vluxseg4ei32_v_i64m2_m | ( | ... | ) | __riscv_vluxseg4ei32_v_i64m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_i8m1 | ( | ... | ) | __riscv_vluxseg4ei32_v_i8m1(__VA_ARGS__) |
| #define vluxseg4ei32_v_i8m1_m | ( | ... | ) | __riscv_vluxseg4ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_i8m2 | ( | ... | ) | __riscv_vluxseg4ei32_v_i8m2(__VA_ARGS__) |
| #define vluxseg4ei32_v_i8m2_m | ( | ... | ) | __riscv_vluxseg4ei32_v_i8m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_i8mf2 | ( | ... | ) | __riscv_vluxseg4ei32_v_i8mf2(__VA_ARGS__) |
| #define vluxseg4ei32_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg4ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_i8mf4 | ( | ... | ) | __riscv_vluxseg4ei32_v_i8mf4(__VA_ARGS__) |
| #define vluxseg4ei32_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg4ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_i8mf8 | ( | ... | ) | __riscv_vluxseg4ei32_v_i8mf8(__VA_ARGS__) |
| #define vluxseg4ei32_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg4ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_u16m1 | ( | ... | ) | __riscv_vluxseg4ei32_v_u16m1(__VA_ARGS__) |
| #define vluxseg4ei32_v_u16m1_m | ( | ... | ) | __riscv_vluxseg4ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_u16m2 | ( | ... | ) | __riscv_vluxseg4ei32_v_u16m2(__VA_ARGS__) |
| #define vluxseg4ei32_v_u16m2_m | ( | ... | ) | __riscv_vluxseg4ei32_v_u16m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_u16mf2 | ( | ... | ) | __riscv_vluxseg4ei32_v_u16mf2(__VA_ARGS__) |
| #define vluxseg4ei32_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg4ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_u16mf4 | ( | ... | ) | __riscv_vluxseg4ei32_v_u16mf4(__VA_ARGS__) |
| #define vluxseg4ei32_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg4ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_u32m1 | ( | ... | ) | __riscv_vluxseg4ei32_v_u32m1(__VA_ARGS__) |
| #define vluxseg4ei32_v_u32m1_m | ( | ... | ) | __riscv_vluxseg4ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_u32m2 | ( | ... | ) | __riscv_vluxseg4ei32_v_u32m2(__VA_ARGS__) |
| #define vluxseg4ei32_v_u32m2_m | ( | ... | ) | __riscv_vluxseg4ei32_v_u32m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_u32mf2 | ( | ... | ) | __riscv_vluxseg4ei32_v_u32mf2(__VA_ARGS__) |
| #define vluxseg4ei32_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg4ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_u64m1 | ( | ... | ) | __riscv_vluxseg4ei32_v_u64m1(__VA_ARGS__) |
| #define vluxseg4ei32_v_u64m1_m | ( | ... | ) | __riscv_vluxseg4ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_u64m2 | ( | ... | ) | __riscv_vluxseg4ei32_v_u64m2(__VA_ARGS__) |
| #define vluxseg4ei32_v_u64m2_m | ( | ... | ) | __riscv_vluxseg4ei32_v_u64m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_u8m1 | ( | ... | ) | __riscv_vluxseg4ei32_v_u8m1(__VA_ARGS__) |
| #define vluxseg4ei32_v_u8m1_m | ( | ... | ) | __riscv_vluxseg4ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_u8m2 | ( | ... | ) | __riscv_vluxseg4ei32_v_u8m2(__VA_ARGS__) |
| #define vluxseg4ei32_v_u8m2_m | ( | ... | ) | __riscv_vluxseg4ei32_v_u8m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_u8mf2 | ( | ... | ) | __riscv_vluxseg4ei32_v_u8mf2(__VA_ARGS__) |
| #define vluxseg4ei32_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg4ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_u8mf4 | ( | ... | ) | __riscv_vluxseg4ei32_v_u8mf4(__VA_ARGS__) |
| #define vluxseg4ei32_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg4ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg4ei32_v_u8mf8 | ( | ... | ) | __riscv_vluxseg4ei32_v_u8mf8(__VA_ARGS__) |
| #define vluxseg4ei32_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg4ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_f16m1 | ( | ... | ) | __riscv_vluxseg4ei64_v_f16m1(__VA_ARGS__) |
| #define vluxseg4ei64_v_f16m1_m | ( | ... | ) | __riscv_vluxseg4ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_f16m2 | ( | ... | ) | __riscv_vluxseg4ei64_v_f16m2(__VA_ARGS__) |
| #define vluxseg4ei64_v_f16m2_m | ( | ... | ) | __riscv_vluxseg4ei64_v_f16m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_f16mf2 | ( | ... | ) | __riscv_vluxseg4ei64_v_f16mf2(__VA_ARGS__) |
| #define vluxseg4ei64_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg4ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_f16mf4 | ( | ... | ) | __riscv_vluxseg4ei64_v_f16mf4(__VA_ARGS__) |
| #define vluxseg4ei64_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg4ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_f32m1 | ( | ... | ) | __riscv_vluxseg4ei64_v_f32m1(__VA_ARGS__) |
| #define vluxseg4ei64_v_f32m1_m | ( | ... | ) | __riscv_vluxseg4ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_f32m2 | ( | ... | ) | __riscv_vluxseg4ei64_v_f32m2(__VA_ARGS__) |
| #define vluxseg4ei64_v_f32m2_m | ( | ... | ) | __riscv_vluxseg4ei64_v_f32m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_f32mf2 | ( | ... | ) | __riscv_vluxseg4ei64_v_f32mf2(__VA_ARGS__) |
| #define vluxseg4ei64_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg4ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_f64m1 | ( | ... | ) | __riscv_vluxseg4ei64_v_f64m1(__VA_ARGS__) |
| #define vluxseg4ei64_v_f64m1_m | ( | ... | ) | __riscv_vluxseg4ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_f64m2 | ( | ... | ) | __riscv_vluxseg4ei64_v_f64m2(__VA_ARGS__) |
| #define vluxseg4ei64_v_f64m2_m | ( | ... | ) | __riscv_vluxseg4ei64_v_f64m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_i16m1 | ( | ... | ) | __riscv_vluxseg4ei64_v_i16m1(__VA_ARGS__) |
| #define vluxseg4ei64_v_i16m1_m | ( | ... | ) | __riscv_vluxseg4ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_i16m2 | ( | ... | ) | __riscv_vluxseg4ei64_v_i16m2(__VA_ARGS__) |
| #define vluxseg4ei64_v_i16m2_m | ( | ... | ) | __riscv_vluxseg4ei64_v_i16m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_i16mf2 | ( | ... | ) | __riscv_vluxseg4ei64_v_i16mf2(__VA_ARGS__) |
| #define vluxseg4ei64_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg4ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_i16mf4 | ( | ... | ) | __riscv_vluxseg4ei64_v_i16mf4(__VA_ARGS__) |
| #define vluxseg4ei64_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg4ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_i32m1 | ( | ... | ) | __riscv_vluxseg4ei64_v_i32m1(__VA_ARGS__) |
| #define vluxseg4ei64_v_i32m1_m | ( | ... | ) | __riscv_vluxseg4ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_i32m2 | ( | ... | ) | __riscv_vluxseg4ei64_v_i32m2(__VA_ARGS__) |
| #define vluxseg4ei64_v_i32m2_m | ( | ... | ) | __riscv_vluxseg4ei64_v_i32m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_i32mf2 | ( | ... | ) | __riscv_vluxseg4ei64_v_i32mf2(__VA_ARGS__) |
| #define vluxseg4ei64_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg4ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_i64m1 | ( | ... | ) | __riscv_vluxseg4ei64_v_i64m1(__VA_ARGS__) |
| #define vluxseg4ei64_v_i64m1_m | ( | ... | ) | __riscv_vluxseg4ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_i64m2 | ( | ... | ) | __riscv_vluxseg4ei64_v_i64m2(__VA_ARGS__) |
| #define vluxseg4ei64_v_i64m2_m | ( | ... | ) | __riscv_vluxseg4ei64_v_i64m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_i8m1 | ( | ... | ) | __riscv_vluxseg4ei64_v_i8m1(__VA_ARGS__) |
| #define vluxseg4ei64_v_i8m1_m | ( | ... | ) | __riscv_vluxseg4ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_i8mf2 | ( | ... | ) | __riscv_vluxseg4ei64_v_i8mf2(__VA_ARGS__) |
| #define vluxseg4ei64_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg4ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_i8mf4 | ( | ... | ) | __riscv_vluxseg4ei64_v_i8mf4(__VA_ARGS__) |
| #define vluxseg4ei64_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg4ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_i8mf8 | ( | ... | ) | __riscv_vluxseg4ei64_v_i8mf8(__VA_ARGS__) |
| #define vluxseg4ei64_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg4ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_u16m1 | ( | ... | ) | __riscv_vluxseg4ei64_v_u16m1(__VA_ARGS__) |
| #define vluxseg4ei64_v_u16m1_m | ( | ... | ) | __riscv_vluxseg4ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_u16m2 | ( | ... | ) | __riscv_vluxseg4ei64_v_u16m2(__VA_ARGS__) |
| #define vluxseg4ei64_v_u16m2_m | ( | ... | ) | __riscv_vluxseg4ei64_v_u16m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_u16mf2 | ( | ... | ) | __riscv_vluxseg4ei64_v_u16mf2(__VA_ARGS__) |
| #define vluxseg4ei64_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg4ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_u16mf4 | ( | ... | ) | __riscv_vluxseg4ei64_v_u16mf4(__VA_ARGS__) |
| #define vluxseg4ei64_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg4ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_u32m1 | ( | ... | ) | __riscv_vluxseg4ei64_v_u32m1(__VA_ARGS__) |
| #define vluxseg4ei64_v_u32m1_m | ( | ... | ) | __riscv_vluxseg4ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_u32m2 | ( | ... | ) | __riscv_vluxseg4ei64_v_u32m2(__VA_ARGS__) |
| #define vluxseg4ei64_v_u32m2_m | ( | ... | ) | __riscv_vluxseg4ei64_v_u32m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_u32mf2 | ( | ... | ) | __riscv_vluxseg4ei64_v_u32mf2(__VA_ARGS__) |
| #define vluxseg4ei64_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg4ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_u64m1 | ( | ... | ) | __riscv_vluxseg4ei64_v_u64m1(__VA_ARGS__) |
| #define vluxseg4ei64_v_u64m1_m | ( | ... | ) | __riscv_vluxseg4ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_u64m2 | ( | ... | ) | __riscv_vluxseg4ei64_v_u64m2(__VA_ARGS__) |
| #define vluxseg4ei64_v_u64m2_m | ( | ... | ) | __riscv_vluxseg4ei64_v_u64m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_u8m1 | ( | ... | ) | __riscv_vluxseg4ei64_v_u8m1(__VA_ARGS__) |
| #define vluxseg4ei64_v_u8m1_m | ( | ... | ) | __riscv_vluxseg4ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_u8mf2 | ( | ... | ) | __riscv_vluxseg4ei64_v_u8mf2(__VA_ARGS__) |
| #define vluxseg4ei64_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg4ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_u8mf4 | ( | ... | ) | __riscv_vluxseg4ei64_v_u8mf4(__VA_ARGS__) |
| #define vluxseg4ei64_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg4ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg4ei64_v_u8mf8 | ( | ... | ) | __riscv_vluxseg4ei64_v_u8mf8(__VA_ARGS__) |
| #define vluxseg4ei64_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg4ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_f16m1 | ( | ... | ) | __riscv_vluxseg4ei8_v_f16m1(__VA_ARGS__) |
| #define vluxseg4ei8_v_f16m1_m | ( | ... | ) | __riscv_vluxseg4ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_f16m2 | ( | ... | ) | __riscv_vluxseg4ei8_v_f16m2(__VA_ARGS__) |
| #define vluxseg4ei8_v_f16m2_m | ( | ... | ) | __riscv_vluxseg4ei8_v_f16m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_f16mf2 | ( | ... | ) | __riscv_vluxseg4ei8_v_f16mf2(__VA_ARGS__) |
| #define vluxseg4ei8_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg4ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_f16mf4 | ( | ... | ) | __riscv_vluxseg4ei8_v_f16mf4(__VA_ARGS__) |
| #define vluxseg4ei8_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg4ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_f32m1 | ( | ... | ) | __riscv_vluxseg4ei8_v_f32m1(__VA_ARGS__) |
| #define vluxseg4ei8_v_f32m1_m | ( | ... | ) | __riscv_vluxseg4ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_f32m2 | ( | ... | ) | __riscv_vluxseg4ei8_v_f32m2(__VA_ARGS__) |
| #define vluxseg4ei8_v_f32m2_m | ( | ... | ) | __riscv_vluxseg4ei8_v_f32m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_f32mf2 | ( | ... | ) | __riscv_vluxseg4ei8_v_f32mf2(__VA_ARGS__) |
| #define vluxseg4ei8_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg4ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_f64m1 | ( | ... | ) | __riscv_vluxseg4ei8_v_f64m1(__VA_ARGS__) |
| #define vluxseg4ei8_v_f64m1_m | ( | ... | ) | __riscv_vluxseg4ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_f64m2 | ( | ... | ) | __riscv_vluxseg4ei8_v_f64m2(__VA_ARGS__) |
| #define vluxseg4ei8_v_f64m2_m | ( | ... | ) | __riscv_vluxseg4ei8_v_f64m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_i16m1 | ( | ... | ) | __riscv_vluxseg4ei8_v_i16m1(__VA_ARGS__) |
| #define vluxseg4ei8_v_i16m1_m | ( | ... | ) | __riscv_vluxseg4ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_i16m2 | ( | ... | ) | __riscv_vluxseg4ei8_v_i16m2(__VA_ARGS__) |
| #define vluxseg4ei8_v_i16m2_m | ( | ... | ) | __riscv_vluxseg4ei8_v_i16m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_i16mf2 | ( | ... | ) | __riscv_vluxseg4ei8_v_i16mf2(__VA_ARGS__) |
| #define vluxseg4ei8_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg4ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_i16mf4 | ( | ... | ) | __riscv_vluxseg4ei8_v_i16mf4(__VA_ARGS__) |
| #define vluxseg4ei8_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg4ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_i32m1 | ( | ... | ) | __riscv_vluxseg4ei8_v_i32m1(__VA_ARGS__) |
| #define vluxseg4ei8_v_i32m1_m | ( | ... | ) | __riscv_vluxseg4ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_i32m2 | ( | ... | ) | __riscv_vluxseg4ei8_v_i32m2(__VA_ARGS__) |
| #define vluxseg4ei8_v_i32m2_m | ( | ... | ) | __riscv_vluxseg4ei8_v_i32m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_i32mf2 | ( | ... | ) | __riscv_vluxseg4ei8_v_i32mf2(__VA_ARGS__) |
| #define vluxseg4ei8_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg4ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_i64m1 | ( | ... | ) | __riscv_vluxseg4ei8_v_i64m1(__VA_ARGS__) |
| #define vluxseg4ei8_v_i64m1_m | ( | ... | ) | __riscv_vluxseg4ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_i64m2 | ( | ... | ) | __riscv_vluxseg4ei8_v_i64m2(__VA_ARGS__) |
| #define vluxseg4ei8_v_i64m2_m | ( | ... | ) | __riscv_vluxseg4ei8_v_i64m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_i8m1 | ( | ... | ) | __riscv_vluxseg4ei8_v_i8m1(__VA_ARGS__) |
| #define vluxseg4ei8_v_i8m1_m | ( | ... | ) | __riscv_vluxseg4ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_i8m2 | ( | ... | ) | __riscv_vluxseg4ei8_v_i8m2(__VA_ARGS__) |
| #define vluxseg4ei8_v_i8m2_m | ( | ... | ) | __riscv_vluxseg4ei8_v_i8m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_i8mf2 | ( | ... | ) | __riscv_vluxseg4ei8_v_i8mf2(__VA_ARGS__) |
| #define vluxseg4ei8_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg4ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_i8mf4 | ( | ... | ) | __riscv_vluxseg4ei8_v_i8mf4(__VA_ARGS__) |
| #define vluxseg4ei8_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg4ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_i8mf8 | ( | ... | ) | __riscv_vluxseg4ei8_v_i8mf8(__VA_ARGS__) |
| #define vluxseg4ei8_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg4ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_u16m1 | ( | ... | ) | __riscv_vluxseg4ei8_v_u16m1(__VA_ARGS__) |
| #define vluxseg4ei8_v_u16m1_m | ( | ... | ) | __riscv_vluxseg4ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_u16m2 | ( | ... | ) | __riscv_vluxseg4ei8_v_u16m2(__VA_ARGS__) |
| #define vluxseg4ei8_v_u16m2_m | ( | ... | ) | __riscv_vluxseg4ei8_v_u16m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_u16mf2 | ( | ... | ) | __riscv_vluxseg4ei8_v_u16mf2(__VA_ARGS__) |
| #define vluxseg4ei8_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg4ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_u16mf4 | ( | ... | ) | __riscv_vluxseg4ei8_v_u16mf4(__VA_ARGS__) |
| #define vluxseg4ei8_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg4ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_u32m1 | ( | ... | ) | __riscv_vluxseg4ei8_v_u32m1(__VA_ARGS__) |
| #define vluxseg4ei8_v_u32m1_m | ( | ... | ) | __riscv_vluxseg4ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_u32m2 | ( | ... | ) | __riscv_vluxseg4ei8_v_u32m2(__VA_ARGS__) |
| #define vluxseg4ei8_v_u32m2_m | ( | ... | ) | __riscv_vluxseg4ei8_v_u32m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_u32mf2 | ( | ... | ) | __riscv_vluxseg4ei8_v_u32mf2(__VA_ARGS__) |
| #define vluxseg4ei8_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg4ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_u64m1 | ( | ... | ) | __riscv_vluxseg4ei8_v_u64m1(__VA_ARGS__) |
| #define vluxseg4ei8_v_u64m1_m | ( | ... | ) | __riscv_vluxseg4ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_u64m2 | ( | ... | ) | __riscv_vluxseg4ei8_v_u64m2(__VA_ARGS__) |
| #define vluxseg4ei8_v_u64m2_m | ( | ... | ) | __riscv_vluxseg4ei8_v_u64m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_u8m1 | ( | ... | ) | __riscv_vluxseg4ei8_v_u8m1(__VA_ARGS__) |
| #define vluxseg4ei8_v_u8m1_m | ( | ... | ) | __riscv_vluxseg4ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_u8m2 | ( | ... | ) | __riscv_vluxseg4ei8_v_u8m2(__VA_ARGS__) |
| #define vluxseg4ei8_v_u8m2_m | ( | ... | ) | __riscv_vluxseg4ei8_v_u8m2_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_u8mf2 | ( | ... | ) | __riscv_vluxseg4ei8_v_u8mf2(__VA_ARGS__) |
| #define vluxseg4ei8_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg4ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_u8mf4 | ( | ... | ) | __riscv_vluxseg4ei8_v_u8mf4(__VA_ARGS__) |
| #define vluxseg4ei8_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg4ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg4ei8_v_u8mf8 | ( | ... | ) | __riscv_vluxseg4ei8_v_u8mf8(__VA_ARGS__) |
| #define vluxseg4ei8_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg4ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_f16m1 | ( | ... | ) | __riscv_vluxseg5ei16_v_f16m1(__VA_ARGS__) |
| #define vluxseg5ei16_v_f16m1_m | ( | ... | ) | __riscv_vluxseg5ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_f16mf2 | ( | ... | ) | __riscv_vluxseg5ei16_v_f16mf2(__VA_ARGS__) |
| #define vluxseg5ei16_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg5ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_f16mf4 | ( | ... | ) | __riscv_vluxseg5ei16_v_f16mf4(__VA_ARGS__) |
| #define vluxseg5ei16_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg5ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_f32m1 | ( | ... | ) | __riscv_vluxseg5ei16_v_f32m1(__VA_ARGS__) |
| #define vluxseg5ei16_v_f32m1_m | ( | ... | ) | __riscv_vluxseg5ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_f32mf2 | ( | ... | ) | __riscv_vluxseg5ei16_v_f32mf2(__VA_ARGS__) |
| #define vluxseg5ei16_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg5ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_f64m1 | ( | ... | ) | __riscv_vluxseg5ei16_v_f64m1(__VA_ARGS__) |
| #define vluxseg5ei16_v_f64m1_m | ( | ... | ) | __riscv_vluxseg5ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_i16m1 | ( | ... | ) | __riscv_vluxseg5ei16_v_i16m1(__VA_ARGS__) |
| #define vluxseg5ei16_v_i16m1_m | ( | ... | ) | __riscv_vluxseg5ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_i16mf2 | ( | ... | ) | __riscv_vluxseg5ei16_v_i16mf2(__VA_ARGS__) |
| #define vluxseg5ei16_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg5ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_i16mf4 | ( | ... | ) | __riscv_vluxseg5ei16_v_i16mf4(__VA_ARGS__) |
| #define vluxseg5ei16_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg5ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_i32m1 | ( | ... | ) | __riscv_vluxseg5ei16_v_i32m1(__VA_ARGS__) |
| #define vluxseg5ei16_v_i32m1_m | ( | ... | ) | __riscv_vluxseg5ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_i32mf2 | ( | ... | ) | __riscv_vluxseg5ei16_v_i32mf2(__VA_ARGS__) |
| #define vluxseg5ei16_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg5ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_i64m1 | ( | ... | ) | __riscv_vluxseg5ei16_v_i64m1(__VA_ARGS__) |
| #define vluxseg5ei16_v_i64m1_m | ( | ... | ) | __riscv_vluxseg5ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_i8m1 | ( | ... | ) | __riscv_vluxseg5ei16_v_i8m1(__VA_ARGS__) |
| #define vluxseg5ei16_v_i8m1_m | ( | ... | ) | __riscv_vluxseg5ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_i8mf2 | ( | ... | ) | __riscv_vluxseg5ei16_v_i8mf2(__VA_ARGS__) |
| #define vluxseg5ei16_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg5ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_i8mf4 | ( | ... | ) | __riscv_vluxseg5ei16_v_i8mf4(__VA_ARGS__) |
| #define vluxseg5ei16_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg5ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_i8mf8 | ( | ... | ) | __riscv_vluxseg5ei16_v_i8mf8(__VA_ARGS__) |
| #define vluxseg5ei16_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg5ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_u16m1 | ( | ... | ) | __riscv_vluxseg5ei16_v_u16m1(__VA_ARGS__) |
| #define vluxseg5ei16_v_u16m1_m | ( | ... | ) | __riscv_vluxseg5ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_u16mf2 | ( | ... | ) | __riscv_vluxseg5ei16_v_u16mf2(__VA_ARGS__) |
| #define vluxseg5ei16_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg5ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_u16mf4 | ( | ... | ) | __riscv_vluxseg5ei16_v_u16mf4(__VA_ARGS__) |
| #define vluxseg5ei16_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg5ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_u32m1 | ( | ... | ) | __riscv_vluxseg5ei16_v_u32m1(__VA_ARGS__) |
| #define vluxseg5ei16_v_u32m1_m | ( | ... | ) | __riscv_vluxseg5ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_u32mf2 | ( | ... | ) | __riscv_vluxseg5ei16_v_u32mf2(__VA_ARGS__) |
| #define vluxseg5ei16_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg5ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_u64m1 | ( | ... | ) | __riscv_vluxseg5ei16_v_u64m1(__VA_ARGS__) |
| #define vluxseg5ei16_v_u64m1_m | ( | ... | ) | __riscv_vluxseg5ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_u8m1 | ( | ... | ) | __riscv_vluxseg5ei16_v_u8m1(__VA_ARGS__) |
| #define vluxseg5ei16_v_u8m1_m | ( | ... | ) | __riscv_vluxseg5ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_u8mf2 | ( | ... | ) | __riscv_vluxseg5ei16_v_u8mf2(__VA_ARGS__) |
| #define vluxseg5ei16_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg5ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_u8mf4 | ( | ... | ) | __riscv_vluxseg5ei16_v_u8mf4(__VA_ARGS__) |
| #define vluxseg5ei16_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg5ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg5ei16_v_u8mf8 | ( | ... | ) | __riscv_vluxseg5ei16_v_u8mf8(__VA_ARGS__) |
| #define vluxseg5ei16_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg5ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_f16m1 | ( | ... | ) | __riscv_vluxseg5ei32_v_f16m1(__VA_ARGS__) |
| #define vluxseg5ei32_v_f16m1_m | ( | ... | ) | __riscv_vluxseg5ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_f16mf2 | ( | ... | ) | __riscv_vluxseg5ei32_v_f16mf2(__VA_ARGS__) |
| #define vluxseg5ei32_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg5ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_f16mf4 | ( | ... | ) | __riscv_vluxseg5ei32_v_f16mf4(__VA_ARGS__) |
| #define vluxseg5ei32_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg5ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_f32m1 | ( | ... | ) | __riscv_vluxseg5ei32_v_f32m1(__VA_ARGS__) |
| #define vluxseg5ei32_v_f32m1_m | ( | ... | ) | __riscv_vluxseg5ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_f32mf2 | ( | ... | ) | __riscv_vluxseg5ei32_v_f32mf2(__VA_ARGS__) |
| #define vluxseg5ei32_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg5ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_f64m1 | ( | ... | ) | __riscv_vluxseg5ei32_v_f64m1(__VA_ARGS__) |
| #define vluxseg5ei32_v_f64m1_m | ( | ... | ) | __riscv_vluxseg5ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_i16m1 | ( | ... | ) | __riscv_vluxseg5ei32_v_i16m1(__VA_ARGS__) |
| #define vluxseg5ei32_v_i16m1_m | ( | ... | ) | __riscv_vluxseg5ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_i16mf2 | ( | ... | ) | __riscv_vluxseg5ei32_v_i16mf2(__VA_ARGS__) |
| #define vluxseg5ei32_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg5ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_i16mf4 | ( | ... | ) | __riscv_vluxseg5ei32_v_i16mf4(__VA_ARGS__) |
| #define vluxseg5ei32_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg5ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_i32m1 | ( | ... | ) | __riscv_vluxseg5ei32_v_i32m1(__VA_ARGS__) |
| #define vluxseg5ei32_v_i32m1_m | ( | ... | ) | __riscv_vluxseg5ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_i32mf2 | ( | ... | ) | __riscv_vluxseg5ei32_v_i32mf2(__VA_ARGS__) |
| #define vluxseg5ei32_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg5ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_i64m1 | ( | ... | ) | __riscv_vluxseg5ei32_v_i64m1(__VA_ARGS__) |
| #define vluxseg5ei32_v_i64m1_m | ( | ... | ) | __riscv_vluxseg5ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_i8m1 | ( | ... | ) | __riscv_vluxseg5ei32_v_i8m1(__VA_ARGS__) |
| #define vluxseg5ei32_v_i8m1_m | ( | ... | ) | __riscv_vluxseg5ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_i8mf2 | ( | ... | ) | __riscv_vluxseg5ei32_v_i8mf2(__VA_ARGS__) |
| #define vluxseg5ei32_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg5ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_i8mf4 | ( | ... | ) | __riscv_vluxseg5ei32_v_i8mf4(__VA_ARGS__) |
| #define vluxseg5ei32_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg5ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_i8mf8 | ( | ... | ) | __riscv_vluxseg5ei32_v_i8mf8(__VA_ARGS__) |
| #define vluxseg5ei32_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg5ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_u16m1 | ( | ... | ) | __riscv_vluxseg5ei32_v_u16m1(__VA_ARGS__) |
| #define vluxseg5ei32_v_u16m1_m | ( | ... | ) | __riscv_vluxseg5ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_u16mf2 | ( | ... | ) | __riscv_vluxseg5ei32_v_u16mf2(__VA_ARGS__) |
| #define vluxseg5ei32_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg5ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_u16mf4 | ( | ... | ) | __riscv_vluxseg5ei32_v_u16mf4(__VA_ARGS__) |
| #define vluxseg5ei32_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg5ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_u32m1 | ( | ... | ) | __riscv_vluxseg5ei32_v_u32m1(__VA_ARGS__) |
| #define vluxseg5ei32_v_u32m1_m | ( | ... | ) | __riscv_vluxseg5ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_u32mf2 | ( | ... | ) | __riscv_vluxseg5ei32_v_u32mf2(__VA_ARGS__) |
| #define vluxseg5ei32_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg5ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_u64m1 | ( | ... | ) | __riscv_vluxseg5ei32_v_u64m1(__VA_ARGS__) |
| #define vluxseg5ei32_v_u64m1_m | ( | ... | ) | __riscv_vluxseg5ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_u8m1 | ( | ... | ) | __riscv_vluxseg5ei32_v_u8m1(__VA_ARGS__) |
| #define vluxseg5ei32_v_u8m1_m | ( | ... | ) | __riscv_vluxseg5ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_u8mf2 | ( | ... | ) | __riscv_vluxseg5ei32_v_u8mf2(__VA_ARGS__) |
| #define vluxseg5ei32_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg5ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_u8mf4 | ( | ... | ) | __riscv_vluxseg5ei32_v_u8mf4(__VA_ARGS__) |
| #define vluxseg5ei32_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg5ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg5ei32_v_u8mf8 | ( | ... | ) | __riscv_vluxseg5ei32_v_u8mf8(__VA_ARGS__) |
| #define vluxseg5ei32_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg5ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_f16m1 | ( | ... | ) | __riscv_vluxseg5ei64_v_f16m1(__VA_ARGS__) |
| #define vluxseg5ei64_v_f16m1_m | ( | ... | ) | __riscv_vluxseg5ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_f16mf2 | ( | ... | ) | __riscv_vluxseg5ei64_v_f16mf2(__VA_ARGS__) |
| #define vluxseg5ei64_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg5ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_f16mf4 | ( | ... | ) | __riscv_vluxseg5ei64_v_f16mf4(__VA_ARGS__) |
| #define vluxseg5ei64_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg5ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_f32m1 | ( | ... | ) | __riscv_vluxseg5ei64_v_f32m1(__VA_ARGS__) |
| #define vluxseg5ei64_v_f32m1_m | ( | ... | ) | __riscv_vluxseg5ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_f32mf2 | ( | ... | ) | __riscv_vluxseg5ei64_v_f32mf2(__VA_ARGS__) |
| #define vluxseg5ei64_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg5ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_f64m1 | ( | ... | ) | __riscv_vluxseg5ei64_v_f64m1(__VA_ARGS__) |
| #define vluxseg5ei64_v_f64m1_m | ( | ... | ) | __riscv_vluxseg5ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_i16m1 | ( | ... | ) | __riscv_vluxseg5ei64_v_i16m1(__VA_ARGS__) |
| #define vluxseg5ei64_v_i16m1_m | ( | ... | ) | __riscv_vluxseg5ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_i16mf2 | ( | ... | ) | __riscv_vluxseg5ei64_v_i16mf2(__VA_ARGS__) |
| #define vluxseg5ei64_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg5ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_i16mf4 | ( | ... | ) | __riscv_vluxseg5ei64_v_i16mf4(__VA_ARGS__) |
| #define vluxseg5ei64_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg5ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_i32m1 | ( | ... | ) | __riscv_vluxseg5ei64_v_i32m1(__VA_ARGS__) |
| #define vluxseg5ei64_v_i32m1_m | ( | ... | ) | __riscv_vluxseg5ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_i32mf2 | ( | ... | ) | __riscv_vluxseg5ei64_v_i32mf2(__VA_ARGS__) |
| #define vluxseg5ei64_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg5ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_i64m1 | ( | ... | ) | __riscv_vluxseg5ei64_v_i64m1(__VA_ARGS__) |
| #define vluxseg5ei64_v_i64m1_m | ( | ... | ) | __riscv_vluxseg5ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_i8m1 | ( | ... | ) | __riscv_vluxseg5ei64_v_i8m1(__VA_ARGS__) |
| #define vluxseg5ei64_v_i8m1_m | ( | ... | ) | __riscv_vluxseg5ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_i8mf2 | ( | ... | ) | __riscv_vluxseg5ei64_v_i8mf2(__VA_ARGS__) |
| #define vluxseg5ei64_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg5ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_i8mf4 | ( | ... | ) | __riscv_vluxseg5ei64_v_i8mf4(__VA_ARGS__) |
| #define vluxseg5ei64_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg5ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_i8mf8 | ( | ... | ) | __riscv_vluxseg5ei64_v_i8mf8(__VA_ARGS__) |
| #define vluxseg5ei64_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg5ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_u16m1 | ( | ... | ) | __riscv_vluxseg5ei64_v_u16m1(__VA_ARGS__) |
| #define vluxseg5ei64_v_u16m1_m | ( | ... | ) | __riscv_vluxseg5ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_u16mf2 | ( | ... | ) | __riscv_vluxseg5ei64_v_u16mf2(__VA_ARGS__) |
| #define vluxseg5ei64_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg5ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_u16mf4 | ( | ... | ) | __riscv_vluxseg5ei64_v_u16mf4(__VA_ARGS__) |
| #define vluxseg5ei64_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg5ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_u32m1 | ( | ... | ) | __riscv_vluxseg5ei64_v_u32m1(__VA_ARGS__) |
| #define vluxseg5ei64_v_u32m1_m | ( | ... | ) | __riscv_vluxseg5ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_u32mf2 | ( | ... | ) | __riscv_vluxseg5ei64_v_u32mf2(__VA_ARGS__) |
| #define vluxseg5ei64_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg5ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_u64m1 | ( | ... | ) | __riscv_vluxseg5ei64_v_u64m1(__VA_ARGS__) |
| #define vluxseg5ei64_v_u64m1_m | ( | ... | ) | __riscv_vluxseg5ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_u8m1 | ( | ... | ) | __riscv_vluxseg5ei64_v_u8m1(__VA_ARGS__) |
| #define vluxseg5ei64_v_u8m1_m | ( | ... | ) | __riscv_vluxseg5ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_u8mf2 | ( | ... | ) | __riscv_vluxseg5ei64_v_u8mf2(__VA_ARGS__) |
| #define vluxseg5ei64_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg5ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_u8mf4 | ( | ... | ) | __riscv_vluxseg5ei64_v_u8mf4(__VA_ARGS__) |
| #define vluxseg5ei64_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg5ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg5ei64_v_u8mf8 | ( | ... | ) | __riscv_vluxseg5ei64_v_u8mf8(__VA_ARGS__) |
| #define vluxseg5ei64_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg5ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_f16m1 | ( | ... | ) | __riscv_vluxseg5ei8_v_f16m1(__VA_ARGS__) |
| #define vluxseg5ei8_v_f16m1_m | ( | ... | ) | __riscv_vluxseg5ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_f16mf2 | ( | ... | ) | __riscv_vluxseg5ei8_v_f16mf2(__VA_ARGS__) |
| #define vluxseg5ei8_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg5ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_f16mf4 | ( | ... | ) | __riscv_vluxseg5ei8_v_f16mf4(__VA_ARGS__) |
| #define vluxseg5ei8_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg5ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_f32m1 | ( | ... | ) | __riscv_vluxseg5ei8_v_f32m1(__VA_ARGS__) |
| #define vluxseg5ei8_v_f32m1_m | ( | ... | ) | __riscv_vluxseg5ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_f32mf2 | ( | ... | ) | __riscv_vluxseg5ei8_v_f32mf2(__VA_ARGS__) |
| #define vluxseg5ei8_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg5ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_f64m1 | ( | ... | ) | __riscv_vluxseg5ei8_v_f64m1(__VA_ARGS__) |
| #define vluxseg5ei8_v_f64m1_m | ( | ... | ) | __riscv_vluxseg5ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_i16m1 | ( | ... | ) | __riscv_vluxseg5ei8_v_i16m1(__VA_ARGS__) |
| #define vluxseg5ei8_v_i16m1_m | ( | ... | ) | __riscv_vluxseg5ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_i16mf2 | ( | ... | ) | __riscv_vluxseg5ei8_v_i16mf2(__VA_ARGS__) |
| #define vluxseg5ei8_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg5ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_i16mf4 | ( | ... | ) | __riscv_vluxseg5ei8_v_i16mf4(__VA_ARGS__) |
| #define vluxseg5ei8_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg5ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_i32m1 | ( | ... | ) | __riscv_vluxseg5ei8_v_i32m1(__VA_ARGS__) |
| #define vluxseg5ei8_v_i32m1_m | ( | ... | ) | __riscv_vluxseg5ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_i32mf2 | ( | ... | ) | __riscv_vluxseg5ei8_v_i32mf2(__VA_ARGS__) |
| #define vluxseg5ei8_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg5ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_i64m1 | ( | ... | ) | __riscv_vluxseg5ei8_v_i64m1(__VA_ARGS__) |
| #define vluxseg5ei8_v_i64m1_m | ( | ... | ) | __riscv_vluxseg5ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_i8m1 | ( | ... | ) | __riscv_vluxseg5ei8_v_i8m1(__VA_ARGS__) |
| #define vluxseg5ei8_v_i8m1_m | ( | ... | ) | __riscv_vluxseg5ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_i8mf2 | ( | ... | ) | __riscv_vluxseg5ei8_v_i8mf2(__VA_ARGS__) |
| #define vluxseg5ei8_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg5ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_i8mf4 | ( | ... | ) | __riscv_vluxseg5ei8_v_i8mf4(__VA_ARGS__) |
| #define vluxseg5ei8_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg5ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_i8mf8 | ( | ... | ) | __riscv_vluxseg5ei8_v_i8mf8(__VA_ARGS__) |
| #define vluxseg5ei8_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg5ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_u16m1 | ( | ... | ) | __riscv_vluxseg5ei8_v_u16m1(__VA_ARGS__) |
| #define vluxseg5ei8_v_u16m1_m | ( | ... | ) | __riscv_vluxseg5ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_u16mf2 | ( | ... | ) | __riscv_vluxseg5ei8_v_u16mf2(__VA_ARGS__) |
| #define vluxseg5ei8_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg5ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_u16mf4 | ( | ... | ) | __riscv_vluxseg5ei8_v_u16mf4(__VA_ARGS__) |
| #define vluxseg5ei8_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg5ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_u32m1 | ( | ... | ) | __riscv_vluxseg5ei8_v_u32m1(__VA_ARGS__) |
| #define vluxseg5ei8_v_u32m1_m | ( | ... | ) | __riscv_vluxseg5ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_u32mf2 | ( | ... | ) | __riscv_vluxseg5ei8_v_u32mf2(__VA_ARGS__) |
| #define vluxseg5ei8_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg5ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_u64m1 | ( | ... | ) | __riscv_vluxseg5ei8_v_u64m1(__VA_ARGS__) |
| #define vluxseg5ei8_v_u64m1_m | ( | ... | ) | __riscv_vluxseg5ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_u8m1 | ( | ... | ) | __riscv_vluxseg5ei8_v_u8m1(__VA_ARGS__) |
| #define vluxseg5ei8_v_u8m1_m | ( | ... | ) | __riscv_vluxseg5ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_u8mf2 | ( | ... | ) | __riscv_vluxseg5ei8_v_u8mf2(__VA_ARGS__) |
| #define vluxseg5ei8_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg5ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_u8mf4 | ( | ... | ) | __riscv_vluxseg5ei8_v_u8mf4(__VA_ARGS__) |
| #define vluxseg5ei8_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg5ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg5ei8_v_u8mf8 | ( | ... | ) | __riscv_vluxseg5ei8_v_u8mf8(__VA_ARGS__) |
| #define vluxseg5ei8_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg5ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_f16m1 | ( | ... | ) | __riscv_vluxseg6ei16_v_f16m1(__VA_ARGS__) |
| #define vluxseg6ei16_v_f16m1_m | ( | ... | ) | __riscv_vluxseg6ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_f16mf2 | ( | ... | ) | __riscv_vluxseg6ei16_v_f16mf2(__VA_ARGS__) |
| #define vluxseg6ei16_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg6ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_f16mf4 | ( | ... | ) | __riscv_vluxseg6ei16_v_f16mf4(__VA_ARGS__) |
| #define vluxseg6ei16_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg6ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_f32m1 | ( | ... | ) | __riscv_vluxseg6ei16_v_f32m1(__VA_ARGS__) |
| #define vluxseg6ei16_v_f32m1_m | ( | ... | ) | __riscv_vluxseg6ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_f32mf2 | ( | ... | ) | __riscv_vluxseg6ei16_v_f32mf2(__VA_ARGS__) |
| #define vluxseg6ei16_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg6ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_f64m1 | ( | ... | ) | __riscv_vluxseg6ei16_v_f64m1(__VA_ARGS__) |
| #define vluxseg6ei16_v_f64m1_m | ( | ... | ) | __riscv_vluxseg6ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_i16m1 | ( | ... | ) | __riscv_vluxseg6ei16_v_i16m1(__VA_ARGS__) |
| #define vluxseg6ei16_v_i16m1_m | ( | ... | ) | __riscv_vluxseg6ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_i16mf2 | ( | ... | ) | __riscv_vluxseg6ei16_v_i16mf2(__VA_ARGS__) |
| #define vluxseg6ei16_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg6ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_i16mf4 | ( | ... | ) | __riscv_vluxseg6ei16_v_i16mf4(__VA_ARGS__) |
| #define vluxseg6ei16_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg6ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_i32m1 | ( | ... | ) | __riscv_vluxseg6ei16_v_i32m1(__VA_ARGS__) |
| #define vluxseg6ei16_v_i32m1_m | ( | ... | ) | __riscv_vluxseg6ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_i32mf2 | ( | ... | ) | __riscv_vluxseg6ei16_v_i32mf2(__VA_ARGS__) |
| #define vluxseg6ei16_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg6ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_i64m1 | ( | ... | ) | __riscv_vluxseg6ei16_v_i64m1(__VA_ARGS__) |
| #define vluxseg6ei16_v_i64m1_m | ( | ... | ) | __riscv_vluxseg6ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_i8m1 | ( | ... | ) | __riscv_vluxseg6ei16_v_i8m1(__VA_ARGS__) |
| #define vluxseg6ei16_v_i8m1_m | ( | ... | ) | __riscv_vluxseg6ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_i8mf2 | ( | ... | ) | __riscv_vluxseg6ei16_v_i8mf2(__VA_ARGS__) |
| #define vluxseg6ei16_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg6ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_i8mf4 | ( | ... | ) | __riscv_vluxseg6ei16_v_i8mf4(__VA_ARGS__) |
| #define vluxseg6ei16_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg6ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_i8mf8 | ( | ... | ) | __riscv_vluxseg6ei16_v_i8mf8(__VA_ARGS__) |
| #define vluxseg6ei16_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg6ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_u16m1 | ( | ... | ) | __riscv_vluxseg6ei16_v_u16m1(__VA_ARGS__) |
| #define vluxseg6ei16_v_u16m1_m | ( | ... | ) | __riscv_vluxseg6ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_u16mf2 | ( | ... | ) | __riscv_vluxseg6ei16_v_u16mf2(__VA_ARGS__) |
| #define vluxseg6ei16_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg6ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_u16mf4 | ( | ... | ) | __riscv_vluxseg6ei16_v_u16mf4(__VA_ARGS__) |
| #define vluxseg6ei16_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg6ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_u32m1 | ( | ... | ) | __riscv_vluxseg6ei16_v_u32m1(__VA_ARGS__) |
| #define vluxseg6ei16_v_u32m1_m | ( | ... | ) | __riscv_vluxseg6ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_u32mf2 | ( | ... | ) | __riscv_vluxseg6ei16_v_u32mf2(__VA_ARGS__) |
| #define vluxseg6ei16_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg6ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_u64m1 | ( | ... | ) | __riscv_vluxseg6ei16_v_u64m1(__VA_ARGS__) |
| #define vluxseg6ei16_v_u64m1_m | ( | ... | ) | __riscv_vluxseg6ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_u8m1 | ( | ... | ) | __riscv_vluxseg6ei16_v_u8m1(__VA_ARGS__) |
| #define vluxseg6ei16_v_u8m1_m | ( | ... | ) | __riscv_vluxseg6ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_u8mf2 | ( | ... | ) | __riscv_vluxseg6ei16_v_u8mf2(__VA_ARGS__) |
| #define vluxseg6ei16_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg6ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_u8mf4 | ( | ... | ) | __riscv_vluxseg6ei16_v_u8mf4(__VA_ARGS__) |
| #define vluxseg6ei16_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg6ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg6ei16_v_u8mf8 | ( | ... | ) | __riscv_vluxseg6ei16_v_u8mf8(__VA_ARGS__) |
| #define vluxseg6ei16_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg6ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_f16m1 | ( | ... | ) | __riscv_vluxseg6ei32_v_f16m1(__VA_ARGS__) |
| #define vluxseg6ei32_v_f16m1_m | ( | ... | ) | __riscv_vluxseg6ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_f16mf2 | ( | ... | ) | __riscv_vluxseg6ei32_v_f16mf2(__VA_ARGS__) |
| #define vluxseg6ei32_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg6ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_f16mf4 | ( | ... | ) | __riscv_vluxseg6ei32_v_f16mf4(__VA_ARGS__) |
| #define vluxseg6ei32_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg6ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_f32m1 | ( | ... | ) | __riscv_vluxseg6ei32_v_f32m1(__VA_ARGS__) |
| #define vluxseg6ei32_v_f32m1_m | ( | ... | ) | __riscv_vluxseg6ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_f32mf2 | ( | ... | ) | __riscv_vluxseg6ei32_v_f32mf2(__VA_ARGS__) |
| #define vluxseg6ei32_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg6ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_f64m1 | ( | ... | ) | __riscv_vluxseg6ei32_v_f64m1(__VA_ARGS__) |
| #define vluxseg6ei32_v_f64m1_m | ( | ... | ) | __riscv_vluxseg6ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_i16m1 | ( | ... | ) | __riscv_vluxseg6ei32_v_i16m1(__VA_ARGS__) |
| #define vluxseg6ei32_v_i16m1_m | ( | ... | ) | __riscv_vluxseg6ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_i16mf2 | ( | ... | ) | __riscv_vluxseg6ei32_v_i16mf2(__VA_ARGS__) |
| #define vluxseg6ei32_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg6ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_i16mf4 | ( | ... | ) | __riscv_vluxseg6ei32_v_i16mf4(__VA_ARGS__) |
| #define vluxseg6ei32_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg6ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_i32m1 | ( | ... | ) | __riscv_vluxseg6ei32_v_i32m1(__VA_ARGS__) |
| #define vluxseg6ei32_v_i32m1_m | ( | ... | ) | __riscv_vluxseg6ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_i32mf2 | ( | ... | ) | __riscv_vluxseg6ei32_v_i32mf2(__VA_ARGS__) |
| #define vluxseg6ei32_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg6ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_i64m1 | ( | ... | ) | __riscv_vluxseg6ei32_v_i64m1(__VA_ARGS__) |
| #define vluxseg6ei32_v_i64m1_m | ( | ... | ) | __riscv_vluxseg6ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_i8m1 | ( | ... | ) | __riscv_vluxseg6ei32_v_i8m1(__VA_ARGS__) |
| #define vluxseg6ei32_v_i8m1_m | ( | ... | ) | __riscv_vluxseg6ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_i8mf2 | ( | ... | ) | __riscv_vluxseg6ei32_v_i8mf2(__VA_ARGS__) |
| #define vluxseg6ei32_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg6ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_i8mf4 | ( | ... | ) | __riscv_vluxseg6ei32_v_i8mf4(__VA_ARGS__) |
| #define vluxseg6ei32_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg6ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_i8mf8 | ( | ... | ) | __riscv_vluxseg6ei32_v_i8mf8(__VA_ARGS__) |
| #define vluxseg6ei32_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg6ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_u16m1 | ( | ... | ) | __riscv_vluxseg6ei32_v_u16m1(__VA_ARGS__) |
| #define vluxseg6ei32_v_u16m1_m | ( | ... | ) | __riscv_vluxseg6ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_u16mf2 | ( | ... | ) | __riscv_vluxseg6ei32_v_u16mf2(__VA_ARGS__) |
| #define vluxseg6ei32_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg6ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_u16mf4 | ( | ... | ) | __riscv_vluxseg6ei32_v_u16mf4(__VA_ARGS__) |
| #define vluxseg6ei32_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg6ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_u32m1 | ( | ... | ) | __riscv_vluxseg6ei32_v_u32m1(__VA_ARGS__) |
| #define vluxseg6ei32_v_u32m1_m | ( | ... | ) | __riscv_vluxseg6ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_u32mf2 | ( | ... | ) | __riscv_vluxseg6ei32_v_u32mf2(__VA_ARGS__) |
| #define vluxseg6ei32_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg6ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_u64m1 | ( | ... | ) | __riscv_vluxseg6ei32_v_u64m1(__VA_ARGS__) |
| #define vluxseg6ei32_v_u64m1_m | ( | ... | ) | __riscv_vluxseg6ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_u8m1 | ( | ... | ) | __riscv_vluxseg6ei32_v_u8m1(__VA_ARGS__) |
| #define vluxseg6ei32_v_u8m1_m | ( | ... | ) | __riscv_vluxseg6ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_u8mf2 | ( | ... | ) | __riscv_vluxseg6ei32_v_u8mf2(__VA_ARGS__) |
| #define vluxseg6ei32_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg6ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_u8mf4 | ( | ... | ) | __riscv_vluxseg6ei32_v_u8mf4(__VA_ARGS__) |
| #define vluxseg6ei32_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg6ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg6ei32_v_u8mf8 | ( | ... | ) | __riscv_vluxseg6ei32_v_u8mf8(__VA_ARGS__) |
| #define vluxseg6ei32_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg6ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_f16m1 | ( | ... | ) | __riscv_vluxseg6ei64_v_f16m1(__VA_ARGS__) |
| #define vluxseg6ei64_v_f16m1_m | ( | ... | ) | __riscv_vluxseg6ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_f16mf2 | ( | ... | ) | __riscv_vluxseg6ei64_v_f16mf2(__VA_ARGS__) |
| #define vluxseg6ei64_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg6ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_f16mf4 | ( | ... | ) | __riscv_vluxseg6ei64_v_f16mf4(__VA_ARGS__) |
| #define vluxseg6ei64_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg6ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_f32m1 | ( | ... | ) | __riscv_vluxseg6ei64_v_f32m1(__VA_ARGS__) |
| #define vluxseg6ei64_v_f32m1_m | ( | ... | ) | __riscv_vluxseg6ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_f32mf2 | ( | ... | ) | __riscv_vluxseg6ei64_v_f32mf2(__VA_ARGS__) |
| #define vluxseg6ei64_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg6ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_f64m1 | ( | ... | ) | __riscv_vluxseg6ei64_v_f64m1(__VA_ARGS__) |
| #define vluxseg6ei64_v_f64m1_m | ( | ... | ) | __riscv_vluxseg6ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_i16m1 | ( | ... | ) | __riscv_vluxseg6ei64_v_i16m1(__VA_ARGS__) |
| #define vluxseg6ei64_v_i16m1_m | ( | ... | ) | __riscv_vluxseg6ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_i16mf2 | ( | ... | ) | __riscv_vluxseg6ei64_v_i16mf2(__VA_ARGS__) |
| #define vluxseg6ei64_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg6ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_i16mf4 | ( | ... | ) | __riscv_vluxseg6ei64_v_i16mf4(__VA_ARGS__) |
| #define vluxseg6ei64_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg6ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_i32m1 | ( | ... | ) | __riscv_vluxseg6ei64_v_i32m1(__VA_ARGS__) |
| #define vluxseg6ei64_v_i32m1_m | ( | ... | ) | __riscv_vluxseg6ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_i32mf2 | ( | ... | ) | __riscv_vluxseg6ei64_v_i32mf2(__VA_ARGS__) |
| #define vluxseg6ei64_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg6ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_i64m1 | ( | ... | ) | __riscv_vluxseg6ei64_v_i64m1(__VA_ARGS__) |
| #define vluxseg6ei64_v_i64m1_m | ( | ... | ) | __riscv_vluxseg6ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_i8m1 | ( | ... | ) | __riscv_vluxseg6ei64_v_i8m1(__VA_ARGS__) |
| #define vluxseg6ei64_v_i8m1_m | ( | ... | ) | __riscv_vluxseg6ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_i8mf2 | ( | ... | ) | __riscv_vluxseg6ei64_v_i8mf2(__VA_ARGS__) |
| #define vluxseg6ei64_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg6ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_i8mf4 | ( | ... | ) | __riscv_vluxseg6ei64_v_i8mf4(__VA_ARGS__) |
| #define vluxseg6ei64_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg6ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_i8mf8 | ( | ... | ) | __riscv_vluxseg6ei64_v_i8mf8(__VA_ARGS__) |
| #define vluxseg6ei64_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg6ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_u16m1 | ( | ... | ) | __riscv_vluxseg6ei64_v_u16m1(__VA_ARGS__) |
| #define vluxseg6ei64_v_u16m1_m | ( | ... | ) | __riscv_vluxseg6ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_u16mf2 | ( | ... | ) | __riscv_vluxseg6ei64_v_u16mf2(__VA_ARGS__) |
| #define vluxseg6ei64_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg6ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_u16mf4 | ( | ... | ) | __riscv_vluxseg6ei64_v_u16mf4(__VA_ARGS__) |
| #define vluxseg6ei64_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg6ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_u32m1 | ( | ... | ) | __riscv_vluxseg6ei64_v_u32m1(__VA_ARGS__) |
| #define vluxseg6ei64_v_u32m1_m | ( | ... | ) | __riscv_vluxseg6ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_u32mf2 | ( | ... | ) | __riscv_vluxseg6ei64_v_u32mf2(__VA_ARGS__) |
| #define vluxseg6ei64_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg6ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_u64m1 | ( | ... | ) | __riscv_vluxseg6ei64_v_u64m1(__VA_ARGS__) |
| #define vluxseg6ei64_v_u64m1_m | ( | ... | ) | __riscv_vluxseg6ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_u8m1 | ( | ... | ) | __riscv_vluxseg6ei64_v_u8m1(__VA_ARGS__) |
| #define vluxseg6ei64_v_u8m1_m | ( | ... | ) | __riscv_vluxseg6ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_u8mf2 | ( | ... | ) | __riscv_vluxseg6ei64_v_u8mf2(__VA_ARGS__) |
| #define vluxseg6ei64_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg6ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_u8mf4 | ( | ... | ) | __riscv_vluxseg6ei64_v_u8mf4(__VA_ARGS__) |
| #define vluxseg6ei64_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg6ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg6ei64_v_u8mf8 | ( | ... | ) | __riscv_vluxseg6ei64_v_u8mf8(__VA_ARGS__) |
| #define vluxseg6ei64_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg6ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_f16m1 | ( | ... | ) | __riscv_vluxseg6ei8_v_f16m1(__VA_ARGS__) |
| #define vluxseg6ei8_v_f16m1_m | ( | ... | ) | __riscv_vluxseg6ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_f16mf2 | ( | ... | ) | __riscv_vluxseg6ei8_v_f16mf2(__VA_ARGS__) |
| #define vluxseg6ei8_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg6ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_f16mf4 | ( | ... | ) | __riscv_vluxseg6ei8_v_f16mf4(__VA_ARGS__) |
| #define vluxseg6ei8_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg6ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_f32m1 | ( | ... | ) | __riscv_vluxseg6ei8_v_f32m1(__VA_ARGS__) |
| #define vluxseg6ei8_v_f32m1_m | ( | ... | ) | __riscv_vluxseg6ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_f32mf2 | ( | ... | ) | __riscv_vluxseg6ei8_v_f32mf2(__VA_ARGS__) |
| #define vluxseg6ei8_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg6ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_f64m1 | ( | ... | ) | __riscv_vluxseg6ei8_v_f64m1(__VA_ARGS__) |
| #define vluxseg6ei8_v_f64m1_m | ( | ... | ) | __riscv_vluxseg6ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_i16m1 | ( | ... | ) | __riscv_vluxseg6ei8_v_i16m1(__VA_ARGS__) |
| #define vluxseg6ei8_v_i16m1_m | ( | ... | ) | __riscv_vluxseg6ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_i16mf2 | ( | ... | ) | __riscv_vluxseg6ei8_v_i16mf2(__VA_ARGS__) |
| #define vluxseg6ei8_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg6ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_i16mf4 | ( | ... | ) | __riscv_vluxseg6ei8_v_i16mf4(__VA_ARGS__) |
| #define vluxseg6ei8_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg6ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_i32m1 | ( | ... | ) | __riscv_vluxseg6ei8_v_i32m1(__VA_ARGS__) |
| #define vluxseg6ei8_v_i32m1_m | ( | ... | ) | __riscv_vluxseg6ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_i32mf2 | ( | ... | ) | __riscv_vluxseg6ei8_v_i32mf2(__VA_ARGS__) |
| #define vluxseg6ei8_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg6ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_i64m1 | ( | ... | ) | __riscv_vluxseg6ei8_v_i64m1(__VA_ARGS__) |
| #define vluxseg6ei8_v_i64m1_m | ( | ... | ) | __riscv_vluxseg6ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_i8m1 | ( | ... | ) | __riscv_vluxseg6ei8_v_i8m1(__VA_ARGS__) |
| #define vluxseg6ei8_v_i8m1_m | ( | ... | ) | __riscv_vluxseg6ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_i8mf2 | ( | ... | ) | __riscv_vluxseg6ei8_v_i8mf2(__VA_ARGS__) |
| #define vluxseg6ei8_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg6ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_i8mf4 | ( | ... | ) | __riscv_vluxseg6ei8_v_i8mf4(__VA_ARGS__) |
| #define vluxseg6ei8_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg6ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_i8mf8 | ( | ... | ) | __riscv_vluxseg6ei8_v_i8mf8(__VA_ARGS__) |
| #define vluxseg6ei8_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg6ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_u16m1 | ( | ... | ) | __riscv_vluxseg6ei8_v_u16m1(__VA_ARGS__) |
| #define vluxseg6ei8_v_u16m1_m | ( | ... | ) | __riscv_vluxseg6ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_u16mf2 | ( | ... | ) | __riscv_vluxseg6ei8_v_u16mf2(__VA_ARGS__) |
| #define vluxseg6ei8_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg6ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_u16mf4 | ( | ... | ) | __riscv_vluxseg6ei8_v_u16mf4(__VA_ARGS__) |
| #define vluxseg6ei8_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg6ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_u32m1 | ( | ... | ) | __riscv_vluxseg6ei8_v_u32m1(__VA_ARGS__) |
| #define vluxseg6ei8_v_u32m1_m | ( | ... | ) | __riscv_vluxseg6ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_u32mf2 | ( | ... | ) | __riscv_vluxseg6ei8_v_u32mf2(__VA_ARGS__) |
| #define vluxseg6ei8_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg6ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_u64m1 | ( | ... | ) | __riscv_vluxseg6ei8_v_u64m1(__VA_ARGS__) |
| #define vluxseg6ei8_v_u64m1_m | ( | ... | ) | __riscv_vluxseg6ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_u8m1 | ( | ... | ) | __riscv_vluxseg6ei8_v_u8m1(__VA_ARGS__) |
| #define vluxseg6ei8_v_u8m1_m | ( | ... | ) | __riscv_vluxseg6ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_u8mf2 | ( | ... | ) | __riscv_vluxseg6ei8_v_u8mf2(__VA_ARGS__) |
| #define vluxseg6ei8_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg6ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_u8mf4 | ( | ... | ) | __riscv_vluxseg6ei8_v_u8mf4(__VA_ARGS__) |
| #define vluxseg6ei8_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg6ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg6ei8_v_u8mf8 | ( | ... | ) | __riscv_vluxseg6ei8_v_u8mf8(__VA_ARGS__) |
| #define vluxseg6ei8_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg6ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_f16m1 | ( | ... | ) | __riscv_vluxseg7ei16_v_f16m1(__VA_ARGS__) |
| #define vluxseg7ei16_v_f16m1_m | ( | ... | ) | __riscv_vluxseg7ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_f16mf2 | ( | ... | ) | __riscv_vluxseg7ei16_v_f16mf2(__VA_ARGS__) |
| #define vluxseg7ei16_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg7ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_f16mf4 | ( | ... | ) | __riscv_vluxseg7ei16_v_f16mf4(__VA_ARGS__) |
| #define vluxseg7ei16_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg7ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_f32m1 | ( | ... | ) | __riscv_vluxseg7ei16_v_f32m1(__VA_ARGS__) |
| #define vluxseg7ei16_v_f32m1_m | ( | ... | ) | __riscv_vluxseg7ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_f32mf2 | ( | ... | ) | __riscv_vluxseg7ei16_v_f32mf2(__VA_ARGS__) |
| #define vluxseg7ei16_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg7ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_f64m1 | ( | ... | ) | __riscv_vluxseg7ei16_v_f64m1(__VA_ARGS__) |
| #define vluxseg7ei16_v_f64m1_m | ( | ... | ) | __riscv_vluxseg7ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_i16m1 | ( | ... | ) | __riscv_vluxseg7ei16_v_i16m1(__VA_ARGS__) |
| #define vluxseg7ei16_v_i16m1_m | ( | ... | ) | __riscv_vluxseg7ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_i16mf2 | ( | ... | ) | __riscv_vluxseg7ei16_v_i16mf2(__VA_ARGS__) |
| #define vluxseg7ei16_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg7ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_i16mf4 | ( | ... | ) | __riscv_vluxseg7ei16_v_i16mf4(__VA_ARGS__) |
| #define vluxseg7ei16_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg7ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_i32m1 | ( | ... | ) | __riscv_vluxseg7ei16_v_i32m1(__VA_ARGS__) |
| #define vluxseg7ei16_v_i32m1_m | ( | ... | ) | __riscv_vluxseg7ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_i32mf2 | ( | ... | ) | __riscv_vluxseg7ei16_v_i32mf2(__VA_ARGS__) |
| #define vluxseg7ei16_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg7ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_i64m1 | ( | ... | ) | __riscv_vluxseg7ei16_v_i64m1(__VA_ARGS__) |
| #define vluxseg7ei16_v_i64m1_m | ( | ... | ) | __riscv_vluxseg7ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_i8m1 | ( | ... | ) | __riscv_vluxseg7ei16_v_i8m1(__VA_ARGS__) |
| #define vluxseg7ei16_v_i8m1_m | ( | ... | ) | __riscv_vluxseg7ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_i8mf2 | ( | ... | ) | __riscv_vluxseg7ei16_v_i8mf2(__VA_ARGS__) |
| #define vluxseg7ei16_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg7ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_i8mf4 | ( | ... | ) | __riscv_vluxseg7ei16_v_i8mf4(__VA_ARGS__) |
| #define vluxseg7ei16_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg7ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_i8mf8 | ( | ... | ) | __riscv_vluxseg7ei16_v_i8mf8(__VA_ARGS__) |
| #define vluxseg7ei16_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg7ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_u16m1 | ( | ... | ) | __riscv_vluxseg7ei16_v_u16m1(__VA_ARGS__) |
| #define vluxseg7ei16_v_u16m1_m | ( | ... | ) | __riscv_vluxseg7ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_u16mf2 | ( | ... | ) | __riscv_vluxseg7ei16_v_u16mf2(__VA_ARGS__) |
| #define vluxseg7ei16_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg7ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_u16mf4 | ( | ... | ) | __riscv_vluxseg7ei16_v_u16mf4(__VA_ARGS__) |
| #define vluxseg7ei16_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg7ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_u32m1 | ( | ... | ) | __riscv_vluxseg7ei16_v_u32m1(__VA_ARGS__) |
| #define vluxseg7ei16_v_u32m1_m | ( | ... | ) | __riscv_vluxseg7ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_u32mf2 | ( | ... | ) | __riscv_vluxseg7ei16_v_u32mf2(__VA_ARGS__) |
| #define vluxseg7ei16_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg7ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_u64m1 | ( | ... | ) | __riscv_vluxseg7ei16_v_u64m1(__VA_ARGS__) |
| #define vluxseg7ei16_v_u64m1_m | ( | ... | ) | __riscv_vluxseg7ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_u8m1 | ( | ... | ) | __riscv_vluxseg7ei16_v_u8m1(__VA_ARGS__) |
| #define vluxseg7ei16_v_u8m1_m | ( | ... | ) | __riscv_vluxseg7ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_u8mf2 | ( | ... | ) | __riscv_vluxseg7ei16_v_u8mf2(__VA_ARGS__) |
| #define vluxseg7ei16_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg7ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_u8mf4 | ( | ... | ) | __riscv_vluxseg7ei16_v_u8mf4(__VA_ARGS__) |
| #define vluxseg7ei16_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg7ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg7ei16_v_u8mf8 | ( | ... | ) | __riscv_vluxseg7ei16_v_u8mf8(__VA_ARGS__) |
| #define vluxseg7ei16_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg7ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_f16m1 | ( | ... | ) | __riscv_vluxseg7ei32_v_f16m1(__VA_ARGS__) |
| #define vluxseg7ei32_v_f16m1_m | ( | ... | ) | __riscv_vluxseg7ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_f16mf2 | ( | ... | ) | __riscv_vluxseg7ei32_v_f16mf2(__VA_ARGS__) |
| #define vluxseg7ei32_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg7ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_f16mf4 | ( | ... | ) | __riscv_vluxseg7ei32_v_f16mf4(__VA_ARGS__) |
| #define vluxseg7ei32_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg7ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_f32m1 | ( | ... | ) | __riscv_vluxseg7ei32_v_f32m1(__VA_ARGS__) |
| #define vluxseg7ei32_v_f32m1_m | ( | ... | ) | __riscv_vluxseg7ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_f32mf2 | ( | ... | ) | __riscv_vluxseg7ei32_v_f32mf2(__VA_ARGS__) |
| #define vluxseg7ei32_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg7ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_f64m1 | ( | ... | ) | __riscv_vluxseg7ei32_v_f64m1(__VA_ARGS__) |
| #define vluxseg7ei32_v_f64m1_m | ( | ... | ) | __riscv_vluxseg7ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_i16m1 | ( | ... | ) | __riscv_vluxseg7ei32_v_i16m1(__VA_ARGS__) |
| #define vluxseg7ei32_v_i16m1_m | ( | ... | ) | __riscv_vluxseg7ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_i16mf2 | ( | ... | ) | __riscv_vluxseg7ei32_v_i16mf2(__VA_ARGS__) |
| #define vluxseg7ei32_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg7ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_i16mf4 | ( | ... | ) | __riscv_vluxseg7ei32_v_i16mf4(__VA_ARGS__) |
| #define vluxseg7ei32_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg7ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_i32m1 | ( | ... | ) | __riscv_vluxseg7ei32_v_i32m1(__VA_ARGS__) |
| #define vluxseg7ei32_v_i32m1_m | ( | ... | ) | __riscv_vluxseg7ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_i32mf2 | ( | ... | ) | __riscv_vluxseg7ei32_v_i32mf2(__VA_ARGS__) |
| #define vluxseg7ei32_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg7ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_i64m1 | ( | ... | ) | __riscv_vluxseg7ei32_v_i64m1(__VA_ARGS__) |
| #define vluxseg7ei32_v_i64m1_m | ( | ... | ) | __riscv_vluxseg7ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_i8m1 | ( | ... | ) | __riscv_vluxseg7ei32_v_i8m1(__VA_ARGS__) |
| #define vluxseg7ei32_v_i8m1_m | ( | ... | ) | __riscv_vluxseg7ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_i8mf2 | ( | ... | ) | __riscv_vluxseg7ei32_v_i8mf2(__VA_ARGS__) |
| #define vluxseg7ei32_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg7ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_i8mf4 | ( | ... | ) | __riscv_vluxseg7ei32_v_i8mf4(__VA_ARGS__) |
| #define vluxseg7ei32_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg7ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_i8mf8 | ( | ... | ) | __riscv_vluxseg7ei32_v_i8mf8(__VA_ARGS__) |
| #define vluxseg7ei32_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg7ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_u16m1 | ( | ... | ) | __riscv_vluxseg7ei32_v_u16m1(__VA_ARGS__) |
| #define vluxseg7ei32_v_u16m1_m | ( | ... | ) | __riscv_vluxseg7ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_u16mf2 | ( | ... | ) | __riscv_vluxseg7ei32_v_u16mf2(__VA_ARGS__) |
| #define vluxseg7ei32_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg7ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_u16mf4 | ( | ... | ) | __riscv_vluxseg7ei32_v_u16mf4(__VA_ARGS__) |
| #define vluxseg7ei32_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg7ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_u32m1 | ( | ... | ) | __riscv_vluxseg7ei32_v_u32m1(__VA_ARGS__) |
| #define vluxseg7ei32_v_u32m1_m | ( | ... | ) | __riscv_vluxseg7ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_u32mf2 | ( | ... | ) | __riscv_vluxseg7ei32_v_u32mf2(__VA_ARGS__) |
| #define vluxseg7ei32_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg7ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_u64m1 | ( | ... | ) | __riscv_vluxseg7ei32_v_u64m1(__VA_ARGS__) |
| #define vluxseg7ei32_v_u64m1_m | ( | ... | ) | __riscv_vluxseg7ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_u8m1 | ( | ... | ) | __riscv_vluxseg7ei32_v_u8m1(__VA_ARGS__) |
| #define vluxseg7ei32_v_u8m1_m | ( | ... | ) | __riscv_vluxseg7ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_u8mf2 | ( | ... | ) | __riscv_vluxseg7ei32_v_u8mf2(__VA_ARGS__) |
| #define vluxseg7ei32_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg7ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_u8mf4 | ( | ... | ) | __riscv_vluxseg7ei32_v_u8mf4(__VA_ARGS__) |
| #define vluxseg7ei32_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg7ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg7ei32_v_u8mf8 | ( | ... | ) | __riscv_vluxseg7ei32_v_u8mf8(__VA_ARGS__) |
| #define vluxseg7ei32_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg7ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_f16m1 | ( | ... | ) | __riscv_vluxseg7ei64_v_f16m1(__VA_ARGS__) |
| #define vluxseg7ei64_v_f16m1_m | ( | ... | ) | __riscv_vluxseg7ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_f16mf2 | ( | ... | ) | __riscv_vluxseg7ei64_v_f16mf2(__VA_ARGS__) |
| #define vluxseg7ei64_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg7ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_f16mf4 | ( | ... | ) | __riscv_vluxseg7ei64_v_f16mf4(__VA_ARGS__) |
| #define vluxseg7ei64_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg7ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_f32m1 | ( | ... | ) | __riscv_vluxseg7ei64_v_f32m1(__VA_ARGS__) |
| #define vluxseg7ei64_v_f32m1_m | ( | ... | ) | __riscv_vluxseg7ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_f32mf2 | ( | ... | ) | __riscv_vluxseg7ei64_v_f32mf2(__VA_ARGS__) |
| #define vluxseg7ei64_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg7ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_f64m1 | ( | ... | ) | __riscv_vluxseg7ei64_v_f64m1(__VA_ARGS__) |
| #define vluxseg7ei64_v_f64m1_m | ( | ... | ) | __riscv_vluxseg7ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_i16m1 | ( | ... | ) | __riscv_vluxseg7ei64_v_i16m1(__VA_ARGS__) |
| #define vluxseg7ei64_v_i16m1_m | ( | ... | ) | __riscv_vluxseg7ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_i16mf2 | ( | ... | ) | __riscv_vluxseg7ei64_v_i16mf2(__VA_ARGS__) |
| #define vluxseg7ei64_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg7ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_i16mf4 | ( | ... | ) | __riscv_vluxseg7ei64_v_i16mf4(__VA_ARGS__) |
| #define vluxseg7ei64_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg7ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_i32m1 | ( | ... | ) | __riscv_vluxseg7ei64_v_i32m1(__VA_ARGS__) |
| #define vluxseg7ei64_v_i32m1_m | ( | ... | ) | __riscv_vluxseg7ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_i32mf2 | ( | ... | ) | __riscv_vluxseg7ei64_v_i32mf2(__VA_ARGS__) |
| #define vluxseg7ei64_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg7ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_i64m1 | ( | ... | ) | __riscv_vluxseg7ei64_v_i64m1(__VA_ARGS__) |
| #define vluxseg7ei64_v_i64m1_m | ( | ... | ) | __riscv_vluxseg7ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_i8m1 | ( | ... | ) | __riscv_vluxseg7ei64_v_i8m1(__VA_ARGS__) |
| #define vluxseg7ei64_v_i8m1_m | ( | ... | ) | __riscv_vluxseg7ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_i8mf2 | ( | ... | ) | __riscv_vluxseg7ei64_v_i8mf2(__VA_ARGS__) |
| #define vluxseg7ei64_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg7ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_i8mf4 | ( | ... | ) | __riscv_vluxseg7ei64_v_i8mf4(__VA_ARGS__) |
| #define vluxseg7ei64_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg7ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_i8mf8 | ( | ... | ) | __riscv_vluxseg7ei64_v_i8mf8(__VA_ARGS__) |
| #define vluxseg7ei64_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg7ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_u16m1 | ( | ... | ) | __riscv_vluxseg7ei64_v_u16m1(__VA_ARGS__) |
| #define vluxseg7ei64_v_u16m1_m | ( | ... | ) | __riscv_vluxseg7ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_u16mf2 | ( | ... | ) | __riscv_vluxseg7ei64_v_u16mf2(__VA_ARGS__) |
| #define vluxseg7ei64_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg7ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_u16mf4 | ( | ... | ) | __riscv_vluxseg7ei64_v_u16mf4(__VA_ARGS__) |
| #define vluxseg7ei64_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg7ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_u32m1 | ( | ... | ) | __riscv_vluxseg7ei64_v_u32m1(__VA_ARGS__) |
| #define vluxseg7ei64_v_u32m1_m | ( | ... | ) | __riscv_vluxseg7ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_u32mf2 | ( | ... | ) | __riscv_vluxseg7ei64_v_u32mf2(__VA_ARGS__) |
| #define vluxseg7ei64_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg7ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_u64m1 | ( | ... | ) | __riscv_vluxseg7ei64_v_u64m1(__VA_ARGS__) |
| #define vluxseg7ei64_v_u64m1_m | ( | ... | ) | __riscv_vluxseg7ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_u8m1 | ( | ... | ) | __riscv_vluxseg7ei64_v_u8m1(__VA_ARGS__) |
| #define vluxseg7ei64_v_u8m1_m | ( | ... | ) | __riscv_vluxseg7ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_u8mf2 | ( | ... | ) | __riscv_vluxseg7ei64_v_u8mf2(__VA_ARGS__) |
| #define vluxseg7ei64_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg7ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_u8mf4 | ( | ... | ) | __riscv_vluxseg7ei64_v_u8mf4(__VA_ARGS__) |
| #define vluxseg7ei64_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg7ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg7ei64_v_u8mf8 | ( | ... | ) | __riscv_vluxseg7ei64_v_u8mf8(__VA_ARGS__) |
| #define vluxseg7ei64_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg7ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_f16m1 | ( | ... | ) | __riscv_vluxseg7ei8_v_f16m1(__VA_ARGS__) |
| #define vluxseg7ei8_v_f16m1_m | ( | ... | ) | __riscv_vluxseg7ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_f16mf2 | ( | ... | ) | __riscv_vluxseg7ei8_v_f16mf2(__VA_ARGS__) |
| #define vluxseg7ei8_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg7ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_f16mf4 | ( | ... | ) | __riscv_vluxseg7ei8_v_f16mf4(__VA_ARGS__) |
| #define vluxseg7ei8_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg7ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_f32m1 | ( | ... | ) | __riscv_vluxseg7ei8_v_f32m1(__VA_ARGS__) |
| #define vluxseg7ei8_v_f32m1_m | ( | ... | ) | __riscv_vluxseg7ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_f32mf2 | ( | ... | ) | __riscv_vluxseg7ei8_v_f32mf2(__VA_ARGS__) |
| #define vluxseg7ei8_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg7ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_f64m1 | ( | ... | ) | __riscv_vluxseg7ei8_v_f64m1(__VA_ARGS__) |
| #define vluxseg7ei8_v_f64m1_m | ( | ... | ) | __riscv_vluxseg7ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_i16m1 | ( | ... | ) | __riscv_vluxseg7ei8_v_i16m1(__VA_ARGS__) |
| #define vluxseg7ei8_v_i16m1_m | ( | ... | ) | __riscv_vluxseg7ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_i16mf2 | ( | ... | ) | __riscv_vluxseg7ei8_v_i16mf2(__VA_ARGS__) |
| #define vluxseg7ei8_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg7ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_i16mf4 | ( | ... | ) | __riscv_vluxseg7ei8_v_i16mf4(__VA_ARGS__) |
| #define vluxseg7ei8_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg7ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_i32m1 | ( | ... | ) | __riscv_vluxseg7ei8_v_i32m1(__VA_ARGS__) |
| #define vluxseg7ei8_v_i32m1_m | ( | ... | ) | __riscv_vluxseg7ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_i32mf2 | ( | ... | ) | __riscv_vluxseg7ei8_v_i32mf2(__VA_ARGS__) |
| #define vluxseg7ei8_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg7ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_i64m1 | ( | ... | ) | __riscv_vluxseg7ei8_v_i64m1(__VA_ARGS__) |
| #define vluxseg7ei8_v_i64m1_m | ( | ... | ) | __riscv_vluxseg7ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_i8m1 | ( | ... | ) | __riscv_vluxseg7ei8_v_i8m1(__VA_ARGS__) |
| #define vluxseg7ei8_v_i8m1_m | ( | ... | ) | __riscv_vluxseg7ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_i8mf2 | ( | ... | ) | __riscv_vluxseg7ei8_v_i8mf2(__VA_ARGS__) |
| #define vluxseg7ei8_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg7ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_i8mf4 | ( | ... | ) | __riscv_vluxseg7ei8_v_i8mf4(__VA_ARGS__) |
| #define vluxseg7ei8_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg7ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_i8mf8 | ( | ... | ) | __riscv_vluxseg7ei8_v_i8mf8(__VA_ARGS__) |
| #define vluxseg7ei8_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg7ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_u16m1 | ( | ... | ) | __riscv_vluxseg7ei8_v_u16m1(__VA_ARGS__) |
| #define vluxseg7ei8_v_u16m1_m | ( | ... | ) | __riscv_vluxseg7ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_u16mf2 | ( | ... | ) | __riscv_vluxseg7ei8_v_u16mf2(__VA_ARGS__) |
| #define vluxseg7ei8_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg7ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_u16mf4 | ( | ... | ) | __riscv_vluxseg7ei8_v_u16mf4(__VA_ARGS__) |
| #define vluxseg7ei8_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg7ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_u32m1 | ( | ... | ) | __riscv_vluxseg7ei8_v_u32m1(__VA_ARGS__) |
| #define vluxseg7ei8_v_u32m1_m | ( | ... | ) | __riscv_vluxseg7ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_u32mf2 | ( | ... | ) | __riscv_vluxseg7ei8_v_u32mf2(__VA_ARGS__) |
| #define vluxseg7ei8_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg7ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_u64m1 | ( | ... | ) | __riscv_vluxseg7ei8_v_u64m1(__VA_ARGS__) |
| #define vluxseg7ei8_v_u64m1_m | ( | ... | ) | __riscv_vluxseg7ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_u8m1 | ( | ... | ) | __riscv_vluxseg7ei8_v_u8m1(__VA_ARGS__) |
| #define vluxseg7ei8_v_u8m1_m | ( | ... | ) | __riscv_vluxseg7ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_u8mf2 | ( | ... | ) | __riscv_vluxseg7ei8_v_u8mf2(__VA_ARGS__) |
| #define vluxseg7ei8_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg7ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_u8mf4 | ( | ... | ) | __riscv_vluxseg7ei8_v_u8mf4(__VA_ARGS__) |
| #define vluxseg7ei8_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg7ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg7ei8_v_u8mf8 | ( | ... | ) | __riscv_vluxseg7ei8_v_u8mf8(__VA_ARGS__) |
| #define vluxseg7ei8_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg7ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_f16m1 | ( | ... | ) | __riscv_vluxseg8ei16_v_f16m1(__VA_ARGS__) |
| #define vluxseg8ei16_v_f16m1_m | ( | ... | ) | __riscv_vluxseg8ei16_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_f16mf2 | ( | ... | ) | __riscv_vluxseg8ei16_v_f16mf2(__VA_ARGS__) |
| #define vluxseg8ei16_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg8ei16_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_f16mf4 | ( | ... | ) | __riscv_vluxseg8ei16_v_f16mf4(__VA_ARGS__) |
| #define vluxseg8ei16_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg8ei16_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_f32m1 | ( | ... | ) | __riscv_vluxseg8ei16_v_f32m1(__VA_ARGS__) |
| #define vluxseg8ei16_v_f32m1_m | ( | ... | ) | __riscv_vluxseg8ei16_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_f32mf2 | ( | ... | ) | __riscv_vluxseg8ei16_v_f32mf2(__VA_ARGS__) |
| #define vluxseg8ei16_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg8ei16_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_f64m1 | ( | ... | ) | __riscv_vluxseg8ei16_v_f64m1(__VA_ARGS__) |
| #define vluxseg8ei16_v_f64m1_m | ( | ... | ) | __riscv_vluxseg8ei16_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_i16m1 | ( | ... | ) | __riscv_vluxseg8ei16_v_i16m1(__VA_ARGS__) |
| #define vluxseg8ei16_v_i16m1_m | ( | ... | ) | __riscv_vluxseg8ei16_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_i16mf2 | ( | ... | ) | __riscv_vluxseg8ei16_v_i16mf2(__VA_ARGS__) |
| #define vluxseg8ei16_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg8ei16_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_i16mf4 | ( | ... | ) | __riscv_vluxseg8ei16_v_i16mf4(__VA_ARGS__) |
| #define vluxseg8ei16_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg8ei16_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_i32m1 | ( | ... | ) | __riscv_vluxseg8ei16_v_i32m1(__VA_ARGS__) |
| #define vluxseg8ei16_v_i32m1_m | ( | ... | ) | __riscv_vluxseg8ei16_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_i32mf2 | ( | ... | ) | __riscv_vluxseg8ei16_v_i32mf2(__VA_ARGS__) |
| #define vluxseg8ei16_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg8ei16_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_i64m1 | ( | ... | ) | __riscv_vluxseg8ei16_v_i64m1(__VA_ARGS__) |
| #define vluxseg8ei16_v_i64m1_m | ( | ... | ) | __riscv_vluxseg8ei16_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_i8m1 | ( | ... | ) | __riscv_vluxseg8ei16_v_i8m1(__VA_ARGS__) |
| #define vluxseg8ei16_v_i8m1_m | ( | ... | ) | __riscv_vluxseg8ei16_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_i8mf2 | ( | ... | ) | __riscv_vluxseg8ei16_v_i8mf2(__VA_ARGS__) |
| #define vluxseg8ei16_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg8ei16_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_i8mf4 | ( | ... | ) | __riscv_vluxseg8ei16_v_i8mf4(__VA_ARGS__) |
| #define vluxseg8ei16_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg8ei16_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_i8mf8 | ( | ... | ) | __riscv_vluxseg8ei16_v_i8mf8(__VA_ARGS__) |
| #define vluxseg8ei16_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg8ei16_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_u16m1 | ( | ... | ) | __riscv_vluxseg8ei16_v_u16m1(__VA_ARGS__) |
| #define vluxseg8ei16_v_u16m1_m | ( | ... | ) | __riscv_vluxseg8ei16_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_u16mf2 | ( | ... | ) | __riscv_vluxseg8ei16_v_u16mf2(__VA_ARGS__) |
| #define vluxseg8ei16_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg8ei16_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_u16mf4 | ( | ... | ) | __riscv_vluxseg8ei16_v_u16mf4(__VA_ARGS__) |
| #define vluxseg8ei16_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg8ei16_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_u32m1 | ( | ... | ) | __riscv_vluxseg8ei16_v_u32m1(__VA_ARGS__) |
| #define vluxseg8ei16_v_u32m1_m | ( | ... | ) | __riscv_vluxseg8ei16_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_u32mf2 | ( | ... | ) | __riscv_vluxseg8ei16_v_u32mf2(__VA_ARGS__) |
| #define vluxseg8ei16_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg8ei16_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_u64m1 | ( | ... | ) | __riscv_vluxseg8ei16_v_u64m1(__VA_ARGS__) |
| #define vluxseg8ei16_v_u64m1_m | ( | ... | ) | __riscv_vluxseg8ei16_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_u8m1 | ( | ... | ) | __riscv_vluxseg8ei16_v_u8m1(__VA_ARGS__) |
| #define vluxseg8ei16_v_u8m1_m | ( | ... | ) | __riscv_vluxseg8ei16_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_u8mf2 | ( | ... | ) | __riscv_vluxseg8ei16_v_u8mf2(__VA_ARGS__) |
| #define vluxseg8ei16_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg8ei16_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_u8mf4 | ( | ... | ) | __riscv_vluxseg8ei16_v_u8mf4(__VA_ARGS__) |
| #define vluxseg8ei16_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg8ei16_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg8ei16_v_u8mf8 | ( | ... | ) | __riscv_vluxseg8ei16_v_u8mf8(__VA_ARGS__) |
| #define vluxseg8ei16_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg8ei16_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_f16m1 | ( | ... | ) | __riscv_vluxseg8ei32_v_f16m1(__VA_ARGS__) |
| #define vluxseg8ei32_v_f16m1_m | ( | ... | ) | __riscv_vluxseg8ei32_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_f16mf2 | ( | ... | ) | __riscv_vluxseg8ei32_v_f16mf2(__VA_ARGS__) |
| #define vluxseg8ei32_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg8ei32_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_f16mf4 | ( | ... | ) | __riscv_vluxseg8ei32_v_f16mf4(__VA_ARGS__) |
| #define vluxseg8ei32_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg8ei32_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_f32m1 | ( | ... | ) | __riscv_vluxseg8ei32_v_f32m1(__VA_ARGS__) |
| #define vluxseg8ei32_v_f32m1_m | ( | ... | ) | __riscv_vluxseg8ei32_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_f32mf2 | ( | ... | ) | __riscv_vluxseg8ei32_v_f32mf2(__VA_ARGS__) |
| #define vluxseg8ei32_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg8ei32_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_f64m1 | ( | ... | ) | __riscv_vluxseg8ei32_v_f64m1(__VA_ARGS__) |
| #define vluxseg8ei32_v_f64m1_m | ( | ... | ) | __riscv_vluxseg8ei32_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_i16m1 | ( | ... | ) | __riscv_vluxseg8ei32_v_i16m1(__VA_ARGS__) |
| #define vluxseg8ei32_v_i16m1_m | ( | ... | ) | __riscv_vluxseg8ei32_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_i16mf2 | ( | ... | ) | __riscv_vluxseg8ei32_v_i16mf2(__VA_ARGS__) |
| #define vluxseg8ei32_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg8ei32_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_i16mf4 | ( | ... | ) | __riscv_vluxseg8ei32_v_i16mf4(__VA_ARGS__) |
| #define vluxseg8ei32_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg8ei32_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_i32m1 | ( | ... | ) | __riscv_vluxseg8ei32_v_i32m1(__VA_ARGS__) |
| #define vluxseg8ei32_v_i32m1_m | ( | ... | ) | __riscv_vluxseg8ei32_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_i32mf2 | ( | ... | ) | __riscv_vluxseg8ei32_v_i32mf2(__VA_ARGS__) |
| #define vluxseg8ei32_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg8ei32_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_i64m1 | ( | ... | ) | __riscv_vluxseg8ei32_v_i64m1(__VA_ARGS__) |
| #define vluxseg8ei32_v_i64m1_m | ( | ... | ) | __riscv_vluxseg8ei32_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_i8m1 | ( | ... | ) | __riscv_vluxseg8ei32_v_i8m1(__VA_ARGS__) |
| #define vluxseg8ei32_v_i8m1_m | ( | ... | ) | __riscv_vluxseg8ei32_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_i8mf2 | ( | ... | ) | __riscv_vluxseg8ei32_v_i8mf2(__VA_ARGS__) |
| #define vluxseg8ei32_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg8ei32_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_i8mf4 | ( | ... | ) | __riscv_vluxseg8ei32_v_i8mf4(__VA_ARGS__) |
| #define vluxseg8ei32_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg8ei32_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_i8mf8 | ( | ... | ) | __riscv_vluxseg8ei32_v_i8mf8(__VA_ARGS__) |
| #define vluxseg8ei32_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg8ei32_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_u16m1 | ( | ... | ) | __riscv_vluxseg8ei32_v_u16m1(__VA_ARGS__) |
| #define vluxseg8ei32_v_u16m1_m | ( | ... | ) | __riscv_vluxseg8ei32_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_u16mf2 | ( | ... | ) | __riscv_vluxseg8ei32_v_u16mf2(__VA_ARGS__) |
| #define vluxseg8ei32_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg8ei32_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_u16mf4 | ( | ... | ) | __riscv_vluxseg8ei32_v_u16mf4(__VA_ARGS__) |
| #define vluxseg8ei32_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg8ei32_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_u32m1 | ( | ... | ) | __riscv_vluxseg8ei32_v_u32m1(__VA_ARGS__) |
| #define vluxseg8ei32_v_u32m1_m | ( | ... | ) | __riscv_vluxseg8ei32_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_u32mf2 | ( | ... | ) | __riscv_vluxseg8ei32_v_u32mf2(__VA_ARGS__) |
| #define vluxseg8ei32_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg8ei32_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_u64m1 | ( | ... | ) | __riscv_vluxseg8ei32_v_u64m1(__VA_ARGS__) |
| #define vluxseg8ei32_v_u64m1_m | ( | ... | ) | __riscv_vluxseg8ei32_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_u8m1 | ( | ... | ) | __riscv_vluxseg8ei32_v_u8m1(__VA_ARGS__) |
| #define vluxseg8ei32_v_u8m1_m | ( | ... | ) | __riscv_vluxseg8ei32_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_u8mf2 | ( | ... | ) | __riscv_vluxseg8ei32_v_u8mf2(__VA_ARGS__) |
| #define vluxseg8ei32_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg8ei32_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_u8mf4 | ( | ... | ) | __riscv_vluxseg8ei32_v_u8mf4(__VA_ARGS__) |
| #define vluxseg8ei32_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg8ei32_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg8ei32_v_u8mf8 | ( | ... | ) | __riscv_vluxseg8ei32_v_u8mf8(__VA_ARGS__) |
| #define vluxseg8ei32_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg8ei32_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_f16m1 | ( | ... | ) | __riscv_vluxseg8ei64_v_f16m1(__VA_ARGS__) |
| #define vluxseg8ei64_v_f16m1_m | ( | ... | ) | __riscv_vluxseg8ei64_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_f16mf2 | ( | ... | ) | __riscv_vluxseg8ei64_v_f16mf2(__VA_ARGS__) |
| #define vluxseg8ei64_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg8ei64_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_f16mf4 | ( | ... | ) | __riscv_vluxseg8ei64_v_f16mf4(__VA_ARGS__) |
| #define vluxseg8ei64_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg8ei64_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_f32m1 | ( | ... | ) | __riscv_vluxseg8ei64_v_f32m1(__VA_ARGS__) |
| #define vluxseg8ei64_v_f32m1_m | ( | ... | ) | __riscv_vluxseg8ei64_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_f32mf2 | ( | ... | ) | __riscv_vluxseg8ei64_v_f32mf2(__VA_ARGS__) |
| #define vluxseg8ei64_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg8ei64_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_f64m1 | ( | ... | ) | __riscv_vluxseg8ei64_v_f64m1(__VA_ARGS__) |
| #define vluxseg8ei64_v_f64m1_m | ( | ... | ) | __riscv_vluxseg8ei64_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_i16m1 | ( | ... | ) | __riscv_vluxseg8ei64_v_i16m1(__VA_ARGS__) |
| #define vluxseg8ei64_v_i16m1_m | ( | ... | ) | __riscv_vluxseg8ei64_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_i16mf2 | ( | ... | ) | __riscv_vluxseg8ei64_v_i16mf2(__VA_ARGS__) |
| #define vluxseg8ei64_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg8ei64_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_i16mf4 | ( | ... | ) | __riscv_vluxseg8ei64_v_i16mf4(__VA_ARGS__) |
| #define vluxseg8ei64_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg8ei64_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_i32m1 | ( | ... | ) | __riscv_vluxseg8ei64_v_i32m1(__VA_ARGS__) |
| #define vluxseg8ei64_v_i32m1_m | ( | ... | ) | __riscv_vluxseg8ei64_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_i32mf2 | ( | ... | ) | __riscv_vluxseg8ei64_v_i32mf2(__VA_ARGS__) |
| #define vluxseg8ei64_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg8ei64_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_i64m1 | ( | ... | ) | __riscv_vluxseg8ei64_v_i64m1(__VA_ARGS__) |
| #define vluxseg8ei64_v_i64m1_m | ( | ... | ) | __riscv_vluxseg8ei64_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_i8m1 | ( | ... | ) | __riscv_vluxseg8ei64_v_i8m1(__VA_ARGS__) |
| #define vluxseg8ei64_v_i8m1_m | ( | ... | ) | __riscv_vluxseg8ei64_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_i8mf2 | ( | ... | ) | __riscv_vluxseg8ei64_v_i8mf2(__VA_ARGS__) |
| #define vluxseg8ei64_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg8ei64_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_i8mf4 | ( | ... | ) | __riscv_vluxseg8ei64_v_i8mf4(__VA_ARGS__) |
| #define vluxseg8ei64_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg8ei64_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_i8mf8 | ( | ... | ) | __riscv_vluxseg8ei64_v_i8mf8(__VA_ARGS__) |
| #define vluxseg8ei64_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg8ei64_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_u16m1 | ( | ... | ) | __riscv_vluxseg8ei64_v_u16m1(__VA_ARGS__) |
| #define vluxseg8ei64_v_u16m1_m | ( | ... | ) | __riscv_vluxseg8ei64_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_u16mf2 | ( | ... | ) | __riscv_vluxseg8ei64_v_u16mf2(__VA_ARGS__) |
| #define vluxseg8ei64_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg8ei64_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_u16mf4 | ( | ... | ) | __riscv_vluxseg8ei64_v_u16mf4(__VA_ARGS__) |
| #define vluxseg8ei64_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg8ei64_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_u32m1 | ( | ... | ) | __riscv_vluxseg8ei64_v_u32m1(__VA_ARGS__) |
| #define vluxseg8ei64_v_u32m1_m | ( | ... | ) | __riscv_vluxseg8ei64_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_u32mf2 | ( | ... | ) | __riscv_vluxseg8ei64_v_u32mf2(__VA_ARGS__) |
| #define vluxseg8ei64_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg8ei64_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_u64m1 | ( | ... | ) | __riscv_vluxseg8ei64_v_u64m1(__VA_ARGS__) |
| #define vluxseg8ei64_v_u64m1_m | ( | ... | ) | __riscv_vluxseg8ei64_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_u8m1 | ( | ... | ) | __riscv_vluxseg8ei64_v_u8m1(__VA_ARGS__) |
| #define vluxseg8ei64_v_u8m1_m | ( | ... | ) | __riscv_vluxseg8ei64_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_u8mf2 | ( | ... | ) | __riscv_vluxseg8ei64_v_u8mf2(__VA_ARGS__) |
| #define vluxseg8ei64_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg8ei64_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_u8mf4 | ( | ... | ) | __riscv_vluxseg8ei64_v_u8mf4(__VA_ARGS__) |
| #define vluxseg8ei64_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg8ei64_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg8ei64_v_u8mf8 | ( | ... | ) | __riscv_vluxseg8ei64_v_u8mf8(__VA_ARGS__) |
| #define vluxseg8ei64_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg8ei64_v_u8mf8_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_f16m1 | ( | ... | ) | __riscv_vluxseg8ei8_v_f16m1(__VA_ARGS__) |
| #define vluxseg8ei8_v_f16m1_m | ( | ... | ) | __riscv_vluxseg8ei8_v_f16m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_f16mf2 | ( | ... | ) | __riscv_vluxseg8ei8_v_f16mf2(__VA_ARGS__) |
| #define vluxseg8ei8_v_f16mf2_m | ( | ... | ) | __riscv_vluxseg8ei8_v_f16mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_f16mf4 | ( | ... | ) | __riscv_vluxseg8ei8_v_f16mf4(__VA_ARGS__) |
| #define vluxseg8ei8_v_f16mf4_m | ( | ... | ) | __riscv_vluxseg8ei8_v_f16mf4_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_f32m1 | ( | ... | ) | __riscv_vluxseg8ei8_v_f32m1(__VA_ARGS__) |
| #define vluxseg8ei8_v_f32m1_m | ( | ... | ) | __riscv_vluxseg8ei8_v_f32m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_f32mf2 | ( | ... | ) | __riscv_vluxseg8ei8_v_f32mf2(__VA_ARGS__) |
| #define vluxseg8ei8_v_f32mf2_m | ( | ... | ) | __riscv_vluxseg8ei8_v_f32mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_f64m1 | ( | ... | ) | __riscv_vluxseg8ei8_v_f64m1(__VA_ARGS__) |
| #define vluxseg8ei8_v_f64m1_m | ( | ... | ) | __riscv_vluxseg8ei8_v_f64m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_i16m1 | ( | ... | ) | __riscv_vluxseg8ei8_v_i16m1(__VA_ARGS__) |
| #define vluxseg8ei8_v_i16m1_m | ( | ... | ) | __riscv_vluxseg8ei8_v_i16m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_i16mf2 | ( | ... | ) | __riscv_vluxseg8ei8_v_i16mf2(__VA_ARGS__) |
| #define vluxseg8ei8_v_i16mf2_m | ( | ... | ) | __riscv_vluxseg8ei8_v_i16mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_i16mf4 | ( | ... | ) | __riscv_vluxseg8ei8_v_i16mf4(__VA_ARGS__) |
| #define vluxseg8ei8_v_i16mf4_m | ( | ... | ) | __riscv_vluxseg8ei8_v_i16mf4_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_i32m1 | ( | ... | ) | __riscv_vluxseg8ei8_v_i32m1(__VA_ARGS__) |
| #define vluxseg8ei8_v_i32m1_m | ( | ... | ) | __riscv_vluxseg8ei8_v_i32m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_i32mf2 | ( | ... | ) | __riscv_vluxseg8ei8_v_i32mf2(__VA_ARGS__) |
| #define vluxseg8ei8_v_i32mf2_m | ( | ... | ) | __riscv_vluxseg8ei8_v_i32mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_i64m1 | ( | ... | ) | __riscv_vluxseg8ei8_v_i64m1(__VA_ARGS__) |
| #define vluxseg8ei8_v_i64m1_m | ( | ... | ) | __riscv_vluxseg8ei8_v_i64m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_i8m1 | ( | ... | ) | __riscv_vluxseg8ei8_v_i8m1(__VA_ARGS__) |
| #define vluxseg8ei8_v_i8m1_m | ( | ... | ) | __riscv_vluxseg8ei8_v_i8m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_i8mf2 | ( | ... | ) | __riscv_vluxseg8ei8_v_i8mf2(__VA_ARGS__) |
| #define vluxseg8ei8_v_i8mf2_m | ( | ... | ) | __riscv_vluxseg8ei8_v_i8mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_i8mf4 | ( | ... | ) | __riscv_vluxseg8ei8_v_i8mf4(__VA_ARGS__) |
| #define vluxseg8ei8_v_i8mf4_m | ( | ... | ) | __riscv_vluxseg8ei8_v_i8mf4_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_i8mf8 | ( | ... | ) | __riscv_vluxseg8ei8_v_i8mf8(__VA_ARGS__) |
| #define vluxseg8ei8_v_i8mf8_m | ( | ... | ) | __riscv_vluxseg8ei8_v_i8mf8_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_u16m1 | ( | ... | ) | __riscv_vluxseg8ei8_v_u16m1(__VA_ARGS__) |
| #define vluxseg8ei8_v_u16m1_m | ( | ... | ) | __riscv_vluxseg8ei8_v_u16m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_u16mf2 | ( | ... | ) | __riscv_vluxseg8ei8_v_u16mf2(__VA_ARGS__) |
| #define vluxseg8ei8_v_u16mf2_m | ( | ... | ) | __riscv_vluxseg8ei8_v_u16mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_u16mf4 | ( | ... | ) | __riscv_vluxseg8ei8_v_u16mf4(__VA_ARGS__) |
| #define vluxseg8ei8_v_u16mf4_m | ( | ... | ) | __riscv_vluxseg8ei8_v_u16mf4_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_u32m1 | ( | ... | ) | __riscv_vluxseg8ei8_v_u32m1(__VA_ARGS__) |
| #define vluxseg8ei8_v_u32m1_m | ( | ... | ) | __riscv_vluxseg8ei8_v_u32m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_u32mf2 | ( | ... | ) | __riscv_vluxseg8ei8_v_u32mf2(__VA_ARGS__) |
| #define vluxseg8ei8_v_u32mf2_m | ( | ... | ) | __riscv_vluxseg8ei8_v_u32mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_u64m1 | ( | ... | ) | __riscv_vluxseg8ei8_v_u64m1(__VA_ARGS__) |
| #define vluxseg8ei8_v_u64m1_m | ( | ... | ) | __riscv_vluxseg8ei8_v_u64m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_u8m1 | ( | ... | ) | __riscv_vluxseg8ei8_v_u8m1(__VA_ARGS__) |
| #define vluxseg8ei8_v_u8m1_m | ( | ... | ) | __riscv_vluxseg8ei8_v_u8m1_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_u8mf2 | ( | ... | ) | __riscv_vluxseg8ei8_v_u8mf2(__VA_ARGS__) |
| #define vluxseg8ei8_v_u8mf2_m | ( | ... | ) | __riscv_vluxseg8ei8_v_u8mf2_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_u8mf4 | ( | ... | ) | __riscv_vluxseg8ei8_v_u8mf4(__VA_ARGS__) |
| #define vluxseg8ei8_v_u8mf4_m | ( | ... | ) | __riscv_vluxseg8ei8_v_u8mf4_tumu(__VA_ARGS__) |
| #define vluxseg8ei8_v_u8mf8 | ( | ... | ) | __riscv_vluxseg8ei8_v_u8mf8(__VA_ARGS__) |
| #define vluxseg8ei8_v_u8mf8_m | ( | ... | ) | __riscv_vluxseg8ei8_v_u8mf8_tumu(__VA_ARGS__) |
| #define vmacc_vv_i16m1 | ( | ... | ) | __riscv_vmacc_vv_i16m1_tu(__VA_ARGS__) |
| #define vmacc_vv_i16m1_m | ( | ... | ) | __riscv_vmacc_vv_i16m1_tumu(__VA_ARGS__) |
| #define vmacc_vv_i16m2 | ( | ... | ) | __riscv_vmacc_vv_i16m2_tu(__VA_ARGS__) |
| #define vmacc_vv_i16m2_m | ( | ... | ) | __riscv_vmacc_vv_i16m2_tumu(__VA_ARGS__) |
| #define vmacc_vv_i16m4 | ( | ... | ) | __riscv_vmacc_vv_i16m4_tu(__VA_ARGS__) |
| #define vmacc_vv_i16m4_m | ( | ... | ) | __riscv_vmacc_vv_i16m4_tumu(__VA_ARGS__) |
| #define vmacc_vv_i16m8 | ( | ... | ) | __riscv_vmacc_vv_i16m8_tu(__VA_ARGS__) |
| #define vmacc_vv_i16m8_m | ( | ... | ) | __riscv_vmacc_vv_i16m8_tumu(__VA_ARGS__) |
| #define vmacc_vv_i16mf2 | ( | ... | ) | __riscv_vmacc_vv_i16mf2_tu(__VA_ARGS__) |
| #define vmacc_vv_i16mf2_m | ( | ... | ) | __riscv_vmacc_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vmacc_vv_i16mf4 | ( | ... | ) | __riscv_vmacc_vv_i16mf4_tu(__VA_ARGS__) |
| #define vmacc_vv_i16mf4_m | ( | ... | ) | __riscv_vmacc_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vmacc_vv_i32m1 | ( | ... | ) | __riscv_vmacc_vv_i32m1_tu(__VA_ARGS__) |
| #define vmacc_vv_i32m1_m | ( | ... | ) | __riscv_vmacc_vv_i32m1_tumu(__VA_ARGS__) |
| #define vmacc_vv_i32m2 | ( | ... | ) | __riscv_vmacc_vv_i32m2_tu(__VA_ARGS__) |
| #define vmacc_vv_i32m2_m | ( | ... | ) | __riscv_vmacc_vv_i32m2_tumu(__VA_ARGS__) |
| #define vmacc_vv_i32m4 | ( | ... | ) | __riscv_vmacc_vv_i32m4_tu(__VA_ARGS__) |
| #define vmacc_vv_i32m4_m | ( | ... | ) | __riscv_vmacc_vv_i32m4_tumu(__VA_ARGS__) |
| #define vmacc_vv_i32m8 | ( | ... | ) | __riscv_vmacc_vv_i32m8_tu(__VA_ARGS__) |
| #define vmacc_vv_i32m8_m | ( | ... | ) | __riscv_vmacc_vv_i32m8_tumu(__VA_ARGS__) |
| #define vmacc_vv_i32mf2 | ( | ... | ) | __riscv_vmacc_vv_i32mf2_tu(__VA_ARGS__) |
| #define vmacc_vv_i32mf2_m | ( | ... | ) | __riscv_vmacc_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vmacc_vv_i64m1 | ( | ... | ) | __riscv_vmacc_vv_i64m1_tu(__VA_ARGS__) |
| #define vmacc_vv_i64m1_m | ( | ... | ) | __riscv_vmacc_vv_i64m1_tumu(__VA_ARGS__) |
| #define vmacc_vv_i64m2 | ( | ... | ) | __riscv_vmacc_vv_i64m2_tu(__VA_ARGS__) |
| #define vmacc_vv_i64m2_m | ( | ... | ) | __riscv_vmacc_vv_i64m2_tumu(__VA_ARGS__) |
| #define vmacc_vv_i64m4 | ( | ... | ) | __riscv_vmacc_vv_i64m4_tu(__VA_ARGS__) |
| #define vmacc_vv_i64m4_m | ( | ... | ) | __riscv_vmacc_vv_i64m4_tumu(__VA_ARGS__) |
| #define vmacc_vv_i64m8 | ( | ... | ) | __riscv_vmacc_vv_i64m8_tu(__VA_ARGS__) |
| #define vmacc_vv_i64m8_m | ( | ... | ) | __riscv_vmacc_vv_i64m8_tumu(__VA_ARGS__) |
| #define vmacc_vv_i8m1 | ( | ... | ) | __riscv_vmacc_vv_i8m1_tu(__VA_ARGS__) |
| #define vmacc_vv_i8m1_m | ( | ... | ) | __riscv_vmacc_vv_i8m1_tumu(__VA_ARGS__) |
| #define vmacc_vv_i8m2 | ( | ... | ) | __riscv_vmacc_vv_i8m2_tu(__VA_ARGS__) |
| #define vmacc_vv_i8m2_m | ( | ... | ) | __riscv_vmacc_vv_i8m2_tumu(__VA_ARGS__) |
| #define vmacc_vv_i8m4 | ( | ... | ) | __riscv_vmacc_vv_i8m4_tu(__VA_ARGS__) |
| #define vmacc_vv_i8m4_m | ( | ... | ) | __riscv_vmacc_vv_i8m4_tumu(__VA_ARGS__) |
| #define vmacc_vv_i8m8 | ( | ... | ) | __riscv_vmacc_vv_i8m8_tu(__VA_ARGS__) |
| #define vmacc_vv_i8m8_m | ( | ... | ) | __riscv_vmacc_vv_i8m8_tumu(__VA_ARGS__) |
| #define vmacc_vv_i8mf2 | ( | ... | ) | __riscv_vmacc_vv_i8mf2_tu(__VA_ARGS__) |
| #define vmacc_vv_i8mf2_m | ( | ... | ) | __riscv_vmacc_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vmacc_vv_i8mf4 | ( | ... | ) | __riscv_vmacc_vv_i8mf4_tu(__VA_ARGS__) |
| #define vmacc_vv_i8mf4_m | ( | ... | ) | __riscv_vmacc_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vmacc_vv_i8mf8 | ( | ... | ) | __riscv_vmacc_vv_i8mf8_tu(__VA_ARGS__) |
| #define vmacc_vv_i8mf8_m | ( | ... | ) | __riscv_vmacc_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vmacc_vv_u16m1 | ( | ... | ) | __riscv_vmacc_vv_u16m1_tu(__VA_ARGS__) |
| #define vmacc_vv_u16m1_m | ( | ... | ) | __riscv_vmacc_vv_u16m1_tumu(__VA_ARGS__) |
| #define vmacc_vv_u16m2 | ( | ... | ) | __riscv_vmacc_vv_u16m2_tu(__VA_ARGS__) |
| #define vmacc_vv_u16m2_m | ( | ... | ) | __riscv_vmacc_vv_u16m2_tumu(__VA_ARGS__) |
| #define vmacc_vv_u16m4 | ( | ... | ) | __riscv_vmacc_vv_u16m4_tu(__VA_ARGS__) |
| #define vmacc_vv_u16m4_m | ( | ... | ) | __riscv_vmacc_vv_u16m4_tumu(__VA_ARGS__) |
| #define vmacc_vv_u16m8 | ( | ... | ) | __riscv_vmacc_vv_u16m8_tu(__VA_ARGS__) |
| #define vmacc_vv_u16m8_m | ( | ... | ) | __riscv_vmacc_vv_u16m8_tumu(__VA_ARGS__) |
| #define vmacc_vv_u16mf2 | ( | ... | ) | __riscv_vmacc_vv_u16mf2_tu(__VA_ARGS__) |
| #define vmacc_vv_u16mf2_m | ( | ... | ) | __riscv_vmacc_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vmacc_vv_u16mf4 | ( | ... | ) | __riscv_vmacc_vv_u16mf4_tu(__VA_ARGS__) |
| #define vmacc_vv_u16mf4_m | ( | ... | ) | __riscv_vmacc_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vmacc_vv_u32m1 | ( | ... | ) | __riscv_vmacc_vv_u32m1_tu(__VA_ARGS__) |
| #define vmacc_vv_u32m1_m | ( | ... | ) | __riscv_vmacc_vv_u32m1_tumu(__VA_ARGS__) |
| #define vmacc_vv_u32m2 | ( | ... | ) | __riscv_vmacc_vv_u32m2_tu(__VA_ARGS__) |
| #define vmacc_vv_u32m2_m | ( | ... | ) | __riscv_vmacc_vv_u32m2_tumu(__VA_ARGS__) |
| #define vmacc_vv_u32m4 | ( | ... | ) | __riscv_vmacc_vv_u32m4_tu(__VA_ARGS__) |
| #define vmacc_vv_u32m4_m | ( | ... | ) | __riscv_vmacc_vv_u32m4_tumu(__VA_ARGS__) |
| #define vmacc_vv_u32m8 | ( | ... | ) | __riscv_vmacc_vv_u32m8_tu(__VA_ARGS__) |
| #define vmacc_vv_u32m8_m | ( | ... | ) | __riscv_vmacc_vv_u32m8_tumu(__VA_ARGS__) |
| #define vmacc_vv_u32mf2 | ( | ... | ) | __riscv_vmacc_vv_u32mf2_tu(__VA_ARGS__) |
| #define vmacc_vv_u32mf2_m | ( | ... | ) | __riscv_vmacc_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vmacc_vv_u64m1 | ( | ... | ) | __riscv_vmacc_vv_u64m1_tu(__VA_ARGS__) |
| #define vmacc_vv_u64m1_m | ( | ... | ) | __riscv_vmacc_vv_u64m1_tumu(__VA_ARGS__) |
| #define vmacc_vv_u64m2 | ( | ... | ) | __riscv_vmacc_vv_u64m2_tu(__VA_ARGS__) |
| #define vmacc_vv_u64m2_m | ( | ... | ) | __riscv_vmacc_vv_u64m2_tumu(__VA_ARGS__) |
| #define vmacc_vv_u64m4 | ( | ... | ) | __riscv_vmacc_vv_u64m4_tu(__VA_ARGS__) |
| #define vmacc_vv_u64m4_m | ( | ... | ) | __riscv_vmacc_vv_u64m4_tumu(__VA_ARGS__) |
| #define vmacc_vv_u64m8 | ( | ... | ) | __riscv_vmacc_vv_u64m8_tu(__VA_ARGS__) |
| #define vmacc_vv_u64m8_m | ( | ... | ) | __riscv_vmacc_vv_u64m8_tumu(__VA_ARGS__) |
| #define vmacc_vv_u8m1 | ( | ... | ) | __riscv_vmacc_vv_u8m1_tu(__VA_ARGS__) |
| #define vmacc_vv_u8m1_m | ( | ... | ) | __riscv_vmacc_vv_u8m1_tumu(__VA_ARGS__) |
| #define vmacc_vv_u8m2 | ( | ... | ) | __riscv_vmacc_vv_u8m2_tu(__VA_ARGS__) |
| #define vmacc_vv_u8m2_m | ( | ... | ) | __riscv_vmacc_vv_u8m2_tumu(__VA_ARGS__) |
| #define vmacc_vv_u8m4 | ( | ... | ) | __riscv_vmacc_vv_u8m4_tu(__VA_ARGS__) |
| #define vmacc_vv_u8m4_m | ( | ... | ) | __riscv_vmacc_vv_u8m4_tumu(__VA_ARGS__) |
| #define vmacc_vv_u8m8 | ( | ... | ) | __riscv_vmacc_vv_u8m8_tu(__VA_ARGS__) |
| #define vmacc_vv_u8m8_m | ( | ... | ) | __riscv_vmacc_vv_u8m8_tumu(__VA_ARGS__) |
| #define vmacc_vv_u8mf2 | ( | ... | ) | __riscv_vmacc_vv_u8mf2_tu(__VA_ARGS__) |
| #define vmacc_vv_u8mf2_m | ( | ... | ) | __riscv_vmacc_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vmacc_vv_u8mf4 | ( | ... | ) | __riscv_vmacc_vv_u8mf4_tu(__VA_ARGS__) |
| #define vmacc_vv_u8mf4_m | ( | ... | ) | __riscv_vmacc_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vmacc_vv_u8mf8 | ( | ... | ) | __riscv_vmacc_vv_u8mf8_tu(__VA_ARGS__) |
| #define vmacc_vv_u8mf8_m | ( | ... | ) | __riscv_vmacc_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vmacc_vx_i16m1 | ( | ... | ) | __riscv_vmacc_vx_i16m1_tu(__VA_ARGS__) |
| #define vmacc_vx_i16m1_m | ( | ... | ) | __riscv_vmacc_vx_i16m1_tumu(__VA_ARGS__) |
| #define vmacc_vx_i16m2 | ( | ... | ) | __riscv_vmacc_vx_i16m2_tu(__VA_ARGS__) |
| #define vmacc_vx_i16m2_m | ( | ... | ) | __riscv_vmacc_vx_i16m2_tumu(__VA_ARGS__) |
| #define vmacc_vx_i16m4 | ( | ... | ) | __riscv_vmacc_vx_i16m4_tu(__VA_ARGS__) |
| #define vmacc_vx_i16m4_m | ( | ... | ) | __riscv_vmacc_vx_i16m4_tumu(__VA_ARGS__) |
| #define vmacc_vx_i16m8 | ( | ... | ) | __riscv_vmacc_vx_i16m8_tu(__VA_ARGS__) |
| #define vmacc_vx_i16m8_m | ( | ... | ) | __riscv_vmacc_vx_i16m8_tumu(__VA_ARGS__) |
| #define vmacc_vx_i16mf2 | ( | ... | ) | __riscv_vmacc_vx_i16mf2_tu(__VA_ARGS__) |
| #define vmacc_vx_i16mf2_m | ( | ... | ) | __riscv_vmacc_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vmacc_vx_i16mf4 | ( | ... | ) | __riscv_vmacc_vx_i16mf4_tu(__VA_ARGS__) |
| #define vmacc_vx_i16mf4_m | ( | ... | ) | __riscv_vmacc_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vmacc_vx_i32m1 | ( | ... | ) | __riscv_vmacc_vx_i32m1_tu(__VA_ARGS__) |
| #define vmacc_vx_i32m1_m | ( | ... | ) | __riscv_vmacc_vx_i32m1_tumu(__VA_ARGS__) |
| #define vmacc_vx_i32m2 | ( | ... | ) | __riscv_vmacc_vx_i32m2_tu(__VA_ARGS__) |
| #define vmacc_vx_i32m2_m | ( | ... | ) | __riscv_vmacc_vx_i32m2_tumu(__VA_ARGS__) |
| #define vmacc_vx_i32m4 | ( | ... | ) | __riscv_vmacc_vx_i32m4_tu(__VA_ARGS__) |
| #define vmacc_vx_i32m4_m | ( | ... | ) | __riscv_vmacc_vx_i32m4_tumu(__VA_ARGS__) |
| #define vmacc_vx_i32m8 | ( | ... | ) | __riscv_vmacc_vx_i32m8_tu(__VA_ARGS__) |
| #define vmacc_vx_i32m8_m | ( | ... | ) | __riscv_vmacc_vx_i32m8_tumu(__VA_ARGS__) |
| #define vmacc_vx_i32mf2 | ( | ... | ) | __riscv_vmacc_vx_i32mf2_tu(__VA_ARGS__) |
| #define vmacc_vx_i32mf2_m | ( | ... | ) | __riscv_vmacc_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vmacc_vx_i64m1 | ( | ... | ) | __riscv_vmacc_vx_i64m1_tu(__VA_ARGS__) |
| #define vmacc_vx_i64m1_m | ( | ... | ) | __riscv_vmacc_vx_i64m1_tumu(__VA_ARGS__) |
| #define vmacc_vx_i64m2 | ( | ... | ) | __riscv_vmacc_vx_i64m2_tu(__VA_ARGS__) |
| #define vmacc_vx_i64m2_m | ( | ... | ) | __riscv_vmacc_vx_i64m2_tumu(__VA_ARGS__) |
| #define vmacc_vx_i64m4 | ( | ... | ) | __riscv_vmacc_vx_i64m4_tu(__VA_ARGS__) |
| #define vmacc_vx_i64m4_m | ( | ... | ) | __riscv_vmacc_vx_i64m4_tumu(__VA_ARGS__) |
| #define vmacc_vx_i64m8 | ( | ... | ) | __riscv_vmacc_vx_i64m8_tu(__VA_ARGS__) |
| #define vmacc_vx_i64m8_m | ( | ... | ) | __riscv_vmacc_vx_i64m8_tumu(__VA_ARGS__) |
| #define vmacc_vx_i8m1 | ( | ... | ) | __riscv_vmacc_vx_i8m1_tu(__VA_ARGS__) |
| #define vmacc_vx_i8m1_m | ( | ... | ) | __riscv_vmacc_vx_i8m1_tumu(__VA_ARGS__) |
| #define vmacc_vx_i8m2 | ( | ... | ) | __riscv_vmacc_vx_i8m2_tu(__VA_ARGS__) |
| #define vmacc_vx_i8m2_m | ( | ... | ) | __riscv_vmacc_vx_i8m2_tumu(__VA_ARGS__) |
| #define vmacc_vx_i8m4 | ( | ... | ) | __riscv_vmacc_vx_i8m4_tu(__VA_ARGS__) |
| #define vmacc_vx_i8m4_m | ( | ... | ) | __riscv_vmacc_vx_i8m4_tumu(__VA_ARGS__) |
| #define vmacc_vx_i8m8 | ( | ... | ) | __riscv_vmacc_vx_i8m8_tu(__VA_ARGS__) |
| #define vmacc_vx_i8m8_m | ( | ... | ) | __riscv_vmacc_vx_i8m8_tumu(__VA_ARGS__) |
| #define vmacc_vx_i8mf2 | ( | ... | ) | __riscv_vmacc_vx_i8mf2_tu(__VA_ARGS__) |
| #define vmacc_vx_i8mf2_m | ( | ... | ) | __riscv_vmacc_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vmacc_vx_i8mf4 | ( | ... | ) | __riscv_vmacc_vx_i8mf4_tu(__VA_ARGS__) |
| #define vmacc_vx_i8mf4_m | ( | ... | ) | __riscv_vmacc_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vmacc_vx_i8mf8 | ( | ... | ) | __riscv_vmacc_vx_i8mf8_tu(__VA_ARGS__) |
| #define vmacc_vx_i8mf8_m | ( | ... | ) | __riscv_vmacc_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vmacc_vx_u16m1 | ( | ... | ) | __riscv_vmacc_vx_u16m1_tu(__VA_ARGS__) |
| #define vmacc_vx_u16m1_m | ( | ... | ) | __riscv_vmacc_vx_u16m1_tumu(__VA_ARGS__) |
| #define vmacc_vx_u16m2 | ( | ... | ) | __riscv_vmacc_vx_u16m2_tu(__VA_ARGS__) |
| #define vmacc_vx_u16m2_m | ( | ... | ) | __riscv_vmacc_vx_u16m2_tumu(__VA_ARGS__) |
| #define vmacc_vx_u16m4 | ( | ... | ) | __riscv_vmacc_vx_u16m4_tu(__VA_ARGS__) |
| #define vmacc_vx_u16m4_m | ( | ... | ) | __riscv_vmacc_vx_u16m4_tumu(__VA_ARGS__) |
| #define vmacc_vx_u16m8 | ( | ... | ) | __riscv_vmacc_vx_u16m8_tu(__VA_ARGS__) |
| #define vmacc_vx_u16m8_m | ( | ... | ) | __riscv_vmacc_vx_u16m8_tumu(__VA_ARGS__) |
| #define vmacc_vx_u16mf2 | ( | ... | ) | __riscv_vmacc_vx_u16mf2_tu(__VA_ARGS__) |
| #define vmacc_vx_u16mf2_m | ( | ... | ) | __riscv_vmacc_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vmacc_vx_u16mf4 | ( | ... | ) | __riscv_vmacc_vx_u16mf4_tu(__VA_ARGS__) |
| #define vmacc_vx_u16mf4_m | ( | ... | ) | __riscv_vmacc_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vmacc_vx_u32m1 | ( | ... | ) | __riscv_vmacc_vx_u32m1_tu(__VA_ARGS__) |
| #define vmacc_vx_u32m1_m | ( | ... | ) | __riscv_vmacc_vx_u32m1_tumu(__VA_ARGS__) |
| #define vmacc_vx_u32m2 | ( | ... | ) | __riscv_vmacc_vx_u32m2_tu(__VA_ARGS__) |
| #define vmacc_vx_u32m2_m | ( | ... | ) | __riscv_vmacc_vx_u32m2_tumu(__VA_ARGS__) |
| #define vmacc_vx_u32m4 | ( | ... | ) | __riscv_vmacc_vx_u32m4_tu(__VA_ARGS__) |
| #define vmacc_vx_u32m4_m | ( | ... | ) | __riscv_vmacc_vx_u32m4_tumu(__VA_ARGS__) |
| #define vmacc_vx_u32m8 | ( | ... | ) | __riscv_vmacc_vx_u32m8_tu(__VA_ARGS__) |
| #define vmacc_vx_u32m8_m | ( | ... | ) | __riscv_vmacc_vx_u32m8_tumu(__VA_ARGS__) |
| #define vmacc_vx_u32mf2 | ( | ... | ) | __riscv_vmacc_vx_u32mf2_tu(__VA_ARGS__) |
| #define vmacc_vx_u32mf2_m | ( | ... | ) | __riscv_vmacc_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vmacc_vx_u64m1 | ( | ... | ) | __riscv_vmacc_vx_u64m1_tu(__VA_ARGS__) |
| #define vmacc_vx_u64m1_m | ( | ... | ) | __riscv_vmacc_vx_u64m1_tumu(__VA_ARGS__) |
| #define vmacc_vx_u64m2 | ( | ... | ) | __riscv_vmacc_vx_u64m2_tu(__VA_ARGS__) |
| #define vmacc_vx_u64m2_m | ( | ... | ) | __riscv_vmacc_vx_u64m2_tumu(__VA_ARGS__) |
| #define vmacc_vx_u64m4 | ( | ... | ) | __riscv_vmacc_vx_u64m4_tu(__VA_ARGS__) |
| #define vmacc_vx_u64m4_m | ( | ... | ) | __riscv_vmacc_vx_u64m4_tumu(__VA_ARGS__) |
| #define vmacc_vx_u64m8 | ( | ... | ) | __riscv_vmacc_vx_u64m8_tu(__VA_ARGS__) |
| #define vmacc_vx_u64m8_m | ( | ... | ) | __riscv_vmacc_vx_u64m8_tumu(__VA_ARGS__) |
| #define vmacc_vx_u8m1 | ( | ... | ) | __riscv_vmacc_vx_u8m1_tu(__VA_ARGS__) |
| #define vmacc_vx_u8m1_m | ( | ... | ) | __riscv_vmacc_vx_u8m1_tumu(__VA_ARGS__) |
| #define vmacc_vx_u8m2 | ( | ... | ) | __riscv_vmacc_vx_u8m2_tu(__VA_ARGS__) |
| #define vmacc_vx_u8m2_m | ( | ... | ) | __riscv_vmacc_vx_u8m2_tumu(__VA_ARGS__) |
| #define vmacc_vx_u8m4 | ( | ... | ) | __riscv_vmacc_vx_u8m4_tu(__VA_ARGS__) |
| #define vmacc_vx_u8m4_m | ( | ... | ) | __riscv_vmacc_vx_u8m4_tumu(__VA_ARGS__) |
| #define vmacc_vx_u8m8 | ( | ... | ) | __riscv_vmacc_vx_u8m8_tu(__VA_ARGS__) |
| #define vmacc_vx_u8m8_m | ( | ... | ) | __riscv_vmacc_vx_u8m8_tumu(__VA_ARGS__) |
| #define vmacc_vx_u8mf2 | ( | ... | ) | __riscv_vmacc_vx_u8mf2_tu(__VA_ARGS__) |
| #define vmacc_vx_u8mf2_m | ( | ... | ) | __riscv_vmacc_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vmacc_vx_u8mf4 | ( | ... | ) | __riscv_vmacc_vx_u8mf4_tu(__VA_ARGS__) |
| #define vmacc_vx_u8mf4_m | ( | ... | ) | __riscv_vmacc_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vmacc_vx_u8mf8 | ( | ... | ) | __riscv_vmacc_vx_u8mf8_tu(__VA_ARGS__) |
| #define vmacc_vx_u8mf8_m | ( | ... | ) | __riscv_vmacc_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vmadc_vv_i16m1_b16 | ( | ... | ) | __riscv_vmadc_vv_i16m1_b16(__VA_ARGS__) |
| #define vmadc_vv_i16m2_b8 | ( | ... | ) | __riscv_vmadc_vv_i16m2_b8(__VA_ARGS__) |
| #define vmadc_vv_i16m4_b4 | ( | ... | ) | __riscv_vmadc_vv_i16m4_b4(__VA_ARGS__) |
| #define vmadc_vv_i16m8_b2 | ( | ... | ) | __riscv_vmadc_vv_i16m8_b2(__VA_ARGS__) |
| #define vmadc_vv_i16mf2_b32 | ( | ... | ) | __riscv_vmadc_vv_i16mf2_b32(__VA_ARGS__) |
| #define vmadc_vv_i16mf4_b64 | ( | ... | ) | __riscv_vmadc_vv_i16mf4_b64(__VA_ARGS__) |
| #define vmadc_vv_i32m1_b32 | ( | ... | ) | __riscv_vmadc_vv_i32m1_b32(__VA_ARGS__) |
| #define vmadc_vv_i32m2_b16 | ( | ... | ) | __riscv_vmadc_vv_i32m2_b16(__VA_ARGS__) |
| #define vmadc_vv_i32m4_b8 | ( | ... | ) | __riscv_vmadc_vv_i32m4_b8(__VA_ARGS__) |
| #define vmadc_vv_i32m8_b4 | ( | ... | ) | __riscv_vmadc_vv_i32m8_b4(__VA_ARGS__) |
| #define vmadc_vv_i32mf2_b64 | ( | ... | ) | __riscv_vmadc_vv_i32mf2_b64(__VA_ARGS__) |
| #define vmadc_vv_i64m1_b64 | ( | ... | ) | __riscv_vmadc_vv_i64m1_b64(__VA_ARGS__) |
| #define vmadc_vv_i64m2_b32 | ( | ... | ) | __riscv_vmadc_vv_i64m2_b32(__VA_ARGS__) |
| #define vmadc_vv_i64m4_b16 | ( | ... | ) | __riscv_vmadc_vv_i64m4_b16(__VA_ARGS__) |
| #define vmadc_vv_i64m8_b8 | ( | ... | ) | __riscv_vmadc_vv_i64m8_b8(__VA_ARGS__) |
| #define vmadc_vv_i8m1_b8 | ( | ... | ) | __riscv_vmadc_vv_i8m1_b8(__VA_ARGS__) |
| #define vmadc_vv_i8m2_b4 | ( | ... | ) | __riscv_vmadc_vv_i8m2_b4(__VA_ARGS__) |
| #define vmadc_vv_i8m4_b2 | ( | ... | ) | __riscv_vmadc_vv_i8m4_b2(__VA_ARGS__) |
| #define vmadc_vv_i8m8_b1 | ( | ... | ) | __riscv_vmadc_vv_i8m8_b1(__VA_ARGS__) |
| #define vmadc_vv_i8mf2_b16 | ( | ... | ) | __riscv_vmadc_vv_i8mf2_b16(__VA_ARGS__) |
| #define vmadc_vv_i8mf4_b32 | ( | ... | ) | __riscv_vmadc_vv_i8mf4_b32(__VA_ARGS__) |
| #define vmadc_vv_i8mf8_b64 | ( | ... | ) | __riscv_vmadc_vv_i8mf8_b64(__VA_ARGS__) |
| #define vmadc_vv_u16m1_b16 | ( | ... | ) | __riscv_vmadc_vv_u16m1_b16(__VA_ARGS__) |
| #define vmadc_vv_u16m2_b8 | ( | ... | ) | __riscv_vmadc_vv_u16m2_b8(__VA_ARGS__) |
| #define vmadc_vv_u16m4_b4 | ( | ... | ) | __riscv_vmadc_vv_u16m4_b4(__VA_ARGS__) |
| #define vmadc_vv_u16m8_b2 | ( | ... | ) | __riscv_vmadc_vv_u16m8_b2(__VA_ARGS__) |
| #define vmadc_vv_u16mf2_b32 | ( | ... | ) | __riscv_vmadc_vv_u16mf2_b32(__VA_ARGS__) |
| #define vmadc_vv_u16mf4_b64 | ( | ... | ) | __riscv_vmadc_vv_u16mf4_b64(__VA_ARGS__) |
| #define vmadc_vv_u32m1_b32 | ( | ... | ) | __riscv_vmadc_vv_u32m1_b32(__VA_ARGS__) |
| #define vmadc_vv_u32m2_b16 | ( | ... | ) | __riscv_vmadc_vv_u32m2_b16(__VA_ARGS__) |
| #define vmadc_vv_u32m4_b8 | ( | ... | ) | __riscv_vmadc_vv_u32m4_b8(__VA_ARGS__) |
| #define vmadc_vv_u32m8_b4 | ( | ... | ) | __riscv_vmadc_vv_u32m8_b4(__VA_ARGS__) |
| #define vmadc_vv_u32mf2_b64 | ( | ... | ) | __riscv_vmadc_vv_u32mf2_b64(__VA_ARGS__) |
| #define vmadc_vv_u64m1_b64 | ( | ... | ) | __riscv_vmadc_vv_u64m1_b64(__VA_ARGS__) |
| #define vmadc_vv_u64m2_b32 | ( | ... | ) | __riscv_vmadc_vv_u64m2_b32(__VA_ARGS__) |
| #define vmadc_vv_u64m4_b16 | ( | ... | ) | __riscv_vmadc_vv_u64m4_b16(__VA_ARGS__) |
| #define vmadc_vv_u64m8_b8 | ( | ... | ) | __riscv_vmadc_vv_u64m8_b8(__VA_ARGS__) |
| #define vmadc_vv_u8m1_b8 | ( | ... | ) | __riscv_vmadc_vv_u8m1_b8(__VA_ARGS__) |
| #define vmadc_vv_u8m2_b4 | ( | ... | ) | __riscv_vmadc_vv_u8m2_b4(__VA_ARGS__) |
| #define vmadc_vv_u8m4_b2 | ( | ... | ) | __riscv_vmadc_vv_u8m4_b2(__VA_ARGS__) |
| #define vmadc_vv_u8m8_b1 | ( | ... | ) | __riscv_vmadc_vv_u8m8_b1(__VA_ARGS__) |
| #define vmadc_vv_u8mf2_b16 | ( | ... | ) | __riscv_vmadc_vv_u8mf2_b16(__VA_ARGS__) |
| #define vmadc_vv_u8mf4_b32 | ( | ... | ) | __riscv_vmadc_vv_u8mf4_b32(__VA_ARGS__) |
| #define vmadc_vv_u8mf8_b64 | ( | ... | ) | __riscv_vmadc_vv_u8mf8_b64(__VA_ARGS__) |
| #define vmadc_vvm_i16m1_b16 | ( | ... | ) | __riscv_vmadc_vvm_i16m1_b16(__VA_ARGS__) |
| #define vmadc_vvm_i16m2_b8 | ( | ... | ) | __riscv_vmadc_vvm_i16m2_b8(__VA_ARGS__) |
| #define vmadc_vvm_i16m4_b4 | ( | ... | ) | __riscv_vmadc_vvm_i16m4_b4(__VA_ARGS__) |
| #define vmadc_vvm_i16m8_b2 | ( | ... | ) | __riscv_vmadc_vvm_i16m8_b2(__VA_ARGS__) |
| #define vmadc_vvm_i16mf2_b32 | ( | ... | ) | __riscv_vmadc_vvm_i16mf2_b32(__VA_ARGS__) |
| #define vmadc_vvm_i16mf4_b64 | ( | ... | ) | __riscv_vmadc_vvm_i16mf4_b64(__VA_ARGS__) |
| #define vmadc_vvm_i32m1_b32 | ( | ... | ) | __riscv_vmadc_vvm_i32m1_b32(__VA_ARGS__) |
| #define vmadc_vvm_i32m2_b16 | ( | ... | ) | __riscv_vmadc_vvm_i32m2_b16(__VA_ARGS__) |
| #define vmadc_vvm_i32m4_b8 | ( | ... | ) | __riscv_vmadc_vvm_i32m4_b8(__VA_ARGS__) |
| #define vmadc_vvm_i32m8_b4 | ( | ... | ) | __riscv_vmadc_vvm_i32m8_b4(__VA_ARGS__) |
| #define vmadc_vvm_i32mf2_b64 | ( | ... | ) | __riscv_vmadc_vvm_i32mf2_b64(__VA_ARGS__) |
| #define vmadc_vvm_i64m1_b64 | ( | ... | ) | __riscv_vmadc_vvm_i64m1_b64(__VA_ARGS__) |
| #define vmadc_vvm_i64m2_b32 | ( | ... | ) | __riscv_vmadc_vvm_i64m2_b32(__VA_ARGS__) |
| #define vmadc_vvm_i64m4_b16 | ( | ... | ) | __riscv_vmadc_vvm_i64m4_b16(__VA_ARGS__) |
| #define vmadc_vvm_i64m8_b8 | ( | ... | ) | __riscv_vmadc_vvm_i64m8_b8(__VA_ARGS__) |
| #define vmadc_vvm_i8m1_b8 | ( | ... | ) | __riscv_vmadc_vvm_i8m1_b8(__VA_ARGS__) |
| #define vmadc_vvm_i8m2_b4 | ( | ... | ) | __riscv_vmadc_vvm_i8m2_b4(__VA_ARGS__) |
| #define vmadc_vvm_i8m4_b2 | ( | ... | ) | __riscv_vmadc_vvm_i8m4_b2(__VA_ARGS__) |
| #define vmadc_vvm_i8m8_b1 | ( | ... | ) | __riscv_vmadc_vvm_i8m8_b1(__VA_ARGS__) |
| #define vmadc_vvm_i8mf2_b16 | ( | ... | ) | __riscv_vmadc_vvm_i8mf2_b16(__VA_ARGS__) |
| #define vmadc_vvm_i8mf4_b32 | ( | ... | ) | __riscv_vmadc_vvm_i8mf4_b32(__VA_ARGS__) |
| #define vmadc_vvm_i8mf8_b64 | ( | ... | ) | __riscv_vmadc_vvm_i8mf8_b64(__VA_ARGS__) |
| #define vmadc_vvm_u16m1_b16 | ( | ... | ) | __riscv_vmadc_vvm_u16m1_b16(__VA_ARGS__) |
| #define vmadc_vvm_u16m2_b8 | ( | ... | ) | __riscv_vmadc_vvm_u16m2_b8(__VA_ARGS__) |
| #define vmadc_vvm_u16m4_b4 | ( | ... | ) | __riscv_vmadc_vvm_u16m4_b4(__VA_ARGS__) |
| #define vmadc_vvm_u16m8_b2 | ( | ... | ) | __riscv_vmadc_vvm_u16m8_b2(__VA_ARGS__) |
| #define vmadc_vvm_u16mf2_b32 | ( | ... | ) | __riscv_vmadc_vvm_u16mf2_b32(__VA_ARGS__) |
| #define vmadc_vvm_u16mf4_b64 | ( | ... | ) | __riscv_vmadc_vvm_u16mf4_b64(__VA_ARGS__) |
| #define vmadc_vvm_u32m1_b32 | ( | ... | ) | __riscv_vmadc_vvm_u32m1_b32(__VA_ARGS__) |
| #define vmadc_vvm_u32m2_b16 | ( | ... | ) | __riscv_vmadc_vvm_u32m2_b16(__VA_ARGS__) |
| #define vmadc_vvm_u32m4_b8 | ( | ... | ) | __riscv_vmadc_vvm_u32m4_b8(__VA_ARGS__) |
| #define vmadc_vvm_u32m8_b4 | ( | ... | ) | __riscv_vmadc_vvm_u32m8_b4(__VA_ARGS__) |
| #define vmadc_vvm_u32mf2_b64 | ( | ... | ) | __riscv_vmadc_vvm_u32mf2_b64(__VA_ARGS__) |
| #define vmadc_vvm_u64m1_b64 | ( | ... | ) | __riscv_vmadc_vvm_u64m1_b64(__VA_ARGS__) |
| #define vmadc_vvm_u64m2_b32 | ( | ... | ) | __riscv_vmadc_vvm_u64m2_b32(__VA_ARGS__) |
| #define vmadc_vvm_u64m4_b16 | ( | ... | ) | __riscv_vmadc_vvm_u64m4_b16(__VA_ARGS__) |
| #define vmadc_vvm_u64m8_b8 | ( | ... | ) | __riscv_vmadc_vvm_u64m8_b8(__VA_ARGS__) |
| #define vmadc_vvm_u8m1_b8 | ( | ... | ) | __riscv_vmadc_vvm_u8m1_b8(__VA_ARGS__) |
| #define vmadc_vvm_u8m2_b4 | ( | ... | ) | __riscv_vmadc_vvm_u8m2_b4(__VA_ARGS__) |
| #define vmadc_vvm_u8m4_b2 | ( | ... | ) | __riscv_vmadc_vvm_u8m4_b2(__VA_ARGS__) |
| #define vmadc_vvm_u8m8_b1 | ( | ... | ) | __riscv_vmadc_vvm_u8m8_b1(__VA_ARGS__) |
| #define vmadc_vvm_u8mf2_b16 | ( | ... | ) | __riscv_vmadc_vvm_u8mf2_b16(__VA_ARGS__) |
| #define vmadc_vvm_u8mf4_b32 | ( | ... | ) | __riscv_vmadc_vvm_u8mf4_b32(__VA_ARGS__) |
| #define vmadc_vvm_u8mf8_b64 | ( | ... | ) | __riscv_vmadc_vvm_u8mf8_b64(__VA_ARGS__) |
| #define vmadc_vx_i16m1_b16 | ( | ... | ) | __riscv_vmadc_vx_i16m1_b16(__VA_ARGS__) |
| #define vmadc_vx_i16m2_b8 | ( | ... | ) | __riscv_vmadc_vx_i16m2_b8(__VA_ARGS__) |
| #define vmadc_vx_i16m4_b4 | ( | ... | ) | __riscv_vmadc_vx_i16m4_b4(__VA_ARGS__) |
| #define vmadc_vx_i16m8_b2 | ( | ... | ) | __riscv_vmadc_vx_i16m8_b2(__VA_ARGS__) |
| #define vmadc_vx_i16mf2_b32 | ( | ... | ) | __riscv_vmadc_vx_i16mf2_b32(__VA_ARGS__) |
| #define vmadc_vx_i16mf4_b64 | ( | ... | ) | __riscv_vmadc_vx_i16mf4_b64(__VA_ARGS__) |
| #define vmadc_vx_i32m1_b32 | ( | ... | ) | __riscv_vmadc_vx_i32m1_b32(__VA_ARGS__) |
| #define vmadc_vx_i32m2_b16 | ( | ... | ) | __riscv_vmadc_vx_i32m2_b16(__VA_ARGS__) |
| #define vmadc_vx_i32m4_b8 | ( | ... | ) | __riscv_vmadc_vx_i32m4_b8(__VA_ARGS__) |
| #define vmadc_vx_i32m8_b4 | ( | ... | ) | __riscv_vmadc_vx_i32m8_b4(__VA_ARGS__) |
| #define vmadc_vx_i32mf2_b64 | ( | ... | ) | __riscv_vmadc_vx_i32mf2_b64(__VA_ARGS__) |
| #define vmadc_vx_i64m1_b64 | ( | ... | ) | __riscv_vmadc_vx_i64m1_b64(__VA_ARGS__) |
| #define vmadc_vx_i64m2_b32 | ( | ... | ) | __riscv_vmadc_vx_i64m2_b32(__VA_ARGS__) |
| #define vmadc_vx_i64m4_b16 | ( | ... | ) | __riscv_vmadc_vx_i64m4_b16(__VA_ARGS__) |
| #define vmadc_vx_i64m8_b8 | ( | ... | ) | __riscv_vmadc_vx_i64m8_b8(__VA_ARGS__) |
| #define vmadc_vx_i8m1_b8 | ( | ... | ) | __riscv_vmadc_vx_i8m1_b8(__VA_ARGS__) |
| #define vmadc_vx_i8m2_b4 | ( | ... | ) | __riscv_vmadc_vx_i8m2_b4(__VA_ARGS__) |
| #define vmadc_vx_i8m4_b2 | ( | ... | ) | __riscv_vmadc_vx_i8m4_b2(__VA_ARGS__) |
| #define vmadc_vx_i8m8_b1 | ( | ... | ) | __riscv_vmadc_vx_i8m8_b1(__VA_ARGS__) |
| #define vmadc_vx_i8mf2_b16 | ( | ... | ) | __riscv_vmadc_vx_i8mf2_b16(__VA_ARGS__) |
| #define vmadc_vx_i8mf4_b32 | ( | ... | ) | __riscv_vmadc_vx_i8mf4_b32(__VA_ARGS__) |
| #define vmadc_vx_i8mf8_b64 | ( | ... | ) | __riscv_vmadc_vx_i8mf8_b64(__VA_ARGS__) |
| #define vmadc_vx_u16m1_b16 | ( | ... | ) | __riscv_vmadc_vx_u16m1_b16(__VA_ARGS__) |
| #define vmadc_vx_u16m2_b8 | ( | ... | ) | __riscv_vmadc_vx_u16m2_b8(__VA_ARGS__) |
| #define vmadc_vx_u16m4_b4 | ( | ... | ) | __riscv_vmadc_vx_u16m4_b4(__VA_ARGS__) |
| #define vmadc_vx_u16m8_b2 | ( | ... | ) | __riscv_vmadc_vx_u16m8_b2(__VA_ARGS__) |
| #define vmadc_vx_u16mf2_b32 | ( | ... | ) | __riscv_vmadc_vx_u16mf2_b32(__VA_ARGS__) |
| #define vmadc_vx_u16mf4_b64 | ( | ... | ) | __riscv_vmadc_vx_u16mf4_b64(__VA_ARGS__) |
| #define vmadc_vx_u32m1_b32 | ( | ... | ) | __riscv_vmadc_vx_u32m1_b32(__VA_ARGS__) |
| #define vmadc_vx_u32m2_b16 | ( | ... | ) | __riscv_vmadc_vx_u32m2_b16(__VA_ARGS__) |
| #define vmadc_vx_u32m4_b8 | ( | ... | ) | __riscv_vmadc_vx_u32m4_b8(__VA_ARGS__) |
| #define vmadc_vx_u32m8_b4 | ( | ... | ) | __riscv_vmadc_vx_u32m8_b4(__VA_ARGS__) |
| #define vmadc_vx_u32mf2_b64 | ( | ... | ) | __riscv_vmadc_vx_u32mf2_b64(__VA_ARGS__) |
| #define vmadc_vx_u64m1_b64 | ( | ... | ) | __riscv_vmadc_vx_u64m1_b64(__VA_ARGS__) |
| #define vmadc_vx_u64m2_b32 | ( | ... | ) | __riscv_vmadc_vx_u64m2_b32(__VA_ARGS__) |
| #define vmadc_vx_u64m4_b16 | ( | ... | ) | __riscv_vmadc_vx_u64m4_b16(__VA_ARGS__) |
| #define vmadc_vx_u64m8_b8 | ( | ... | ) | __riscv_vmadc_vx_u64m8_b8(__VA_ARGS__) |
| #define vmadc_vx_u8m1_b8 | ( | ... | ) | __riscv_vmadc_vx_u8m1_b8(__VA_ARGS__) |
| #define vmadc_vx_u8m2_b4 | ( | ... | ) | __riscv_vmadc_vx_u8m2_b4(__VA_ARGS__) |
| #define vmadc_vx_u8m4_b2 | ( | ... | ) | __riscv_vmadc_vx_u8m4_b2(__VA_ARGS__) |
| #define vmadc_vx_u8m8_b1 | ( | ... | ) | __riscv_vmadc_vx_u8m8_b1(__VA_ARGS__) |
| #define vmadc_vx_u8mf2_b16 | ( | ... | ) | __riscv_vmadc_vx_u8mf2_b16(__VA_ARGS__) |
| #define vmadc_vx_u8mf4_b32 | ( | ... | ) | __riscv_vmadc_vx_u8mf4_b32(__VA_ARGS__) |
| #define vmadc_vx_u8mf8_b64 | ( | ... | ) | __riscv_vmadc_vx_u8mf8_b64(__VA_ARGS__) |
| #define vmadc_vxm_i16m1_b16 | ( | ... | ) | __riscv_vmadc_vxm_i16m1_b16(__VA_ARGS__) |
| #define vmadc_vxm_i16m2_b8 | ( | ... | ) | __riscv_vmadc_vxm_i16m2_b8(__VA_ARGS__) |
| #define vmadc_vxm_i16m4_b4 | ( | ... | ) | __riscv_vmadc_vxm_i16m4_b4(__VA_ARGS__) |
| #define vmadc_vxm_i16m8_b2 | ( | ... | ) | __riscv_vmadc_vxm_i16m8_b2(__VA_ARGS__) |
| #define vmadc_vxm_i16mf2_b32 | ( | ... | ) | __riscv_vmadc_vxm_i16mf2_b32(__VA_ARGS__) |
| #define vmadc_vxm_i16mf4_b64 | ( | ... | ) | __riscv_vmadc_vxm_i16mf4_b64(__VA_ARGS__) |
| #define vmadc_vxm_i32m1_b32 | ( | ... | ) | __riscv_vmadc_vxm_i32m1_b32(__VA_ARGS__) |
| #define vmadc_vxm_i32m2_b16 | ( | ... | ) | __riscv_vmadc_vxm_i32m2_b16(__VA_ARGS__) |
| #define vmadc_vxm_i32m4_b8 | ( | ... | ) | __riscv_vmadc_vxm_i32m4_b8(__VA_ARGS__) |
| #define vmadc_vxm_i32m8_b4 | ( | ... | ) | __riscv_vmadc_vxm_i32m8_b4(__VA_ARGS__) |
| #define vmadc_vxm_i32mf2_b64 | ( | ... | ) | __riscv_vmadc_vxm_i32mf2_b64(__VA_ARGS__) |
| #define vmadc_vxm_i64m1_b64 | ( | ... | ) | __riscv_vmadc_vxm_i64m1_b64(__VA_ARGS__) |
| #define vmadc_vxm_i64m2_b32 | ( | ... | ) | __riscv_vmadc_vxm_i64m2_b32(__VA_ARGS__) |
| #define vmadc_vxm_i64m4_b16 | ( | ... | ) | __riscv_vmadc_vxm_i64m4_b16(__VA_ARGS__) |
| #define vmadc_vxm_i64m8_b8 | ( | ... | ) | __riscv_vmadc_vxm_i64m8_b8(__VA_ARGS__) |
| #define vmadc_vxm_i8m1_b8 | ( | ... | ) | __riscv_vmadc_vxm_i8m1_b8(__VA_ARGS__) |
| #define vmadc_vxm_i8m2_b4 | ( | ... | ) | __riscv_vmadc_vxm_i8m2_b4(__VA_ARGS__) |
| #define vmadc_vxm_i8m4_b2 | ( | ... | ) | __riscv_vmadc_vxm_i8m4_b2(__VA_ARGS__) |
| #define vmadc_vxm_i8m8_b1 | ( | ... | ) | __riscv_vmadc_vxm_i8m8_b1(__VA_ARGS__) |
| #define vmadc_vxm_i8mf2_b16 | ( | ... | ) | __riscv_vmadc_vxm_i8mf2_b16(__VA_ARGS__) |
| #define vmadc_vxm_i8mf4_b32 | ( | ... | ) | __riscv_vmadc_vxm_i8mf4_b32(__VA_ARGS__) |
| #define vmadc_vxm_i8mf8_b64 | ( | ... | ) | __riscv_vmadc_vxm_i8mf8_b64(__VA_ARGS__) |
| #define vmadc_vxm_u16m1_b16 | ( | ... | ) | __riscv_vmadc_vxm_u16m1_b16(__VA_ARGS__) |
| #define vmadc_vxm_u16m2_b8 | ( | ... | ) | __riscv_vmadc_vxm_u16m2_b8(__VA_ARGS__) |
| #define vmadc_vxm_u16m4_b4 | ( | ... | ) | __riscv_vmadc_vxm_u16m4_b4(__VA_ARGS__) |
| #define vmadc_vxm_u16m8_b2 | ( | ... | ) | __riscv_vmadc_vxm_u16m8_b2(__VA_ARGS__) |
| #define vmadc_vxm_u16mf2_b32 | ( | ... | ) | __riscv_vmadc_vxm_u16mf2_b32(__VA_ARGS__) |
| #define vmadc_vxm_u16mf4_b64 | ( | ... | ) | __riscv_vmadc_vxm_u16mf4_b64(__VA_ARGS__) |
| #define vmadc_vxm_u32m1_b32 | ( | ... | ) | __riscv_vmadc_vxm_u32m1_b32(__VA_ARGS__) |
| #define vmadc_vxm_u32m2_b16 | ( | ... | ) | __riscv_vmadc_vxm_u32m2_b16(__VA_ARGS__) |
| #define vmadc_vxm_u32m4_b8 | ( | ... | ) | __riscv_vmadc_vxm_u32m4_b8(__VA_ARGS__) |
| #define vmadc_vxm_u32m8_b4 | ( | ... | ) | __riscv_vmadc_vxm_u32m8_b4(__VA_ARGS__) |
| #define vmadc_vxm_u32mf2_b64 | ( | ... | ) | __riscv_vmadc_vxm_u32mf2_b64(__VA_ARGS__) |
| #define vmadc_vxm_u64m1_b64 | ( | ... | ) | __riscv_vmadc_vxm_u64m1_b64(__VA_ARGS__) |
| #define vmadc_vxm_u64m2_b32 | ( | ... | ) | __riscv_vmadc_vxm_u64m2_b32(__VA_ARGS__) |
| #define vmadc_vxm_u64m4_b16 | ( | ... | ) | __riscv_vmadc_vxm_u64m4_b16(__VA_ARGS__) |
| #define vmadc_vxm_u64m8_b8 | ( | ... | ) | __riscv_vmadc_vxm_u64m8_b8(__VA_ARGS__) |
| #define vmadc_vxm_u8m1_b8 | ( | ... | ) | __riscv_vmadc_vxm_u8m1_b8(__VA_ARGS__) |
| #define vmadc_vxm_u8m2_b4 | ( | ... | ) | __riscv_vmadc_vxm_u8m2_b4(__VA_ARGS__) |
| #define vmadc_vxm_u8m4_b2 | ( | ... | ) | __riscv_vmadc_vxm_u8m4_b2(__VA_ARGS__) |
| #define vmadc_vxm_u8m8_b1 | ( | ... | ) | __riscv_vmadc_vxm_u8m8_b1(__VA_ARGS__) |
| #define vmadc_vxm_u8mf2_b16 | ( | ... | ) | __riscv_vmadc_vxm_u8mf2_b16(__VA_ARGS__) |
| #define vmadc_vxm_u8mf4_b32 | ( | ... | ) | __riscv_vmadc_vxm_u8mf4_b32(__VA_ARGS__) |
| #define vmadc_vxm_u8mf8_b64 | ( | ... | ) | __riscv_vmadc_vxm_u8mf8_b64(__VA_ARGS__) |
| #define vmadd_vv_i16m1 | ( | ... | ) | __riscv_vmadd_vv_i16m1_tu(__VA_ARGS__) |
| #define vmadd_vv_i16m1_m | ( | ... | ) | __riscv_vmadd_vv_i16m1_tumu(__VA_ARGS__) |
| #define vmadd_vv_i16m2 | ( | ... | ) | __riscv_vmadd_vv_i16m2_tu(__VA_ARGS__) |
| #define vmadd_vv_i16m2_m | ( | ... | ) | __riscv_vmadd_vv_i16m2_tumu(__VA_ARGS__) |
| #define vmadd_vv_i16m4 | ( | ... | ) | __riscv_vmadd_vv_i16m4_tu(__VA_ARGS__) |
| #define vmadd_vv_i16m4_m | ( | ... | ) | __riscv_vmadd_vv_i16m4_tumu(__VA_ARGS__) |
| #define vmadd_vv_i16m8 | ( | ... | ) | __riscv_vmadd_vv_i16m8_tu(__VA_ARGS__) |
| #define vmadd_vv_i16m8_m | ( | ... | ) | __riscv_vmadd_vv_i16m8_tumu(__VA_ARGS__) |
| #define vmadd_vv_i16mf2 | ( | ... | ) | __riscv_vmadd_vv_i16mf2_tu(__VA_ARGS__) |
| #define vmadd_vv_i16mf2_m | ( | ... | ) | __riscv_vmadd_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vmadd_vv_i16mf4 | ( | ... | ) | __riscv_vmadd_vv_i16mf4_tu(__VA_ARGS__) |
| #define vmadd_vv_i16mf4_m | ( | ... | ) | __riscv_vmadd_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vmadd_vv_i32m1 | ( | ... | ) | __riscv_vmadd_vv_i32m1_tu(__VA_ARGS__) |
| #define vmadd_vv_i32m1_m | ( | ... | ) | __riscv_vmadd_vv_i32m1_tumu(__VA_ARGS__) |
| #define vmadd_vv_i32m2 | ( | ... | ) | __riscv_vmadd_vv_i32m2_tu(__VA_ARGS__) |
| #define vmadd_vv_i32m2_m | ( | ... | ) | __riscv_vmadd_vv_i32m2_tumu(__VA_ARGS__) |
| #define vmadd_vv_i32m4 | ( | ... | ) | __riscv_vmadd_vv_i32m4_tu(__VA_ARGS__) |
| #define vmadd_vv_i32m4_m | ( | ... | ) | __riscv_vmadd_vv_i32m4_tumu(__VA_ARGS__) |
| #define vmadd_vv_i32m8 | ( | ... | ) | __riscv_vmadd_vv_i32m8_tu(__VA_ARGS__) |
| #define vmadd_vv_i32m8_m | ( | ... | ) | __riscv_vmadd_vv_i32m8_tumu(__VA_ARGS__) |
| #define vmadd_vv_i32mf2 | ( | ... | ) | __riscv_vmadd_vv_i32mf2_tu(__VA_ARGS__) |
| #define vmadd_vv_i32mf2_m | ( | ... | ) | __riscv_vmadd_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vmadd_vv_i64m1 | ( | ... | ) | __riscv_vmadd_vv_i64m1_tu(__VA_ARGS__) |
| #define vmadd_vv_i64m1_m | ( | ... | ) | __riscv_vmadd_vv_i64m1_tumu(__VA_ARGS__) |
| #define vmadd_vv_i64m2 | ( | ... | ) | __riscv_vmadd_vv_i64m2_tu(__VA_ARGS__) |
| #define vmadd_vv_i64m2_m | ( | ... | ) | __riscv_vmadd_vv_i64m2_tumu(__VA_ARGS__) |
| #define vmadd_vv_i64m4 | ( | ... | ) | __riscv_vmadd_vv_i64m4_tu(__VA_ARGS__) |
| #define vmadd_vv_i64m4_m | ( | ... | ) | __riscv_vmadd_vv_i64m4_tumu(__VA_ARGS__) |
| #define vmadd_vv_i64m8 | ( | ... | ) | __riscv_vmadd_vv_i64m8_tu(__VA_ARGS__) |
| #define vmadd_vv_i64m8_m | ( | ... | ) | __riscv_vmadd_vv_i64m8_tumu(__VA_ARGS__) |
| #define vmadd_vv_i8m1 | ( | ... | ) | __riscv_vmadd_vv_i8m1_tu(__VA_ARGS__) |
| #define vmadd_vv_i8m1_m | ( | ... | ) | __riscv_vmadd_vv_i8m1_tumu(__VA_ARGS__) |
| #define vmadd_vv_i8m2 | ( | ... | ) | __riscv_vmadd_vv_i8m2_tu(__VA_ARGS__) |
| #define vmadd_vv_i8m2_m | ( | ... | ) | __riscv_vmadd_vv_i8m2_tumu(__VA_ARGS__) |
| #define vmadd_vv_i8m4 | ( | ... | ) | __riscv_vmadd_vv_i8m4_tu(__VA_ARGS__) |
| #define vmadd_vv_i8m4_m | ( | ... | ) | __riscv_vmadd_vv_i8m4_tumu(__VA_ARGS__) |
| #define vmadd_vv_i8m8 | ( | ... | ) | __riscv_vmadd_vv_i8m8_tu(__VA_ARGS__) |
| #define vmadd_vv_i8m8_m | ( | ... | ) | __riscv_vmadd_vv_i8m8_tumu(__VA_ARGS__) |
| #define vmadd_vv_i8mf2 | ( | ... | ) | __riscv_vmadd_vv_i8mf2_tu(__VA_ARGS__) |
| #define vmadd_vv_i8mf2_m | ( | ... | ) | __riscv_vmadd_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vmadd_vv_i8mf4 | ( | ... | ) | __riscv_vmadd_vv_i8mf4_tu(__VA_ARGS__) |
| #define vmadd_vv_i8mf4_m | ( | ... | ) | __riscv_vmadd_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vmadd_vv_i8mf8 | ( | ... | ) | __riscv_vmadd_vv_i8mf8_tu(__VA_ARGS__) |
| #define vmadd_vv_i8mf8_m | ( | ... | ) | __riscv_vmadd_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vmadd_vv_u16m1 | ( | ... | ) | __riscv_vmadd_vv_u16m1_tu(__VA_ARGS__) |
| #define vmadd_vv_u16m1_m | ( | ... | ) | __riscv_vmadd_vv_u16m1_tumu(__VA_ARGS__) |
| #define vmadd_vv_u16m2 | ( | ... | ) | __riscv_vmadd_vv_u16m2_tu(__VA_ARGS__) |
| #define vmadd_vv_u16m2_m | ( | ... | ) | __riscv_vmadd_vv_u16m2_tumu(__VA_ARGS__) |
| #define vmadd_vv_u16m4 | ( | ... | ) | __riscv_vmadd_vv_u16m4_tu(__VA_ARGS__) |
| #define vmadd_vv_u16m4_m | ( | ... | ) | __riscv_vmadd_vv_u16m4_tumu(__VA_ARGS__) |
| #define vmadd_vv_u16m8 | ( | ... | ) | __riscv_vmadd_vv_u16m8_tu(__VA_ARGS__) |
| #define vmadd_vv_u16m8_m | ( | ... | ) | __riscv_vmadd_vv_u16m8_tumu(__VA_ARGS__) |
| #define vmadd_vv_u16mf2 | ( | ... | ) | __riscv_vmadd_vv_u16mf2_tu(__VA_ARGS__) |
| #define vmadd_vv_u16mf2_m | ( | ... | ) | __riscv_vmadd_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vmadd_vv_u16mf4 | ( | ... | ) | __riscv_vmadd_vv_u16mf4_tu(__VA_ARGS__) |
| #define vmadd_vv_u16mf4_m | ( | ... | ) | __riscv_vmadd_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vmadd_vv_u32m1 | ( | ... | ) | __riscv_vmadd_vv_u32m1_tu(__VA_ARGS__) |
| #define vmadd_vv_u32m1_m | ( | ... | ) | __riscv_vmadd_vv_u32m1_tumu(__VA_ARGS__) |
| #define vmadd_vv_u32m2 | ( | ... | ) | __riscv_vmadd_vv_u32m2_tu(__VA_ARGS__) |
| #define vmadd_vv_u32m2_m | ( | ... | ) | __riscv_vmadd_vv_u32m2_tumu(__VA_ARGS__) |
| #define vmadd_vv_u32m4 | ( | ... | ) | __riscv_vmadd_vv_u32m4_tu(__VA_ARGS__) |
| #define vmadd_vv_u32m4_m | ( | ... | ) | __riscv_vmadd_vv_u32m4_tumu(__VA_ARGS__) |
| #define vmadd_vv_u32m8 | ( | ... | ) | __riscv_vmadd_vv_u32m8_tu(__VA_ARGS__) |
| #define vmadd_vv_u32m8_m | ( | ... | ) | __riscv_vmadd_vv_u32m8_tumu(__VA_ARGS__) |
| #define vmadd_vv_u32mf2 | ( | ... | ) | __riscv_vmadd_vv_u32mf2_tu(__VA_ARGS__) |
| #define vmadd_vv_u32mf2_m | ( | ... | ) | __riscv_vmadd_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vmadd_vv_u64m1 | ( | ... | ) | __riscv_vmadd_vv_u64m1_tu(__VA_ARGS__) |
| #define vmadd_vv_u64m1_m | ( | ... | ) | __riscv_vmadd_vv_u64m1_tumu(__VA_ARGS__) |
| #define vmadd_vv_u64m2 | ( | ... | ) | __riscv_vmadd_vv_u64m2_tu(__VA_ARGS__) |
| #define vmadd_vv_u64m2_m | ( | ... | ) | __riscv_vmadd_vv_u64m2_tumu(__VA_ARGS__) |
| #define vmadd_vv_u64m4 | ( | ... | ) | __riscv_vmadd_vv_u64m4_tu(__VA_ARGS__) |
| #define vmadd_vv_u64m4_m | ( | ... | ) | __riscv_vmadd_vv_u64m4_tumu(__VA_ARGS__) |
| #define vmadd_vv_u64m8 | ( | ... | ) | __riscv_vmadd_vv_u64m8_tu(__VA_ARGS__) |
| #define vmadd_vv_u64m8_m | ( | ... | ) | __riscv_vmadd_vv_u64m8_tumu(__VA_ARGS__) |
| #define vmadd_vv_u8m1 | ( | ... | ) | __riscv_vmadd_vv_u8m1_tu(__VA_ARGS__) |
| #define vmadd_vv_u8m1_m | ( | ... | ) | __riscv_vmadd_vv_u8m1_tumu(__VA_ARGS__) |
| #define vmadd_vv_u8m2 | ( | ... | ) | __riscv_vmadd_vv_u8m2_tu(__VA_ARGS__) |
| #define vmadd_vv_u8m2_m | ( | ... | ) | __riscv_vmadd_vv_u8m2_tumu(__VA_ARGS__) |
| #define vmadd_vv_u8m4 | ( | ... | ) | __riscv_vmadd_vv_u8m4_tu(__VA_ARGS__) |
| #define vmadd_vv_u8m4_m | ( | ... | ) | __riscv_vmadd_vv_u8m4_tumu(__VA_ARGS__) |
| #define vmadd_vv_u8m8 | ( | ... | ) | __riscv_vmadd_vv_u8m8_tu(__VA_ARGS__) |
| #define vmadd_vv_u8m8_m | ( | ... | ) | __riscv_vmadd_vv_u8m8_tumu(__VA_ARGS__) |
| #define vmadd_vv_u8mf2 | ( | ... | ) | __riscv_vmadd_vv_u8mf2_tu(__VA_ARGS__) |
| #define vmadd_vv_u8mf2_m | ( | ... | ) | __riscv_vmadd_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vmadd_vv_u8mf4 | ( | ... | ) | __riscv_vmadd_vv_u8mf4_tu(__VA_ARGS__) |
| #define vmadd_vv_u8mf4_m | ( | ... | ) | __riscv_vmadd_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vmadd_vv_u8mf8 | ( | ... | ) | __riscv_vmadd_vv_u8mf8_tu(__VA_ARGS__) |
| #define vmadd_vv_u8mf8_m | ( | ... | ) | __riscv_vmadd_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vmadd_vx_i16m1 | ( | ... | ) | __riscv_vmadd_vx_i16m1_tu(__VA_ARGS__) |
| #define vmadd_vx_i16m1_m | ( | ... | ) | __riscv_vmadd_vx_i16m1_tumu(__VA_ARGS__) |
| #define vmadd_vx_i16m2 | ( | ... | ) | __riscv_vmadd_vx_i16m2_tu(__VA_ARGS__) |
| #define vmadd_vx_i16m2_m | ( | ... | ) | __riscv_vmadd_vx_i16m2_tumu(__VA_ARGS__) |
| #define vmadd_vx_i16m4 | ( | ... | ) | __riscv_vmadd_vx_i16m4_tu(__VA_ARGS__) |
| #define vmadd_vx_i16m4_m | ( | ... | ) | __riscv_vmadd_vx_i16m4_tumu(__VA_ARGS__) |
| #define vmadd_vx_i16m8 | ( | ... | ) | __riscv_vmadd_vx_i16m8_tu(__VA_ARGS__) |
| #define vmadd_vx_i16m8_m | ( | ... | ) | __riscv_vmadd_vx_i16m8_tumu(__VA_ARGS__) |
| #define vmadd_vx_i16mf2 | ( | ... | ) | __riscv_vmadd_vx_i16mf2_tu(__VA_ARGS__) |
| #define vmadd_vx_i16mf2_m | ( | ... | ) | __riscv_vmadd_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vmadd_vx_i16mf4 | ( | ... | ) | __riscv_vmadd_vx_i16mf4_tu(__VA_ARGS__) |
| #define vmadd_vx_i16mf4_m | ( | ... | ) | __riscv_vmadd_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vmadd_vx_i32m1 | ( | ... | ) | __riscv_vmadd_vx_i32m1_tu(__VA_ARGS__) |
| #define vmadd_vx_i32m1_m | ( | ... | ) | __riscv_vmadd_vx_i32m1_tumu(__VA_ARGS__) |
| #define vmadd_vx_i32m2 | ( | ... | ) | __riscv_vmadd_vx_i32m2_tu(__VA_ARGS__) |
| #define vmadd_vx_i32m2_m | ( | ... | ) | __riscv_vmadd_vx_i32m2_tumu(__VA_ARGS__) |
| #define vmadd_vx_i32m4 | ( | ... | ) | __riscv_vmadd_vx_i32m4_tu(__VA_ARGS__) |
| #define vmadd_vx_i32m4_m | ( | ... | ) | __riscv_vmadd_vx_i32m4_tumu(__VA_ARGS__) |
| #define vmadd_vx_i32m8 | ( | ... | ) | __riscv_vmadd_vx_i32m8_tu(__VA_ARGS__) |
| #define vmadd_vx_i32m8_m | ( | ... | ) | __riscv_vmadd_vx_i32m8_tumu(__VA_ARGS__) |
| #define vmadd_vx_i32mf2 | ( | ... | ) | __riscv_vmadd_vx_i32mf2_tu(__VA_ARGS__) |
| #define vmadd_vx_i32mf2_m | ( | ... | ) | __riscv_vmadd_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vmadd_vx_i64m1 | ( | ... | ) | __riscv_vmadd_vx_i64m1_tu(__VA_ARGS__) |
| #define vmadd_vx_i64m1_m | ( | ... | ) | __riscv_vmadd_vx_i64m1_tumu(__VA_ARGS__) |
| #define vmadd_vx_i64m2 | ( | ... | ) | __riscv_vmadd_vx_i64m2_tu(__VA_ARGS__) |
| #define vmadd_vx_i64m2_m | ( | ... | ) | __riscv_vmadd_vx_i64m2_tumu(__VA_ARGS__) |
| #define vmadd_vx_i64m4 | ( | ... | ) | __riscv_vmadd_vx_i64m4_tu(__VA_ARGS__) |
| #define vmadd_vx_i64m4_m | ( | ... | ) | __riscv_vmadd_vx_i64m4_tumu(__VA_ARGS__) |
| #define vmadd_vx_i64m8 | ( | ... | ) | __riscv_vmadd_vx_i64m8_tu(__VA_ARGS__) |
| #define vmadd_vx_i64m8_m | ( | ... | ) | __riscv_vmadd_vx_i64m8_tumu(__VA_ARGS__) |
| #define vmadd_vx_i8m1 | ( | ... | ) | __riscv_vmadd_vx_i8m1_tu(__VA_ARGS__) |
| #define vmadd_vx_i8m1_m | ( | ... | ) | __riscv_vmadd_vx_i8m1_tumu(__VA_ARGS__) |
| #define vmadd_vx_i8m2 | ( | ... | ) | __riscv_vmadd_vx_i8m2_tu(__VA_ARGS__) |
| #define vmadd_vx_i8m2_m | ( | ... | ) | __riscv_vmadd_vx_i8m2_tumu(__VA_ARGS__) |
| #define vmadd_vx_i8m4 | ( | ... | ) | __riscv_vmadd_vx_i8m4_tu(__VA_ARGS__) |
| #define vmadd_vx_i8m4_m | ( | ... | ) | __riscv_vmadd_vx_i8m4_tumu(__VA_ARGS__) |
| #define vmadd_vx_i8m8 | ( | ... | ) | __riscv_vmadd_vx_i8m8_tu(__VA_ARGS__) |
| #define vmadd_vx_i8m8_m | ( | ... | ) | __riscv_vmadd_vx_i8m8_tumu(__VA_ARGS__) |
| #define vmadd_vx_i8mf2 | ( | ... | ) | __riscv_vmadd_vx_i8mf2_tu(__VA_ARGS__) |
| #define vmadd_vx_i8mf2_m | ( | ... | ) | __riscv_vmadd_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vmadd_vx_i8mf4 | ( | ... | ) | __riscv_vmadd_vx_i8mf4_tu(__VA_ARGS__) |
| #define vmadd_vx_i8mf4_m | ( | ... | ) | __riscv_vmadd_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vmadd_vx_i8mf8 | ( | ... | ) | __riscv_vmadd_vx_i8mf8_tu(__VA_ARGS__) |
| #define vmadd_vx_i8mf8_m | ( | ... | ) | __riscv_vmadd_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vmadd_vx_u16m1 | ( | ... | ) | __riscv_vmadd_vx_u16m1_tu(__VA_ARGS__) |
| #define vmadd_vx_u16m1_m | ( | ... | ) | __riscv_vmadd_vx_u16m1_tumu(__VA_ARGS__) |
| #define vmadd_vx_u16m2 | ( | ... | ) | __riscv_vmadd_vx_u16m2_tu(__VA_ARGS__) |
| #define vmadd_vx_u16m2_m | ( | ... | ) | __riscv_vmadd_vx_u16m2_tumu(__VA_ARGS__) |
| #define vmadd_vx_u16m4 | ( | ... | ) | __riscv_vmadd_vx_u16m4_tu(__VA_ARGS__) |
| #define vmadd_vx_u16m4_m | ( | ... | ) | __riscv_vmadd_vx_u16m4_tumu(__VA_ARGS__) |
| #define vmadd_vx_u16m8 | ( | ... | ) | __riscv_vmadd_vx_u16m8_tu(__VA_ARGS__) |
| #define vmadd_vx_u16m8_m | ( | ... | ) | __riscv_vmadd_vx_u16m8_tumu(__VA_ARGS__) |
| #define vmadd_vx_u16mf2 | ( | ... | ) | __riscv_vmadd_vx_u16mf2_tu(__VA_ARGS__) |
| #define vmadd_vx_u16mf2_m | ( | ... | ) | __riscv_vmadd_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vmadd_vx_u16mf4 | ( | ... | ) | __riscv_vmadd_vx_u16mf4_tu(__VA_ARGS__) |
| #define vmadd_vx_u16mf4_m | ( | ... | ) | __riscv_vmadd_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vmadd_vx_u32m1 | ( | ... | ) | __riscv_vmadd_vx_u32m1_tu(__VA_ARGS__) |
| #define vmadd_vx_u32m1_m | ( | ... | ) | __riscv_vmadd_vx_u32m1_tumu(__VA_ARGS__) |
| #define vmadd_vx_u32m2 | ( | ... | ) | __riscv_vmadd_vx_u32m2_tu(__VA_ARGS__) |
| #define vmadd_vx_u32m2_m | ( | ... | ) | __riscv_vmadd_vx_u32m2_tumu(__VA_ARGS__) |
| #define vmadd_vx_u32m4 | ( | ... | ) | __riscv_vmadd_vx_u32m4_tu(__VA_ARGS__) |
| #define vmadd_vx_u32m4_m | ( | ... | ) | __riscv_vmadd_vx_u32m4_tumu(__VA_ARGS__) |
| #define vmadd_vx_u32m8 | ( | ... | ) | __riscv_vmadd_vx_u32m8_tu(__VA_ARGS__) |
| #define vmadd_vx_u32m8_m | ( | ... | ) | __riscv_vmadd_vx_u32m8_tumu(__VA_ARGS__) |
| #define vmadd_vx_u32mf2 | ( | ... | ) | __riscv_vmadd_vx_u32mf2_tu(__VA_ARGS__) |
| #define vmadd_vx_u32mf2_m | ( | ... | ) | __riscv_vmadd_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vmadd_vx_u64m1 | ( | ... | ) | __riscv_vmadd_vx_u64m1_tu(__VA_ARGS__) |
| #define vmadd_vx_u64m1_m | ( | ... | ) | __riscv_vmadd_vx_u64m1_tumu(__VA_ARGS__) |
| #define vmadd_vx_u64m2 | ( | ... | ) | __riscv_vmadd_vx_u64m2_tu(__VA_ARGS__) |
| #define vmadd_vx_u64m2_m | ( | ... | ) | __riscv_vmadd_vx_u64m2_tumu(__VA_ARGS__) |
| #define vmadd_vx_u64m4 | ( | ... | ) | __riscv_vmadd_vx_u64m4_tu(__VA_ARGS__) |
| #define vmadd_vx_u64m4_m | ( | ... | ) | __riscv_vmadd_vx_u64m4_tumu(__VA_ARGS__) |
| #define vmadd_vx_u64m8 | ( | ... | ) | __riscv_vmadd_vx_u64m8_tu(__VA_ARGS__) |
| #define vmadd_vx_u64m8_m | ( | ... | ) | __riscv_vmadd_vx_u64m8_tumu(__VA_ARGS__) |
| #define vmadd_vx_u8m1 | ( | ... | ) | __riscv_vmadd_vx_u8m1_tu(__VA_ARGS__) |
| #define vmadd_vx_u8m1_m | ( | ... | ) | __riscv_vmadd_vx_u8m1_tumu(__VA_ARGS__) |
| #define vmadd_vx_u8m2 | ( | ... | ) | __riscv_vmadd_vx_u8m2_tu(__VA_ARGS__) |
| #define vmadd_vx_u8m2_m | ( | ... | ) | __riscv_vmadd_vx_u8m2_tumu(__VA_ARGS__) |
| #define vmadd_vx_u8m4 | ( | ... | ) | __riscv_vmadd_vx_u8m4_tu(__VA_ARGS__) |
| #define vmadd_vx_u8m4_m | ( | ... | ) | __riscv_vmadd_vx_u8m4_tumu(__VA_ARGS__) |
| #define vmadd_vx_u8m8 | ( | ... | ) | __riscv_vmadd_vx_u8m8_tu(__VA_ARGS__) |
| #define vmadd_vx_u8m8_m | ( | ... | ) | __riscv_vmadd_vx_u8m8_tumu(__VA_ARGS__) |
| #define vmadd_vx_u8mf2 | ( | ... | ) | __riscv_vmadd_vx_u8mf2_tu(__VA_ARGS__) |
| #define vmadd_vx_u8mf2_m | ( | ... | ) | __riscv_vmadd_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vmadd_vx_u8mf4 | ( | ... | ) | __riscv_vmadd_vx_u8mf4_tu(__VA_ARGS__) |
| #define vmadd_vx_u8mf4_m | ( | ... | ) | __riscv_vmadd_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vmadd_vx_u8mf8 | ( | ... | ) | __riscv_vmadd_vx_u8mf8_tu(__VA_ARGS__) |
| #define vmadd_vx_u8mf8_m | ( | ... | ) | __riscv_vmadd_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vmand_mm_b1 | ( | ... | ) | __riscv_vmand_mm_b1(__VA_ARGS__) |
| #define vmand_mm_b16 | ( | ... | ) | __riscv_vmand_mm_b16(__VA_ARGS__) |
| #define vmand_mm_b2 | ( | ... | ) | __riscv_vmand_mm_b2(__VA_ARGS__) |
| #define vmand_mm_b32 | ( | ... | ) | __riscv_vmand_mm_b32(__VA_ARGS__) |
| #define vmand_mm_b4 | ( | ... | ) | __riscv_vmand_mm_b4(__VA_ARGS__) |
| #define vmand_mm_b64 | ( | ... | ) | __riscv_vmand_mm_b64(__VA_ARGS__) |
| #define vmand_mm_b8 | ( | ... | ) | __riscv_vmand_mm_b8(__VA_ARGS__) |
| #define vmandn_mm_b1 | ( | ... | ) | __riscv_vmandn_mm_b1(__VA_ARGS__) |
| #define vmandn_mm_b16 | ( | ... | ) | __riscv_vmandn_mm_b16(__VA_ARGS__) |
| #define vmandn_mm_b2 | ( | ... | ) | __riscv_vmandn_mm_b2(__VA_ARGS__) |
| #define vmandn_mm_b32 | ( | ... | ) | __riscv_vmandn_mm_b32(__VA_ARGS__) |
| #define vmandn_mm_b4 | ( | ... | ) | __riscv_vmandn_mm_b4(__VA_ARGS__) |
| #define vmandn_mm_b64 | ( | ... | ) | __riscv_vmandn_mm_b64(__VA_ARGS__) |
| #define vmandn_mm_b8 | ( | ... | ) | __riscv_vmandn_mm_b8(__VA_ARGS__) |
| #define vmax_vv_i16m1 | ( | ... | ) | __riscv_vmax_vv_i16m1(__VA_ARGS__) |
| #define vmax_vv_i16m1_m | ( | ... | ) | __riscv_vmax_vv_i16m1_tumu(__VA_ARGS__) |
| #define vmax_vv_i16m2 | ( | ... | ) | __riscv_vmax_vv_i16m2(__VA_ARGS__) |
| #define vmax_vv_i16m2_m | ( | ... | ) | __riscv_vmax_vv_i16m2_tumu(__VA_ARGS__) |
| #define vmax_vv_i16m4 | ( | ... | ) | __riscv_vmax_vv_i16m4(__VA_ARGS__) |
| #define vmax_vv_i16m4_m | ( | ... | ) | __riscv_vmax_vv_i16m4_tumu(__VA_ARGS__) |
| #define vmax_vv_i16m8 | ( | ... | ) | __riscv_vmax_vv_i16m8(__VA_ARGS__) |
| #define vmax_vv_i16m8_m | ( | ... | ) | __riscv_vmax_vv_i16m8_tumu(__VA_ARGS__) |
| #define vmax_vv_i16mf2 | ( | ... | ) | __riscv_vmax_vv_i16mf2(__VA_ARGS__) |
| #define vmax_vv_i16mf2_m | ( | ... | ) | __riscv_vmax_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vmax_vv_i16mf4 | ( | ... | ) | __riscv_vmax_vv_i16mf4(__VA_ARGS__) |
| #define vmax_vv_i16mf4_m | ( | ... | ) | __riscv_vmax_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vmax_vv_i32m1 | ( | ... | ) | __riscv_vmax_vv_i32m1(__VA_ARGS__) |
| #define vmax_vv_i32m1_m | ( | ... | ) | __riscv_vmax_vv_i32m1_tumu(__VA_ARGS__) |
| #define vmax_vv_i32m2 | ( | ... | ) | __riscv_vmax_vv_i32m2(__VA_ARGS__) |
| #define vmax_vv_i32m2_m | ( | ... | ) | __riscv_vmax_vv_i32m2_tumu(__VA_ARGS__) |
| #define vmax_vv_i32m4 | ( | ... | ) | __riscv_vmax_vv_i32m4(__VA_ARGS__) |
| #define vmax_vv_i32m4_m | ( | ... | ) | __riscv_vmax_vv_i32m4_tumu(__VA_ARGS__) |
| #define vmax_vv_i32m8 | ( | ... | ) | __riscv_vmax_vv_i32m8(__VA_ARGS__) |
| #define vmax_vv_i32m8_m | ( | ... | ) | __riscv_vmax_vv_i32m8_tumu(__VA_ARGS__) |
| #define vmax_vv_i32mf2 | ( | ... | ) | __riscv_vmax_vv_i32mf2(__VA_ARGS__) |
| #define vmax_vv_i32mf2_m | ( | ... | ) | __riscv_vmax_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vmax_vv_i64m1 | ( | ... | ) | __riscv_vmax_vv_i64m1(__VA_ARGS__) |
| #define vmax_vv_i64m1_m | ( | ... | ) | __riscv_vmax_vv_i64m1_tumu(__VA_ARGS__) |
| #define vmax_vv_i64m2 | ( | ... | ) | __riscv_vmax_vv_i64m2(__VA_ARGS__) |
| #define vmax_vv_i64m2_m | ( | ... | ) | __riscv_vmax_vv_i64m2_tumu(__VA_ARGS__) |
| #define vmax_vv_i64m4 | ( | ... | ) | __riscv_vmax_vv_i64m4(__VA_ARGS__) |
| #define vmax_vv_i64m4_m | ( | ... | ) | __riscv_vmax_vv_i64m4_tumu(__VA_ARGS__) |
| #define vmax_vv_i64m8 | ( | ... | ) | __riscv_vmax_vv_i64m8(__VA_ARGS__) |
| #define vmax_vv_i64m8_m | ( | ... | ) | __riscv_vmax_vv_i64m8_tumu(__VA_ARGS__) |
| #define vmax_vv_i8m1 | ( | ... | ) | __riscv_vmax_vv_i8m1(__VA_ARGS__) |
| #define vmax_vv_i8m1_m | ( | ... | ) | __riscv_vmax_vv_i8m1_tumu(__VA_ARGS__) |
| #define vmax_vv_i8m2 | ( | ... | ) | __riscv_vmax_vv_i8m2(__VA_ARGS__) |
| #define vmax_vv_i8m2_m | ( | ... | ) | __riscv_vmax_vv_i8m2_tumu(__VA_ARGS__) |
| #define vmax_vv_i8m4 | ( | ... | ) | __riscv_vmax_vv_i8m4(__VA_ARGS__) |
| #define vmax_vv_i8m4_m | ( | ... | ) | __riscv_vmax_vv_i8m4_tumu(__VA_ARGS__) |
| #define vmax_vv_i8m8 | ( | ... | ) | __riscv_vmax_vv_i8m8(__VA_ARGS__) |
| #define vmax_vv_i8m8_m | ( | ... | ) | __riscv_vmax_vv_i8m8_tumu(__VA_ARGS__) |
| #define vmax_vv_i8mf2 | ( | ... | ) | __riscv_vmax_vv_i8mf2(__VA_ARGS__) |
| #define vmax_vv_i8mf2_m | ( | ... | ) | __riscv_vmax_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vmax_vv_i8mf4 | ( | ... | ) | __riscv_vmax_vv_i8mf4(__VA_ARGS__) |
| #define vmax_vv_i8mf4_m | ( | ... | ) | __riscv_vmax_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vmax_vv_i8mf8 | ( | ... | ) | __riscv_vmax_vv_i8mf8(__VA_ARGS__) |
| #define vmax_vv_i8mf8_m | ( | ... | ) | __riscv_vmax_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vmax_vx_i16m1 | ( | ... | ) | __riscv_vmax_vx_i16m1(__VA_ARGS__) |
| #define vmax_vx_i16m1_m | ( | ... | ) | __riscv_vmax_vx_i16m1_tumu(__VA_ARGS__) |
| #define vmax_vx_i16m2 | ( | ... | ) | __riscv_vmax_vx_i16m2(__VA_ARGS__) |
| #define vmax_vx_i16m2_m | ( | ... | ) | __riscv_vmax_vx_i16m2_tumu(__VA_ARGS__) |
| #define vmax_vx_i16m4 | ( | ... | ) | __riscv_vmax_vx_i16m4(__VA_ARGS__) |
| #define vmax_vx_i16m4_m | ( | ... | ) | __riscv_vmax_vx_i16m4_tumu(__VA_ARGS__) |
| #define vmax_vx_i16m8 | ( | ... | ) | __riscv_vmax_vx_i16m8(__VA_ARGS__) |
| #define vmax_vx_i16m8_m | ( | ... | ) | __riscv_vmax_vx_i16m8_tumu(__VA_ARGS__) |
| #define vmax_vx_i16mf2 | ( | ... | ) | __riscv_vmax_vx_i16mf2(__VA_ARGS__) |
| #define vmax_vx_i16mf2_m | ( | ... | ) | __riscv_vmax_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vmax_vx_i16mf4 | ( | ... | ) | __riscv_vmax_vx_i16mf4(__VA_ARGS__) |
| #define vmax_vx_i16mf4_m | ( | ... | ) | __riscv_vmax_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vmax_vx_i32m1 | ( | ... | ) | __riscv_vmax_vx_i32m1(__VA_ARGS__) |
| #define vmax_vx_i32m1_m | ( | ... | ) | __riscv_vmax_vx_i32m1_tumu(__VA_ARGS__) |
| #define vmax_vx_i32m2 | ( | ... | ) | __riscv_vmax_vx_i32m2(__VA_ARGS__) |
| #define vmax_vx_i32m2_m | ( | ... | ) | __riscv_vmax_vx_i32m2_tumu(__VA_ARGS__) |
| #define vmax_vx_i32m4 | ( | ... | ) | __riscv_vmax_vx_i32m4(__VA_ARGS__) |
| #define vmax_vx_i32m4_m | ( | ... | ) | __riscv_vmax_vx_i32m4_tumu(__VA_ARGS__) |
| #define vmax_vx_i32m8 | ( | ... | ) | __riscv_vmax_vx_i32m8(__VA_ARGS__) |
| #define vmax_vx_i32m8_m | ( | ... | ) | __riscv_vmax_vx_i32m8_tumu(__VA_ARGS__) |
| #define vmax_vx_i32mf2 | ( | ... | ) | __riscv_vmax_vx_i32mf2(__VA_ARGS__) |
| #define vmax_vx_i32mf2_m | ( | ... | ) | __riscv_vmax_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vmax_vx_i64m1 | ( | ... | ) | __riscv_vmax_vx_i64m1(__VA_ARGS__) |
| #define vmax_vx_i64m1_m | ( | ... | ) | __riscv_vmax_vx_i64m1_tumu(__VA_ARGS__) |
| #define vmax_vx_i64m2 | ( | ... | ) | __riscv_vmax_vx_i64m2(__VA_ARGS__) |
| #define vmax_vx_i64m2_m | ( | ... | ) | __riscv_vmax_vx_i64m2_tumu(__VA_ARGS__) |
| #define vmax_vx_i64m4 | ( | ... | ) | __riscv_vmax_vx_i64m4(__VA_ARGS__) |
| #define vmax_vx_i64m4_m | ( | ... | ) | __riscv_vmax_vx_i64m4_tumu(__VA_ARGS__) |
| #define vmax_vx_i64m8 | ( | ... | ) | __riscv_vmax_vx_i64m8(__VA_ARGS__) |
| #define vmax_vx_i64m8_m | ( | ... | ) | __riscv_vmax_vx_i64m8_tumu(__VA_ARGS__) |
| #define vmax_vx_i8m1 | ( | ... | ) | __riscv_vmax_vx_i8m1(__VA_ARGS__) |
| #define vmax_vx_i8m1_m | ( | ... | ) | __riscv_vmax_vx_i8m1_tumu(__VA_ARGS__) |
| #define vmax_vx_i8m2 | ( | ... | ) | __riscv_vmax_vx_i8m2(__VA_ARGS__) |
| #define vmax_vx_i8m2_m | ( | ... | ) | __riscv_vmax_vx_i8m2_tumu(__VA_ARGS__) |
| #define vmax_vx_i8m4 | ( | ... | ) | __riscv_vmax_vx_i8m4(__VA_ARGS__) |
| #define vmax_vx_i8m4_m | ( | ... | ) | __riscv_vmax_vx_i8m4_tumu(__VA_ARGS__) |
| #define vmax_vx_i8m8 | ( | ... | ) | __riscv_vmax_vx_i8m8(__VA_ARGS__) |
| #define vmax_vx_i8m8_m | ( | ... | ) | __riscv_vmax_vx_i8m8_tumu(__VA_ARGS__) |
| #define vmax_vx_i8mf2 | ( | ... | ) | __riscv_vmax_vx_i8mf2(__VA_ARGS__) |
| #define vmax_vx_i8mf2_m | ( | ... | ) | __riscv_vmax_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vmax_vx_i8mf4 | ( | ... | ) | __riscv_vmax_vx_i8mf4(__VA_ARGS__) |
| #define vmax_vx_i8mf4_m | ( | ... | ) | __riscv_vmax_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vmax_vx_i8mf8 | ( | ... | ) | __riscv_vmax_vx_i8mf8(__VA_ARGS__) |
| #define vmax_vx_i8mf8_m | ( | ... | ) | __riscv_vmax_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u16m1 | ( | ... | ) | __riscv_vmaxu_vv_u16m1(__VA_ARGS__) |
| #define vmaxu_vv_u16m1_m | ( | ... | ) | __riscv_vmaxu_vv_u16m1_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u16m2 | ( | ... | ) | __riscv_vmaxu_vv_u16m2(__VA_ARGS__) |
| #define vmaxu_vv_u16m2_m | ( | ... | ) | __riscv_vmaxu_vv_u16m2_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u16m4 | ( | ... | ) | __riscv_vmaxu_vv_u16m4(__VA_ARGS__) |
| #define vmaxu_vv_u16m4_m | ( | ... | ) | __riscv_vmaxu_vv_u16m4_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u16m8 | ( | ... | ) | __riscv_vmaxu_vv_u16m8(__VA_ARGS__) |
| #define vmaxu_vv_u16m8_m | ( | ... | ) | __riscv_vmaxu_vv_u16m8_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u16mf2 | ( | ... | ) | __riscv_vmaxu_vv_u16mf2(__VA_ARGS__) |
| #define vmaxu_vv_u16mf2_m | ( | ... | ) | __riscv_vmaxu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u16mf4 | ( | ... | ) | __riscv_vmaxu_vv_u16mf4(__VA_ARGS__) |
| #define vmaxu_vv_u16mf4_m | ( | ... | ) | __riscv_vmaxu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u32m1 | ( | ... | ) | __riscv_vmaxu_vv_u32m1(__VA_ARGS__) |
| #define vmaxu_vv_u32m1_m | ( | ... | ) | __riscv_vmaxu_vv_u32m1_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u32m2 | ( | ... | ) | __riscv_vmaxu_vv_u32m2(__VA_ARGS__) |
| #define vmaxu_vv_u32m2_m | ( | ... | ) | __riscv_vmaxu_vv_u32m2_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u32m4 | ( | ... | ) | __riscv_vmaxu_vv_u32m4(__VA_ARGS__) |
| #define vmaxu_vv_u32m4_m | ( | ... | ) | __riscv_vmaxu_vv_u32m4_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u32m8 | ( | ... | ) | __riscv_vmaxu_vv_u32m8(__VA_ARGS__) |
| #define vmaxu_vv_u32m8_m | ( | ... | ) | __riscv_vmaxu_vv_u32m8_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u32mf2 | ( | ... | ) | __riscv_vmaxu_vv_u32mf2(__VA_ARGS__) |
| #define vmaxu_vv_u32mf2_m | ( | ... | ) | __riscv_vmaxu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u64m1 | ( | ... | ) | __riscv_vmaxu_vv_u64m1(__VA_ARGS__) |
| #define vmaxu_vv_u64m1_m | ( | ... | ) | __riscv_vmaxu_vv_u64m1_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u64m2 | ( | ... | ) | __riscv_vmaxu_vv_u64m2(__VA_ARGS__) |
| #define vmaxu_vv_u64m2_m | ( | ... | ) | __riscv_vmaxu_vv_u64m2_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u64m4 | ( | ... | ) | __riscv_vmaxu_vv_u64m4(__VA_ARGS__) |
| #define vmaxu_vv_u64m4_m | ( | ... | ) | __riscv_vmaxu_vv_u64m4_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u64m8 | ( | ... | ) | __riscv_vmaxu_vv_u64m8(__VA_ARGS__) |
| #define vmaxu_vv_u64m8_m | ( | ... | ) | __riscv_vmaxu_vv_u64m8_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u8m1 | ( | ... | ) | __riscv_vmaxu_vv_u8m1(__VA_ARGS__) |
| #define vmaxu_vv_u8m1_m | ( | ... | ) | __riscv_vmaxu_vv_u8m1_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u8m2 | ( | ... | ) | __riscv_vmaxu_vv_u8m2(__VA_ARGS__) |
| #define vmaxu_vv_u8m2_m | ( | ... | ) | __riscv_vmaxu_vv_u8m2_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u8m4 | ( | ... | ) | __riscv_vmaxu_vv_u8m4(__VA_ARGS__) |
| #define vmaxu_vv_u8m4_m | ( | ... | ) | __riscv_vmaxu_vv_u8m4_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u8m8 | ( | ... | ) | __riscv_vmaxu_vv_u8m8(__VA_ARGS__) |
| #define vmaxu_vv_u8m8_m | ( | ... | ) | __riscv_vmaxu_vv_u8m8_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u8mf2 | ( | ... | ) | __riscv_vmaxu_vv_u8mf2(__VA_ARGS__) |
| #define vmaxu_vv_u8mf2_m | ( | ... | ) | __riscv_vmaxu_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u8mf4 | ( | ... | ) | __riscv_vmaxu_vv_u8mf4(__VA_ARGS__) |
| #define vmaxu_vv_u8mf4_m | ( | ... | ) | __riscv_vmaxu_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vmaxu_vv_u8mf8 | ( | ... | ) | __riscv_vmaxu_vv_u8mf8(__VA_ARGS__) |
| #define vmaxu_vv_u8mf8_m | ( | ... | ) | __riscv_vmaxu_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u16m1 | ( | ... | ) | __riscv_vmaxu_vx_u16m1(__VA_ARGS__) |
| #define vmaxu_vx_u16m1_m | ( | ... | ) | __riscv_vmaxu_vx_u16m1_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u16m2 | ( | ... | ) | __riscv_vmaxu_vx_u16m2(__VA_ARGS__) |
| #define vmaxu_vx_u16m2_m | ( | ... | ) | __riscv_vmaxu_vx_u16m2_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u16m4 | ( | ... | ) | __riscv_vmaxu_vx_u16m4(__VA_ARGS__) |
| #define vmaxu_vx_u16m4_m | ( | ... | ) | __riscv_vmaxu_vx_u16m4_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u16m8 | ( | ... | ) | __riscv_vmaxu_vx_u16m8(__VA_ARGS__) |
| #define vmaxu_vx_u16m8_m | ( | ... | ) | __riscv_vmaxu_vx_u16m8_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u16mf2 | ( | ... | ) | __riscv_vmaxu_vx_u16mf2(__VA_ARGS__) |
| #define vmaxu_vx_u16mf2_m | ( | ... | ) | __riscv_vmaxu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u16mf4 | ( | ... | ) | __riscv_vmaxu_vx_u16mf4(__VA_ARGS__) |
| #define vmaxu_vx_u16mf4_m | ( | ... | ) | __riscv_vmaxu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u32m1 | ( | ... | ) | __riscv_vmaxu_vx_u32m1(__VA_ARGS__) |
| #define vmaxu_vx_u32m1_m | ( | ... | ) | __riscv_vmaxu_vx_u32m1_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u32m2 | ( | ... | ) | __riscv_vmaxu_vx_u32m2(__VA_ARGS__) |
| #define vmaxu_vx_u32m2_m | ( | ... | ) | __riscv_vmaxu_vx_u32m2_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u32m4 | ( | ... | ) | __riscv_vmaxu_vx_u32m4(__VA_ARGS__) |
| #define vmaxu_vx_u32m4_m | ( | ... | ) | __riscv_vmaxu_vx_u32m4_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u32m8 | ( | ... | ) | __riscv_vmaxu_vx_u32m8(__VA_ARGS__) |
| #define vmaxu_vx_u32m8_m | ( | ... | ) | __riscv_vmaxu_vx_u32m8_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u32mf2 | ( | ... | ) | __riscv_vmaxu_vx_u32mf2(__VA_ARGS__) |
| #define vmaxu_vx_u32mf2_m | ( | ... | ) | __riscv_vmaxu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u64m1 | ( | ... | ) | __riscv_vmaxu_vx_u64m1(__VA_ARGS__) |
| #define vmaxu_vx_u64m1_m | ( | ... | ) | __riscv_vmaxu_vx_u64m1_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u64m2 | ( | ... | ) | __riscv_vmaxu_vx_u64m2(__VA_ARGS__) |
| #define vmaxu_vx_u64m2_m | ( | ... | ) | __riscv_vmaxu_vx_u64m2_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u64m4 | ( | ... | ) | __riscv_vmaxu_vx_u64m4(__VA_ARGS__) |
| #define vmaxu_vx_u64m4_m | ( | ... | ) | __riscv_vmaxu_vx_u64m4_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u64m8 | ( | ... | ) | __riscv_vmaxu_vx_u64m8(__VA_ARGS__) |
| #define vmaxu_vx_u64m8_m | ( | ... | ) | __riscv_vmaxu_vx_u64m8_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u8m1 | ( | ... | ) | __riscv_vmaxu_vx_u8m1(__VA_ARGS__) |
| #define vmaxu_vx_u8m1_m | ( | ... | ) | __riscv_vmaxu_vx_u8m1_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u8m2 | ( | ... | ) | __riscv_vmaxu_vx_u8m2(__VA_ARGS__) |
| #define vmaxu_vx_u8m2_m | ( | ... | ) | __riscv_vmaxu_vx_u8m2_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u8m4 | ( | ... | ) | __riscv_vmaxu_vx_u8m4(__VA_ARGS__) |
| #define vmaxu_vx_u8m4_m | ( | ... | ) | __riscv_vmaxu_vx_u8m4_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u8m8 | ( | ... | ) | __riscv_vmaxu_vx_u8m8(__VA_ARGS__) |
| #define vmaxu_vx_u8m8_m | ( | ... | ) | __riscv_vmaxu_vx_u8m8_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u8mf2 | ( | ... | ) | __riscv_vmaxu_vx_u8mf2(__VA_ARGS__) |
| #define vmaxu_vx_u8mf2_m | ( | ... | ) | __riscv_vmaxu_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u8mf4 | ( | ... | ) | __riscv_vmaxu_vx_u8mf4(__VA_ARGS__) |
| #define vmaxu_vx_u8mf4_m | ( | ... | ) | __riscv_vmaxu_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vmaxu_vx_u8mf8 | ( | ... | ) | __riscv_vmaxu_vx_u8mf8(__VA_ARGS__) |
| #define vmaxu_vx_u8mf8_m | ( | ... | ) | __riscv_vmaxu_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vmclr_m_b1 | ( | ... | ) | __riscv_vmclr_m_b1(__VA_ARGS__) |
| #define vmclr_m_b16 | ( | ... | ) | __riscv_vmclr_m_b16(__VA_ARGS__) |
| #define vmclr_m_b2 | ( | ... | ) | __riscv_vmclr_m_b2(__VA_ARGS__) |
| #define vmclr_m_b32 | ( | ... | ) | __riscv_vmclr_m_b32(__VA_ARGS__) |
| #define vmclr_m_b4 | ( | ... | ) | __riscv_vmclr_m_b4(__VA_ARGS__) |
| #define vmclr_m_b64 | ( | ... | ) | __riscv_vmclr_m_b64(__VA_ARGS__) |
| #define vmclr_m_b8 | ( | ... | ) | __riscv_vmclr_m_b8(__VA_ARGS__) |
| #define vmerge_vvm_f16m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_f16m1((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_f16m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_f16m2((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_f16m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_f16m4((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_f16m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_f16m8((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_f16mf2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_f16mf2((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_f16mf4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_f16mf4((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_f32m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_f32m1((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_f32m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_f32m2((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_f32m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_f32m4((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_f32m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_f32m8((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_f32mf2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_f32mf2((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_f64m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_f64m1((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_f64m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_f64m2((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_f64m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_f64m4((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_f64m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_f64m8((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i16m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i16m1((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i16m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i16m2((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i16m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i16m4((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i16m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i16m8((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i16mf2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i16mf2((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i16mf4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i16mf4((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i32m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i32m1((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i32m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i32m2((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i32m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i32m4((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i32m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i32m8((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i32mf2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i32mf2((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i64m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i64m1((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i64m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i64m2((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i64m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i64m4((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i64m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i64m8((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i8m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i8m1((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i8m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i8m2((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i8m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i8m4((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i8m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i8m8((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i8mf2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i8mf2((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i8mf4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i8mf4((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_i8mf8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_i8mf8((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u16m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u16m1((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u16m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u16m2((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u16m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u16m4((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u16m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u16m8((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u16mf2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u16mf2((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u16mf4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u16mf4((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u32m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u32m1((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u32m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u32m2((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u32m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u32m4((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u32m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u32m8((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u32mf2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u32mf2((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u64m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u64m1((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u64m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u64m2((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u64m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u64m4((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u64m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u64m8((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u8m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u8m1((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u8m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u8m2((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u8m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u8m4((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u8m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u8m8((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u8mf2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u8mf2((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u8mf4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u8mf4((op1), (op2), (mask), (vl)) |
| #define vmerge_vvm_u8mf8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vvm_u8mf8((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i16m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i16m1((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i16m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i16m2((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i16m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i16m4((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i16m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i16m8((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i16mf2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i16mf2((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i16mf4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i16mf4((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i32m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i32m1((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i32m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i32m2((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i32m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i32m4((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i32m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i32m8((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i32mf2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i32mf2((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i64m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i64m1((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i64m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i64m2((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i64m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i64m4((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i64m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i64m8((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i8m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i8m1((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i8m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i8m2((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i8m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i8m4((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i8m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i8m8((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i8mf2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i8mf2((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i8mf4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i8mf4((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_i8mf8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_i8mf8((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u16m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u16m1((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u16m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u16m2((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u16m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u16m4((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u16m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u16m8((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u16mf2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u16mf2((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u16mf4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u16mf4((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u32m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u32m1((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u32m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u32m2((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u32m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u32m4((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u32m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u32m8((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u32mf2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u32mf2((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u64m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u64m1((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u64m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u64m2((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u64m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u64m4((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u64m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u64m8((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u8m1 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u8m1((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u8m2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u8m2((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u8m4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u8m4((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u8m8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u8m8((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u8mf2 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u8mf2((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u8mf4 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u8mf4((op1), (op2), (mask), (vl)) |
| #define vmerge_vxm_u8mf8 | ( | mask, | |
| op1, | |||
| op2, | |||
| vl | |||
| ) | __riscv_vmerge_vxm_u8mf8((op1), (op2), (mask), (vl)) |
| #define vmfeq_vf_f16m1_b16 | ( | ... | ) | __riscv_vmfeq_vf_f16m1_b16(__VA_ARGS__) |
| #define vmfeq_vf_f16m1_b16_m | ( | ... | ) | __riscv_vmfeq_vf_f16m1_b16_mu(__VA_ARGS__) |
| #define vmfeq_vf_f16m2_b8 | ( | ... | ) | __riscv_vmfeq_vf_f16m2_b8(__VA_ARGS__) |
| #define vmfeq_vf_f16m2_b8_m | ( | ... | ) | __riscv_vmfeq_vf_f16m2_b8_mu(__VA_ARGS__) |
| #define vmfeq_vf_f16m4_b4 | ( | ... | ) | __riscv_vmfeq_vf_f16m4_b4(__VA_ARGS__) |
| #define vmfeq_vf_f16m4_b4_m | ( | ... | ) | __riscv_vmfeq_vf_f16m4_b4_mu(__VA_ARGS__) |
| #define vmfeq_vf_f16m8_b2 | ( | ... | ) | __riscv_vmfeq_vf_f16m8_b2(__VA_ARGS__) |
| #define vmfeq_vf_f16m8_b2_m | ( | ... | ) | __riscv_vmfeq_vf_f16m8_b2_mu(__VA_ARGS__) |
| #define vmfeq_vf_f16mf2_b32 | ( | ... | ) | __riscv_vmfeq_vf_f16mf2_b32(__VA_ARGS__) |
| #define vmfeq_vf_f16mf2_b32_m | ( | ... | ) | __riscv_vmfeq_vf_f16mf2_b32_mu(__VA_ARGS__) |
| #define vmfeq_vf_f16mf4_b64 | ( | ... | ) | __riscv_vmfeq_vf_f16mf4_b64(__VA_ARGS__) |
| #define vmfeq_vf_f16mf4_b64_m | ( | ... | ) | __riscv_vmfeq_vf_f16mf4_b64_mu(__VA_ARGS__) |
| #define vmfeq_vf_f32m1_b32 | ( | ... | ) | __riscv_vmfeq_vf_f32m1_b32(__VA_ARGS__) |
| #define vmfeq_vf_f32m1_b32_m | ( | ... | ) | __riscv_vmfeq_vf_f32m1_b32_mu(__VA_ARGS__) |
| #define vmfeq_vf_f32m2_b16 | ( | ... | ) | __riscv_vmfeq_vf_f32m2_b16(__VA_ARGS__) |
| #define vmfeq_vf_f32m2_b16_m | ( | ... | ) | __riscv_vmfeq_vf_f32m2_b16_mu(__VA_ARGS__) |
| #define vmfeq_vf_f32m4_b8 | ( | ... | ) | __riscv_vmfeq_vf_f32m4_b8(__VA_ARGS__) |
| #define vmfeq_vf_f32m4_b8_m | ( | ... | ) | __riscv_vmfeq_vf_f32m4_b8_mu(__VA_ARGS__) |
| #define vmfeq_vf_f32m8_b4 | ( | ... | ) | __riscv_vmfeq_vf_f32m8_b4(__VA_ARGS__) |
| #define vmfeq_vf_f32m8_b4_m | ( | ... | ) | __riscv_vmfeq_vf_f32m8_b4_mu(__VA_ARGS__) |
| #define vmfeq_vf_f32mf2_b64 | ( | ... | ) | __riscv_vmfeq_vf_f32mf2_b64(__VA_ARGS__) |
| #define vmfeq_vf_f32mf2_b64_m | ( | ... | ) | __riscv_vmfeq_vf_f32mf2_b64_mu(__VA_ARGS__) |
| #define vmfeq_vf_f64m1_b64 | ( | ... | ) | __riscv_vmfeq_vf_f64m1_b64(__VA_ARGS__) |
| #define vmfeq_vf_f64m1_b64_m | ( | ... | ) | __riscv_vmfeq_vf_f64m1_b64_mu(__VA_ARGS__) |
| #define vmfeq_vf_f64m2_b32 | ( | ... | ) | __riscv_vmfeq_vf_f64m2_b32(__VA_ARGS__) |
| #define vmfeq_vf_f64m2_b32_m | ( | ... | ) | __riscv_vmfeq_vf_f64m2_b32_mu(__VA_ARGS__) |
| #define vmfeq_vf_f64m4_b16 | ( | ... | ) | __riscv_vmfeq_vf_f64m4_b16(__VA_ARGS__) |
| #define vmfeq_vf_f64m4_b16_m | ( | ... | ) | __riscv_vmfeq_vf_f64m4_b16_mu(__VA_ARGS__) |
| #define vmfeq_vf_f64m8_b8 | ( | ... | ) | __riscv_vmfeq_vf_f64m8_b8(__VA_ARGS__) |
| #define vmfeq_vf_f64m8_b8_m | ( | ... | ) | __riscv_vmfeq_vf_f64m8_b8_mu(__VA_ARGS__) |
| #define vmfeq_vv_f16m1_b16 | ( | ... | ) | __riscv_vmfeq_vv_f16m1_b16(__VA_ARGS__) |
| #define vmfeq_vv_f16m1_b16_m | ( | ... | ) | __riscv_vmfeq_vv_f16m1_b16_mu(__VA_ARGS__) |
| #define vmfeq_vv_f16m2_b8 | ( | ... | ) | __riscv_vmfeq_vv_f16m2_b8(__VA_ARGS__) |
| #define vmfeq_vv_f16m2_b8_m | ( | ... | ) | __riscv_vmfeq_vv_f16m2_b8_mu(__VA_ARGS__) |
| #define vmfeq_vv_f16m4_b4 | ( | ... | ) | __riscv_vmfeq_vv_f16m4_b4(__VA_ARGS__) |
| #define vmfeq_vv_f16m4_b4_m | ( | ... | ) | __riscv_vmfeq_vv_f16m4_b4_mu(__VA_ARGS__) |
| #define vmfeq_vv_f16m8_b2 | ( | ... | ) | __riscv_vmfeq_vv_f16m8_b2(__VA_ARGS__) |
| #define vmfeq_vv_f16m8_b2_m | ( | ... | ) | __riscv_vmfeq_vv_f16m8_b2_mu(__VA_ARGS__) |
| #define vmfeq_vv_f16mf2_b32 | ( | ... | ) | __riscv_vmfeq_vv_f16mf2_b32(__VA_ARGS__) |
| #define vmfeq_vv_f16mf2_b32_m | ( | ... | ) | __riscv_vmfeq_vv_f16mf2_b32_mu(__VA_ARGS__) |
| #define vmfeq_vv_f16mf4_b64 | ( | ... | ) | __riscv_vmfeq_vv_f16mf4_b64(__VA_ARGS__) |
| #define vmfeq_vv_f16mf4_b64_m | ( | ... | ) | __riscv_vmfeq_vv_f16mf4_b64_mu(__VA_ARGS__) |
| #define vmfeq_vv_f32m1_b32 | ( | ... | ) | __riscv_vmfeq_vv_f32m1_b32(__VA_ARGS__) |
| #define vmfeq_vv_f32m1_b32_m | ( | ... | ) | __riscv_vmfeq_vv_f32m1_b32_mu(__VA_ARGS__) |
| #define vmfeq_vv_f32m2_b16 | ( | ... | ) | __riscv_vmfeq_vv_f32m2_b16(__VA_ARGS__) |
| #define vmfeq_vv_f32m2_b16_m | ( | ... | ) | __riscv_vmfeq_vv_f32m2_b16_mu(__VA_ARGS__) |
| #define vmfeq_vv_f32m4_b8 | ( | ... | ) | __riscv_vmfeq_vv_f32m4_b8(__VA_ARGS__) |
| #define vmfeq_vv_f32m4_b8_m | ( | ... | ) | __riscv_vmfeq_vv_f32m4_b8_mu(__VA_ARGS__) |
| #define vmfeq_vv_f32m8_b4 | ( | ... | ) | __riscv_vmfeq_vv_f32m8_b4(__VA_ARGS__) |
| #define vmfeq_vv_f32m8_b4_m | ( | ... | ) | __riscv_vmfeq_vv_f32m8_b4_mu(__VA_ARGS__) |
| #define vmfeq_vv_f32mf2_b64 | ( | ... | ) | __riscv_vmfeq_vv_f32mf2_b64(__VA_ARGS__) |
| #define vmfeq_vv_f32mf2_b64_m | ( | ... | ) | __riscv_vmfeq_vv_f32mf2_b64_mu(__VA_ARGS__) |
| #define vmfeq_vv_f64m1_b64 | ( | ... | ) | __riscv_vmfeq_vv_f64m1_b64(__VA_ARGS__) |
| #define vmfeq_vv_f64m1_b64_m | ( | ... | ) | __riscv_vmfeq_vv_f64m1_b64_mu(__VA_ARGS__) |
| #define vmfeq_vv_f64m2_b32 | ( | ... | ) | __riscv_vmfeq_vv_f64m2_b32(__VA_ARGS__) |
| #define vmfeq_vv_f64m2_b32_m | ( | ... | ) | __riscv_vmfeq_vv_f64m2_b32_mu(__VA_ARGS__) |
| #define vmfeq_vv_f64m4_b16 | ( | ... | ) | __riscv_vmfeq_vv_f64m4_b16(__VA_ARGS__) |
| #define vmfeq_vv_f64m4_b16_m | ( | ... | ) | __riscv_vmfeq_vv_f64m4_b16_mu(__VA_ARGS__) |
| #define vmfeq_vv_f64m8_b8 | ( | ... | ) | __riscv_vmfeq_vv_f64m8_b8(__VA_ARGS__) |
| #define vmfeq_vv_f64m8_b8_m | ( | ... | ) | __riscv_vmfeq_vv_f64m8_b8_mu(__VA_ARGS__) |
| #define vmfge_vf_f16m1_b16 | ( | ... | ) | __riscv_vmfge_vf_f16m1_b16(__VA_ARGS__) |
| #define vmfge_vf_f16m1_b16_m | ( | ... | ) | __riscv_vmfge_vf_f16m1_b16_mu(__VA_ARGS__) |
| #define vmfge_vf_f16m2_b8 | ( | ... | ) | __riscv_vmfge_vf_f16m2_b8(__VA_ARGS__) |
| #define vmfge_vf_f16m2_b8_m | ( | ... | ) | __riscv_vmfge_vf_f16m2_b8_mu(__VA_ARGS__) |
| #define vmfge_vf_f16m4_b4 | ( | ... | ) | __riscv_vmfge_vf_f16m4_b4(__VA_ARGS__) |
| #define vmfge_vf_f16m4_b4_m | ( | ... | ) | __riscv_vmfge_vf_f16m4_b4_mu(__VA_ARGS__) |
| #define vmfge_vf_f16m8_b2 | ( | ... | ) | __riscv_vmfge_vf_f16m8_b2(__VA_ARGS__) |
| #define vmfge_vf_f16m8_b2_m | ( | ... | ) | __riscv_vmfge_vf_f16m8_b2_mu(__VA_ARGS__) |
| #define vmfge_vf_f16mf2_b32 | ( | ... | ) | __riscv_vmfge_vf_f16mf2_b32(__VA_ARGS__) |
| #define vmfge_vf_f16mf2_b32_m | ( | ... | ) | __riscv_vmfge_vf_f16mf2_b32_mu(__VA_ARGS__) |
| #define vmfge_vf_f16mf4_b64 | ( | ... | ) | __riscv_vmfge_vf_f16mf4_b64(__VA_ARGS__) |
| #define vmfge_vf_f16mf4_b64_m | ( | ... | ) | __riscv_vmfge_vf_f16mf4_b64_mu(__VA_ARGS__) |
| #define vmfge_vf_f32m1_b32 | ( | ... | ) | __riscv_vmfge_vf_f32m1_b32(__VA_ARGS__) |
| #define vmfge_vf_f32m1_b32_m | ( | ... | ) | __riscv_vmfge_vf_f32m1_b32_mu(__VA_ARGS__) |
| #define vmfge_vf_f32m2_b16 | ( | ... | ) | __riscv_vmfge_vf_f32m2_b16(__VA_ARGS__) |
| #define vmfge_vf_f32m2_b16_m | ( | ... | ) | __riscv_vmfge_vf_f32m2_b16_mu(__VA_ARGS__) |
| #define vmfge_vf_f32m4_b8 | ( | ... | ) | __riscv_vmfge_vf_f32m4_b8(__VA_ARGS__) |
| #define vmfge_vf_f32m4_b8_m | ( | ... | ) | __riscv_vmfge_vf_f32m4_b8_mu(__VA_ARGS__) |
| #define vmfge_vf_f32m8_b4 | ( | ... | ) | __riscv_vmfge_vf_f32m8_b4(__VA_ARGS__) |
| #define vmfge_vf_f32m8_b4_m | ( | ... | ) | __riscv_vmfge_vf_f32m8_b4_mu(__VA_ARGS__) |
| #define vmfge_vf_f32mf2_b64 | ( | ... | ) | __riscv_vmfge_vf_f32mf2_b64(__VA_ARGS__) |
| #define vmfge_vf_f32mf2_b64_m | ( | ... | ) | __riscv_vmfge_vf_f32mf2_b64_mu(__VA_ARGS__) |
| #define vmfge_vf_f64m1_b64 | ( | ... | ) | __riscv_vmfge_vf_f64m1_b64(__VA_ARGS__) |
| #define vmfge_vf_f64m1_b64_m | ( | ... | ) | __riscv_vmfge_vf_f64m1_b64_mu(__VA_ARGS__) |
| #define vmfge_vf_f64m2_b32 | ( | ... | ) | __riscv_vmfge_vf_f64m2_b32(__VA_ARGS__) |
| #define vmfge_vf_f64m2_b32_m | ( | ... | ) | __riscv_vmfge_vf_f64m2_b32_mu(__VA_ARGS__) |
| #define vmfge_vf_f64m4_b16 | ( | ... | ) | __riscv_vmfge_vf_f64m4_b16(__VA_ARGS__) |
| #define vmfge_vf_f64m4_b16_m | ( | ... | ) | __riscv_vmfge_vf_f64m4_b16_mu(__VA_ARGS__) |
| #define vmfge_vf_f64m8_b8 | ( | ... | ) | __riscv_vmfge_vf_f64m8_b8(__VA_ARGS__) |
| #define vmfge_vf_f64m8_b8_m | ( | ... | ) | __riscv_vmfge_vf_f64m8_b8_mu(__VA_ARGS__) |
| #define vmfge_vv_f16m1_b16 | ( | ... | ) | __riscv_vmfge_vv_f16m1_b16(__VA_ARGS__) |
| #define vmfge_vv_f16m1_b16_m | ( | ... | ) | __riscv_vmfge_vv_f16m1_b16_mu(__VA_ARGS__) |
| #define vmfge_vv_f16m2_b8 | ( | ... | ) | __riscv_vmfge_vv_f16m2_b8(__VA_ARGS__) |
| #define vmfge_vv_f16m2_b8_m | ( | ... | ) | __riscv_vmfge_vv_f16m2_b8_mu(__VA_ARGS__) |
| #define vmfge_vv_f16m4_b4 | ( | ... | ) | __riscv_vmfge_vv_f16m4_b4(__VA_ARGS__) |
| #define vmfge_vv_f16m4_b4_m | ( | ... | ) | __riscv_vmfge_vv_f16m4_b4_mu(__VA_ARGS__) |
| #define vmfge_vv_f16m8_b2 | ( | ... | ) | __riscv_vmfge_vv_f16m8_b2(__VA_ARGS__) |
| #define vmfge_vv_f16m8_b2_m | ( | ... | ) | __riscv_vmfge_vv_f16m8_b2_mu(__VA_ARGS__) |
| #define vmfge_vv_f16mf2_b32 | ( | ... | ) | __riscv_vmfge_vv_f16mf2_b32(__VA_ARGS__) |
| #define vmfge_vv_f16mf2_b32_m | ( | ... | ) | __riscv_vmfge_vv_f16mf2_b32_mu(__VA_ARGS__) |
| #define vmfge_vv_f16mf4_b64 | ( | ... | ) | __riscv_vmfge_vv_f16mf4_b64(__VA_ARGS__) |
| #define vmfge_vv_f16mf4_b64_m | ( | ... | ) | __riscv_vmfge_vv_f16mf4_b64_mu(__VA_ARGS__) |
| #define vmfge_vv_f32m1_b32 | ( | ... | ) | __riscv_vmfge_vv_f32m1_b32(__VA_ARGS__) |
| #define vmfge_vv_f32m1_b32_m | ( | ... | ) | __riscv_vmfge_vv_f32m1_b32_mu(__VA_ARGS__) |
| #define vmfge_vv_f32m2_b16 | ( | ... | ) | __riscv_vmfge_vv_f32m2_b16(__VA_ARGS__) |
| #define vmfge_vv_f32m2_b16_m | ( | ... | ) | __riscv_vmfge_vv_f32m2_b16_mu(__VA_ARGS__) |
| #define vmfge_vv_f32m4_b8 | ( | ... | ) | __riscv_vmfge_vv_f32m4_b8(__VA_ARGS__) |
| #define vmfge_vv_f32m4_b8_m | ( | ... | ) | __riscv_vmfge_vv_f32m4_b8_mu(__VA_ARGS__) |
| #define vmfge_vv_f32m8_b4 | ( | ... | ) | __riscv_vmfge_vv_f32m8_b4(__VA_ARGS__) |
| #define vmfge_vv_f32m8_b4_m | ( | ... | ) | __riscv_vmfge_vv_f32m8_b4_mu(__VA_ARGS__) |
| #define vmfge_vv_f32mf2_b64 | ( | ... | ) | __riscv_vmfge_vv_f32mf2_b64(__VA_ARGS__) |
| #define vmfge_vv_f32mf2_b64_m | ( | ... | ) | __riscv_vmfge_vv_f32mf2_b64_mu(__VA_ARGS__) |
| #define vmfge_vv_f64m1_b64 | ( | ... | ) | __riscv_vmfge_vv_f64m1_b64(__VA_ARGS__) |
| #define vmfge_vv_f64m1_b64_m | ( | ... | ) | __riscv_vmfge_vv_f64m1_b64_mu(__VA_ARGS__) |
| #define vmfge_vv_f64m2_b32 | ( | ... | ) | __riscv_vmfge_vv_f64m2_b32(__VA_ARGS__) |
| #define vmfge_vv_f64m2_b32_m | ( | ... | ) | __riscv_vmfge_vv_f64m2_b32_mu(__VA_ARGS__) |
| #define vmfge_vv_f64m4_b16 | ( | ... | ) | __riscv_vmfge_vv_f64m4_b16(__VA_ARGS__) |
| #define vmfge_vv_f64m4_b16_m | ( | ... | ) | __riscv_vmfge_vv_f64m4_b16_mu(__VA_ARGS__) |
| #define vmfge_vv_f64m8_b8 | ( | ... | ) | __riscv_vmfge_vv_f64m8_b8(__VA_ARGS__) |
| #define vmfge_vv_f64m8_b8_m | ( | ... | ) | __riscv_vmfge_vv_f64m8_b8_mu(__VA_ARGS__) |
| #define vmfgt_vf_f16m1_b16 | ( | ... | ) | __riscv_vmfgt_vf_f16m1_b16(__VA_ARGS__) |
| #define vmfgt_vf_f16m1_b16_m | ( | ... | ) | __riscv_vmfgt_vf_f16m1_b16_mu(__VA_ARGS__) |
| #define vmfgt_vf_f16m2_b8 | ( | ... | ) | __riscv_vmfgt_vf_f16m2_b8(__VA_ARGS__) |
| #define vmfgt_vf_f16m2_b8_m | ( | ... | ) | __riscv_vmfgt_vf_f16m2_b8_mu(__VA_ARGS__) |
| #define vmfgt_vf_f16m4_b4 | ( | ... | ) | __riscv_vmfgt_vf_f16m4_b4(__VA_ARGS__) |
| #define vmfgt_vf_f16m4_b4_m | ( | ... | ) | __riscv_vmfgt_vf_f16m4_b4_mu(__VA_ARGS__) |
| #define vmfgt_vf_f16m8_b2 | ( | ... | ) | __riscv_vmfgt_vf_f16m8_b2(__VA_ARGS__) |
| #define vmfgt_vf_f16m8_b2_m | ( | ... | ) | __riscv_vmfgt_vf_f16m8_b2_mu(__VA_ARGS__) |
| #define vmfgt_vf_f16mf2_b32 | ( | ... | ) | __riscv_vmfgt_vf_f16mf2_b32(__VA_ARGS__) |
| #define vmfgt_vf_f16mf2_b32_m | ( | ... | ) | __riscv_vmfgt_vf_f16mf2_b32_mu(__VA_ARGS__) |
| #define vmfgt_vf_f16mf4_b64 | ( | ... | ) | __riscv_vmfgt_vf_f16mf4_b64(__VA_ARGS__) |
| #define vmfgt_vf_f16mf4_b64_m | ( | ... | ) | __riscv_vmfgt_vf_f16mf4_b64_mu(__VA_ARGS__) |
| #define vmfgt_vf_f32m1_b32 | ( | ... | ) | __riscv_vmfgt_vf_f32m1_b32(__VA_ARGS__) |
| #define vmfgt_vf_f32m1_b32_m | ( | ... | ) | __riscv_vmfgt_vf_f32m1_b32_mu(__VA_ARGS__) |
| #define vmfgt_vf_f32m2_b16 | ( | ... | ) | __riscv_vmfgt_vf_f32m2_b16(__VA_ARGS__) |
| #define vmfgt_vf_f32m2_b16_m | ( | ... | ) | __riscv_vmfgt_vf_f32m2_b16_mu(__VA_ARGS__) |
| #define vmfgt_vf_f32m4_b8 | ( | ... | ) | __riscv_vmfgt_vf_f32m4_b8(__VA_ARGS__) |
| #define vmfgt_vf_f32m4_b8_m | ( | ... | ) | __riscv_vmfgt_vf_f32m4_b8_mu(__VA_ARGS__) |
| #define vmfgt_vf_f32m8_b4 | ( | ... | ) | __riscv_vmfgt_vf_f32m8_b4(__VA_ARGS__) |
| #define vmfgt_vf_f32m8_b4_m | ( | ... | ) | __riscv_vmfgt_vf_f32m8_b4_mu(__VA_ARGS__) |
| #define vmfgt_vf_f32mf2_b64 | ( | ... | ) | __riscv_vmfgt_vf_f32mf2_b64(__VA_ARGS__) |
| #define vmfgt_vf_f32mf2_b64_m | ( | ... | ) | __riscv_vmfgt_vf_f32mf2_b64_mu(__VA_ARGS__) |
| #define vmfgt_vf_f64m1_b64 | ( | ... | ) | __riscv_vmfgt_vf_f64m1_b64(__VA_ARGS__) |
| #define vmfgt_vf_f64m1_b64_m | ( | ... | ) | __riscv_vmfgt_vf_f64m1_b64_mu(__VA_ARGS__) |
| #define vmfgt_vf_f64m2_b32 | ( | ... | ) | __riscv_vmfgt_vf_f64m2_b32(__VA_ARGS__) |
| #define vmfgt_vf_f64m2_b32_m | ( | ... | ) | __riscv_vmfgt_vf_f64m2_b32_mu(__VA_ARGS__) |
| #define vmfgt_vf_f64m4_b16 | ( | ... | ) | __riscv_vmfgt_vf_f64m4_b16(__VA_ARGS__) |
| #define vmfgt_vf_f64m4_b16_m | ( | ... | ) | __riscv_vmfgt_vf_f64m4_b16_mu(__VA_ARGS__) |
| #define vmfgt_vf_f64m8_b8 | ( | ... | ) | __riscv_vmfgt_vf_f64m8_b8(__VA_ARGS__) |
| #define vmfgt_vf_f64m8_b8_m | ( | ... | ) | __riscv_vmfgt_vf_f64m8_b8_mu(__VA_ARGS__) |
| #define vmfgt_vv_f16m1_b16 | ( | ... | ) | __riscv_vmfgt_vv_f16m1_b16(__VA_ARGS__) |
| #define vmfgt_vv_f16m1_b16_m | ( | ... | ) | __riscv_vmfgt_vv_f16m1_b16_mu(__VA_ARGS__) |
| #define vmfgt_vv_f16m2_b8 | ( | ... | ) | __riscv_vmfgt_vv_f16m2_b8(__VA_ARGS__) |
| #define vmfgt_vv_f16m2_b8_m | ( | ... | ) | __riscv_vmfgt_vv_f16m2_b8_mu(__VA_ARGS__) |
| #define vmfgt_vv_f16m4_b4 | ( | ... | ) | __riscv_vmfgt_vv_f16m4_b4(__VA_ARGS__) |
| #define vmfgt_vv_f16m4_b4_m | ( | ... | ) | __riscv_vmfgt_vv_f16m4_b4_mu(__VA_ARGS__) |
| #define vmfgt_vv_f16m8_b2 | ( | ... | ) | __riscv_vmfgt_vv_f16m8_b2(__VA_ARGS__) |
| #define vmfgt_vv_f16m8_b2_m | ( | ... | ) | __riscv_vmfgt_vv_f16m8_b2_mu(__VA_ARGS__) |
| #define vmfgt_vv_f16mf2_b32 | ( | ... | ) | __riscv_vmfgt_vv_f16mf2_b32(__VA_ARGS__) |
| #define vmfgt_vv_f16mf2_b32_m | ( | ... | ) | __riscv_vmfgt_vv_f16mf2_b32_mu(__VA_ARGS__) |
| #define vmfgt_vv_f16mf4_b64 | ( | ... | ) | __riscv_vmfgt_vv_f16mf4_b64(__VA_ARGS__) |
| #define vmfgt_vv_f16mf4_b64_m | ( | ... | ) | __riscv_vmfgt_vv_f16mf4_b64_mu(__VA_ARGS__) |
| #define vmfgt_vv_f32m1_b32 | ( | ... | ) | __riscv_vmfgt_vv_f32m1_b32(__VA_ARGS__) |
| #define vmfgt_vv_f32m1_b32_m | ( | ... | ) | __riscv_vmfgt_vv_f32m1_b32_mu(__VA_ARGS__) |
| #define vmfgt_vv_f32m2_b16 | ( | ... | ) | __riscv_vmfgt_vv_f32m2_b16(__VA_ARGS__) |
| #define vmfgt_vv_f32m2_b16_m | ( | ... | ) | __riscv_vmfgt_vv_f32m2_b16_mu(__VA_ARGS__) |
| #define vmfgt_vv_f32m4_b8 | ( | ... | ) | __riscv_vmfgt_vv_f32m4_b8(__VA_ARGS__) |
| #define vmfgt_vv_f32m4_b8_m | ( | ... | ) | __riscv_vmfgt_vv_f32m4_b8_mu(__VA_ARGS__) |
| #define vmfgt_vv_f32m8_b4 | ( | ... | ) | __riscv_vmfgt_vv_f32m8_b4(__VA_ARGS__) |
| #define vmfgt_vv_f32m8_b4_m | ( | ... | ) | __riscv_vmfgt_vv_f32m8_b4_mu(__VA_ARGS__) |
| #define vmfgt_vv_f32mf2_b64 | ( | ... | ) | __riscv_vmfgt_vv_f32mf2_b64(__VA_ARGS__) |
| #define vmfgt_vv_f32mf2_b64_m | ( | ... | ) | __riscv_vmfgt_vv_f32mf2_b64_mu(__VA_ARGS__) |
| #define vmfgt_vv_f64m1_b64 | ( | ... | ) | __riscv_vmfgt_vv_f64m1_b64(__VA_ARGS__) |
| #define vmfgt_vv_f64m1_b64_m | ( | ... | ) | __riscv_vmfgt_vv_f64m1_b64_mu(__VA_ARGS__) |
| #define vmfgt_vv_f64m2_b32 | ( | ... | ) | __riscv_vmfgt_vv_f64m2_b32(__VA_ARGS__) |
| #define vmfgt_vv_f64m2_b32_m | ( | ... | ) | __riscv_vmfgt_vv_f64m2_b32_mu(__VA_ARGS__) |
| #define vmfgt_vv_f64m4_b16 | ( | ... | ) | __riscv_vmfgt_vv_f64m4_b16(__VA_ARGS__) |
| #define vmfgt_vv_f64m4_b16_m | ( | ... | ) | __riscv_vmfgt_vv_f64m4_b16_mu(__VA_ARGS__) |
| #define vmfgt_vv_f64m8_b8 | ( | ... | ) | __riscv_vmfgt_vv_f64m8_b8(__VA_ARGS__) |
| #define vmfgt_vv_f64m8_b8_m | ( | ... | ) | __riscv_vmfgt_vv_f64m8_b8_mu(__VA_ARGS__) |
| #define vmfle_vf_f16m1_b16 | ( | ... | ) | __riscv_vmfle_vf_f16m1_b16(__VA_ARGS__) |
| #define vmfle_vf_f16m1_b16_m | ( | ... | ) | __riscv_vmfle_vf_f16m1_b16_mu(__VA_ARGS__) |
| #define vmfle_vf_f16m2_b8 | ( | ... | ) | __riscv_vmfle_vf_f16m2_b8(__VA_ARGS__) |
| #define vmfle_vf_f16m2_b8_m | ( | ... | ) | __riscv_vmfle_vf_f16m2_b8_mu(__VA_ARGS__) |
| #define vmfle_vf_f16m4_b4 | ( | ... | ) | __riscv_vmfle_vf_f16m4_b4(__VA_ARGS__) |
| #define vmfle_vf_f16m4_b4_m | ( | ... | ) | __riscv_vmfle_vf_f16m4_b4_mu(__VA_ARGS__) |
| #define vmfle_vf_f16m8_b2 | ( | ... | ) | __riscv_vmfle_vf_f16m8_b2(__VA_ARGS__) |
| #define vmfle_vf_f16m8_b2_m | ( | ... | ) | __riscv_vmfle_vf_f16m8_b2_mu(__VA_ARGS__) |
| #define vmfle_vf_f16mf2_b32 | ( | ... | ) | __riscv_vmfle_vf_f16mf2_b32(__VA_ARGS__) |
| #define vmfle_vf_f16mf2_b32_m | ( | ... | ) | __riscv_vmfle_vf_f16mf2_b32_mu(__VA_ARGS__) |
| #define vmfle_vf_f16mf4_b64 | ( | ... | ) | __riscv_vmfle_vf_f16mf4_b64(__VA_ARGS__) |
| #define vmfle_vf_f16mf4_b64_m | ( | ... | ) | __riscv_vmfle_vf_f16mf4_b64_mu(__VA_ARGS__) |
| #define vmfle_vf_f32m1_b32 | ( | ... | ) | __riscv_vmfle_vf_f32m1_b32(__VA_ARGS__) |
| #define vmfle_vf_f32m1_b32_m | ( | ... | ) | __riscv_vmfle_vf_f32m1_b32_mu(__VA_ARGS__) |
| #define vmfle_vf_f32m2_b16 | ( | ... | ) | __riscv_vmfle_vf_f32m2_b16(__VA_ARGS__) |
| #define vmfle_vf_f32m2_b16_m | ( | ... | ) | __riscv_vmfle_vf_f32m2_b16_mu(__VA_ARGS__) |
| #define vmfle_vf_f32m4_b8 | ( | ... | ) | __riscv_vmfle_vf_f32m4_b8(__VA_ARGS__) |
| #define vmfle_vf_f32m4_b8_m | ( | ... | ) | __riscv_vmfle_vf_f32m4_b8_mu(__VA_ARGS__) |
| #define vmfle_vf_f32m8_b4 | ( | ... | ) | __riscv_vmfle_vf_f32m8_b4(__VA_ARGS__) |
| #define vmfle_vf_f32m8_b4_m | ( | ... | ) | __riscv_vmfle_vf_f32m8_b4_mu(__VA_ARGS__) |
| #define vmfle_vf_f32mf2_b64 | ( | ... | ) | __riscv_vmfle_vf_f32mf2_b64(__VA_ARGS__) |
| #define vmfle_vf_f32mf2_b64_m | ( | ... | ) | __riscv_vmfle_vf_f32mf2_b64_mu(__VA_ARGS__) |
| #define vmfle_vf_f64m1_b64 | ( | ... | ) | __riscv_vmfle_vf_f64m1_b64(__VA_ARGS__) |
| #define vmfle_vf_f64m1_b64_m | ( | ... | ) | __riscv_vmfle_vf_f64m1_b64_mu(__VA_ARGS__) |
| #define vmfle_vf_f64m2_b32 | ( | ... | ) | __riscv_vmfle_vf_f64m2_b32(__VA_ARGS__) |
| #define vmfle_vf_f64m2_b32_m | ( | ... | ) | __riscv_vmfle_vf_f64m2_b32_mu(__VA_ARGS__) |
| #define vmfle_vf_f64m4_b16 | ( | ... | ) | __riscv_vmfle_vf_f64m4_b16(__VA_ARGS__) |
| #define vmfle_vf_f64m4_b16_m | ( | ... | ) | __riscv_vmfle_vf_f64m4_b16_mu(__VA_ARGS__) |
| #define vmfle_vf_f64m8_b8 | ( | ... | ) | __riscv_vmfle_vf_f64m8_b8(__VA_ARGS__) |
| #define vmfle_vf_f64m8_b8_m | ( | ... | ) | __riscv_vmfle_vf_f64m8_b8_mu(__VA_ARGS__) |
| #define vmfle_vv_f16m1_b16 | ( | ... | ) | __riscv_vmfle_vv_f16m1_b16(__VA_ARGS__) |
| #define vmfle_vv_f16m1_b16_m | ( | ... | ) | __riscv_vmfle_vv_f16m1_b16_mu(__VA_ARGS__) |
| #define vmfle_vv_f16m2_b8 | ( | ... | ) | __riscv_vmfle_vv_f16m2_b8(__VA_ARGS__) |
| #define vmfle_vv_f16m2_b8_m | ( | ... | ) | __riscv_vmfle_vv_f16m2_b8_mu(__VA_ARGS__) |
| #define vmfle_vv_f16m4_b4 | ( | ... | ) | __riscv_vmfle_vv_f16m4_b4(__VA_ARGS__) |
| #define vmfle_vv_f16m4_b4_m | ( | ... | ) | __riscv_vmfle_vv_f16m4_b4_mu(__VA_ARGS__) |
| #define vmfle_vv_f16m8_b2 | ( | ... | ) | __riscv_vmfle_vv_f16m8_b2(__VA_ARGS__) |
| #define vmfle_vv_f16m8_b2_m | ( | ... | ) | __riscv_vmfle_vv_f16m8_b2_mu(__VA_ARGS__) |
| #define vmfle_vv_f16mf2_b32 | ( | ... | ) | __riscv_vmfle_vv_f16mf2_b32(__VA_ARGS__) |
| #define vmfle_vv_f16mf2_b32_m | ( | ... | ) | __riscv_vmfle_vv_f16mf2_b32_mu(__VA_ARGS__) |
| #define vmfle_vv_f16mf4_b64 | ( | ... | ) | __riscv_vmfle_vv_f16mf4_b64(__VA_ARGS__) |
| #define vmfle_vv_f16mf4_b64_m | ( | ... | ) | __riscv_vmfle_vv_f16mf4_b64_mu(__VA_ARGS__) |
| #define vmfle_vv_f32m1_b32 | ( | ... | ) | __riscv_vmfle_vv_f32m1_b32(__VA_ARGS__) |
| #define vmfle_vv_f32m1_b32_m | ( | ... | ) | __riscv_vmfle_vv_f32m1_b32_mu(__VA_ARGS__) |
| #define vmfle_vv_f32m2_b16 | ( | ... | ) | __riscv_vmfle_vv_f32m2_b16(__VA_ARGS__) |
| #define vmfle_vv_f32m2_b16_m | ( | ... | ) | __riscv_vmfle_vv_f32m2_b16_mu(__VA_ARGS__) |
| #define vmfle_vv_f32m4_b8 | ( | ... | ) | __riscv_vmfle_vv_f32m4_b8(__VA_ARGS__) |
| #define vmfle_vv_f32m4_b8_m | ( | ... | ) | __riscv_vmfle_vv_f32m4_b8_mu(__VA_ARGS__) |
| #define vmfle_vv_f32m8_b4 | ( | ... | ) | __riscv_vmfle_vv_f32m8_b4(__VA_ARGS__) |
| #define vmfle_vv_f32m8_b4_m | ( | ... | ) | __riscv_vmfle_vv_f32m8_b4_mu(__VA_ARGS__) |
| #define vmfle_vv_f32mf2_b64 | ( | ... | ) | __riscv_vmfle_vv_f32mf2_b64(__VA_ARGS__) |
| #define vmfle_vv_f32mf2_b64_m | ( | ... | ) | __riscv_vmfle_vv_f32mf2_b64_mu(__VA_ARGS__) |
| #define vmfle_vv_f64m1_b64 | ( | ... | ) | __riscv_vmfle_vv_f64m1_b64(__VA_ARGS__) |
| #define vmfle_vv_f64m1_b64_m | ( | ... | ) | __riscv_vmfle_vv_f64m1_b64_mu(__VA_ARGS__) |
| #define vmfle_vv_f64m2_b32 | ( | ... | ) | __riscv_vmfle_vv_f64m2_b32(__VA_ARGS__) |
| #define vmfle_vv_f64m2_b32_m | ( | ... | ) | __riscv_vmfle_vv_f64m2_b32_mu(__VA_ARGS__) |
| #define vmfle_vv_f64m4_b16 | ( | ... | ) | __riscv_vmfle_vv_f64m4_b16(__VA_ARGS__) |
| #define vmfle_vv_f64m4_b16_m | ( | ... | ) | __riscv_vmfle_vv_f64m4_b16_mu(__VA_ARGS__) |
| #define vmfle_vv_f64m8_b8 | ( | ... | ) | __riscv_vmfle_vv_f64m8_b8(__VA_ARGS__) |
| #define vmfle_vv_f64m8_b8_m | ( | ... | ) | __riscv_vmfle_vv_f64m8_b8_mu(__VA_ARGS__) |
| #define vmflt_vf_f16m1_b16 | ( | ... | ) | __riscv_vmflt_vf_f16m1_b16(__VA_ARGS__) |
| #define vmflt_vf_f16m1_b16_m | ( | ... | ) | __riscv_vmflt_vf_f16m1_b16_mu(__VA_ARGS__) |
| #define vmflt_vf_f16m2_b8 | ( | ... | ) | __riscv_vmflt_vf_f16m2_b8(__VA_ARGS__) |
| #define vmflt_vf_f16m2_b8_m | ( | ... | ) | __riscv_vmflt_vf_f16m2_b8_mu(__VA_ARGS__) |
| #define vmflt_vf_f16m4_b4 | ( | ... | ) | __riscv_vmflt_vf_f16m4_b4(__VA_ARGS__) |
| #define vmflt_vf_f16m4_b4_m | ( | ... | ) | __riscv_vmflt_vf_f16m4_b4_mu(__VA_ARGS__) |
| #define vmflt_vf_f16m8_b2 | ( | ... | ) | __riscv_vmflt_vf_f16m8_b2(__VA_ARGS__) |
| #define vmflt_vf_f16m8_b2_m | ( | ... | ) | __riscv_vmflt_vf_f16m8_b2_mu(__VA_ARGS__) |
| #define vmflt_vf_f16mf2_b32 | ( | ... | ) | __riscv_vmflt_vf_f16mf2_b32(__VA_ARGS__) |
| #define vmflt_vf_f16mf2_b32_m | ( | ... | ) | __riscv_vmflt_vf_f16mf2_b32_mu(__VA_ARGS__) |
| #define vmflt_vf_f16mf4_b64 | ( | ... | ) | __riscv_vmflt_vf_f16mf4_b64(__VA_ARGS__) |
| #define vmflt_vf_f16mf4_b64_m | ( | ... | ) | __riscv_vmflt_vf_f16mf4_b64_mu(__VA_ARGS__) |
| #define vmflt_vf_f32m1_b32 | ( | ... | ) | __riscv_vmflt_vf_f32m1_b32(__VA_ARGS__) |
| #define vmflt_vf_f32m1_b32_m | ( | ... | ) | __riscv_vmflt_vf_f32m1_b32_mu(__VA_ARGS__) |
| #define vmflt_vf_f32m2_b16 | ( | ... | ) | __riscv_vmflt_vf_f32m2_b16(__VA_ARGS__) |
| #define vmflt_vf_f32m2_b16_m | ( | ... | ) | __riscv_vmflt_vf_f32m2_b16_mu(__VA_ARGS__) |
| #define vmflt_vf_f32m4_b8 | ( | ... | ) | __riscv_vmflt_vf_f32m4_b8(__VA_ARGS__) |
| #define vmflt_vf_f32m4_b8_m | ( | ... | ) | __riscv_vmflt_vf_f32m4_b8_mu(__VA_ARGS__) |
| #define vmflt_vf_f32m8_b4 | ( | ... | ) | __riscv_vmflt_vf_f32m8_b4(__VA_ARGS__) |
| #define vmflt_vf_f32m8_b4_m | ( | ... | ) | __riscv_vmflt_vf_f32m8_b4_mu(__VA_ARGS__) |
| #define vmflt_vf_f32mf2_b64 | ( | ... | ) | __riscv_vmflt_vf_f32mf2_b64(__VA_ARGS__) |
| #define vmflt_vf_f32mf2_b64_m | ( | ... | ) | __riscv_vmflt_vf_f32mf2_b64_mu(__VA_ARGS__) |
| #define vmflt_vf_f64m1_b64 | ( | ... | ) | __riscv_vmflt_vf_f64m1_b64(__VA_ARGS__) |
| #define vmflt_vf_f64m1_b64_m | ( | ... | ) | __riscv_vmflt_vf_f64m1_b64_mu(__VA_ARGS__) |
| #define vmflt_vf_f64m2_b32 | ( | ... | ) | __riscv_vmflt_vf_f64m2_b32(__VA_ARGS__) |
| #define vmflt_vf_f64m2_b32_m | ( | ... | ) | __riscv_vmflt_vf_f64m2_b32_mu(__VA_ARGS__) |
| #define vmflt_vf_f64m4_b16 | ( | ... | ) | __riscv_vmflt_vf_f64m4_b16(__VA_ARGS__) |
| #define vmflt_vf_f64m4_b16_m | ( | ... | ) | __riscv_vmflt_vf_f64m4_b16_mu(__VA_ARGS__) |
| #define vmflt_vf_f64m8_b8 | ( | ... | ) | __riscv_vmflt_vf_f64m8_b8(__VA_ARGS__) |
| #define vmflt_vf_f64m8_b8_m | ( | ... | ) | __riscv_vmflt_vf_f64m8_b8_mu(__VA_ARGS__) |
| #define vmflt_vv_f16m1_b16 | ( | ... | ) | __riscv_vmflt_vv_f16m1_b16(__VA_ARGS__) |
| #define vmflt_vv_f16m1_b16_m | ( | ... | ) | __riscv_vmflt_vv_f16m1_b16_mu(__VA_ARGS__) |
| #define vmflt_vv_f16m2_b8 | ( | ... | ) | __riscv_vmflt_vv_f16m2_b8(__VA_ARGS__) |
| #define vmflt_vv_f16m2_b8_m | ( | ... | ) | __riscv_vmflt_vv_f16m2_b8_mu(__VA_ARGS__) |
| #define vmflt_vv_f16m4_b4 | ( | ... | ) | __riscv_vmflt_vv_f16m4_b4(__VA_ARGS__) |
| #define vmflt_vv_f16m4_b4_m | ( | ... | ) | __riscv_vmflt_vv_f16m4_b4_mu(__VA_ARGS__) |
| #define vmflt_vv_f16m8_b2 | ( | ... | ) | __riscv_vmflt_vv_f16m8_b2(__VA_ARGS__) |
| #define vmflt_vv_f16m8_b2_m | ( | ... | ) | __riscv_vmflt_vv_f16m8_b2_mu(__VA_ARGS__) |
| #define vmflt_vv_f16mf2_b32 | ( | ... | ) | __riscv_vmflt_vv_f16mf2_b32(__VA_ARGS__) |
| #define vmflt_vv_f16mf2_b32_m | ( | ... | ) | __riscv_vmflt_vv_f16mf2_b32_mu(__VA_ARGS__) |
| #define vmflt_vv_f16mf4_b64 | ( | ... | ) | __riscv_vmflt_vv_f16mf4_b64(__VA_ARGS__) |
| #define vmflt_vv_f16mf4_b64_m | ( | ... | ) | __riscv_vmflt_vv_f16mf4_b64_mu(__VA_ARGS__) |
| #define vmflt_vv_f32m1_b32 | ( | ... | ) | __riscv_vmflt_vv_f32m1_b32(__VA_ARGS__) |
| #define vmflt_vv_f32m1_b32_m | ( | ... | ) | __riscv_vmflt_vv_f32m1_b32_mu(__VA_ARGS__) |
| #define vmflt_vv_f32m2_b16 | ( | ... | ) | __riscv_vmflt_vv_f32m2_b16(__VA_ARGS__) |
| #define vmflt_vv_f32m2_b16_m | ( | ... | ) | __riscv_vmflt_vv_f32m2_b16_mu(__VA_ARGS__) |
| #define vmflt_vv_f32m4_b8 | ( | ... | ) | __riscv_vmflt_vv_f32m4_b8(__VA_ARGS__) |
| #define vmflt_vv_f32m4_b8_m | ( | ... | ) | __riscv_vmflt_vv_f32m4_b8_mu(__VA_ARGS__) |
| #define vmflt_vv_f32m8_b4 | ( | ... | ) | __riscv_vmflt_vv_f32m8_b4(__VA_ARGS__) |
| #define vmflt_vv_f32m8_b4_m | ( | ... | ) | __riscv_vmflt_vv_f32m8_b4_mu(__VA_ARGS__) |
| #define vmflt_vv_f32mf2_b64 | ( | ... | ) | __riscv_vmflt_vv_f32mf2_b64(__VA_ARGS__) |
| #define vmflt_vv_f32mf2_b64_m | ( | ... | ) | __riscv_vmflt_vv_f32mf2_b64_mu(__VA_ARGS__) |
| #define vmflt_vv_f64m1_b64 | ( | ... | ) | __riscv_vmflt_vv_f64m1_b64(__VA_ARGS__) |
| #define vmflt_vv_f64m1_b64_m | ( | ... | ) | __riscv_vmflt_vv_f64m1_b64_mu(__VA_ARGS__) |
| #define vmflt_vv_f64m2_b32 | ( | ... | ) | __riscv_vmflt_vv_f64m2_b32(__VA_ARGS__) |
| #define vmflt_vv_f64m2_b32_m | ( | ... | ) | __riscv_vmflt_vv_f64m2_b32_mu(__VA_ARGS__) |
| #define vmflt_vv_f64m4_b16 | ( | ... | ) | __riscv_vmflt_vv_f64m4_b16(__VA_ARGS__) |
| #define vmflt_vv_f64m4_b16_m | ( | ... | ) | __riscv_vmflt_vv_f64m4_b16_mu(__VA_ARGS__) |
| #define vmflt_vv_f64m8_b8 | ( | ... | ) | __riscv_vmflt_vv_f64m8_b8(__VA_ARGS__) |
| #define vmflt_vv_f64m8_b8_m | ( | ... | ) | __riscv_vmflt_vv_f64m8_b8_mu(__VA_ARGS__) |
| #define vmfne_vf_f16m1_b16 | ( | ... | ) | __riscv_vmfne_vf_f16m1_b16(__VA_ARGS__) |
| #define vmfne_vf_f16m1_b16_m | ( | ... | ) | __riscv_vmfne_vf_f16m1_b16_mu(__VA_ARGS__) |
| #define vmfne_vf_f16m2_b8 | ( | ... | ) | __riscv_vmfne_vf_f16m2_b8(__VA_ARGS__) |
| #define vmfne_vf_f16m2_b8_m | ( | ... | ) | __riscv_vmfne_vf_f16m2_b8_mu(__VA_ARGS__) |
| #define vmfne_vf_f16m4_b4 | ( | ... | ) | __riscv_vmfne_vf_f16m4_b4(__VA_ARGS__) |
| #define vmfne_vf_f16m4_b4_m | ( | ... | ) | __riscv_vmfne_vf_f16m4_b4_mu(__VA_ARGS__) |
| #define vmfne_vf_f16m8_b2 | ( | ... | ) | __riscv_vmfne_vf_f16m8_b2(__VA_ARGS__) |
| #define vmfne_vf_f16m8_b2_m | ( | ... | ) | __riscv_vmfne_vf_f16m8_b2_mu(__VA_ARGS__) |
| #define vmfne_vf_f16mf2_b32 | ( | ... | ) | __riscv_vmfne_vf_f16mf2_b32(__VA_ARGS__) |
| #define vmfne_vf_f16mf2_b32_m | ( | ... | ) | __riscv_vmfne_vf_f16mf2_b32_mu(__VA_ARGS__) |
| #define vmfne_vf_f16mf4_b64 | ( | ... | ) | __riscv_vmfne_vf_f16mf4_b64(__VA_ARGS__) |
| #define vmfne_vf_f16mf4_b64_m | ( | ... | ) | __riscv_vmfne_vf_f16mf4_b64_mu(__VA_ARGS__) |
| #define vmfne_vf_f32m1_b32 | ( | ... | ) | __riscv_vmfne_vf_f32m1_b32(__VA_ARGS__) |
| #define vmfne_vf_f32m1_b32_m | ( | ... | ) | __riscv_vmfne_vf_f32m1_b32_mu(__VA_ARGS__) |
| #define vmfne_vf_f32m2_b16 | ( | ... | ) | __riscv_vmfne_vf_f32m2_b16(__VA_ARGS__) |
| #define vmfne_vf_f32m2_b16_m | ( | ... | ) | __riscv_vmfne_vf_f32m2_b16_mu(__VA_ARGS__) |
| #define vmfne_vf_f32m4_b8 | ( | ... | ) | __riscv_vmfne_vf_f32m4_b8(__VA_ARGS__) |
| #define vmfne_vf_f32m4_b8_m | ( | ... | ) | __riscv_vmfne_vf_f32m4_b8_mu(__VA_ARGS__) |
| #define vmfne_vf_f32m8_b4 | ( | ... | ) | __riscv_vmfne_vf_f32m8_b4(__VA_ARGS__) |
| #define vmfne_vf_f32m8_b4_m | ( | ... | ) | __riscv_vmfne_vf_f32m8_b4_mu(__VA_ARGS__) |
| #define vmfne_vf_f32mf2_b64 | ( | ... | ) | __riscv_vmfne_vf_f32mf2_b64(__VA_ARGS__) |
| #define vmfne_vf_f32mf2_b64_m | ( | ... | ) | __riscv_vmfne_vf_f32mf2_b64_mu(__VA_ARGS__) |
| #define vmfne_vf_f64m1_b64 | ( | ... | ) | __riscv_vmfne_vf_f64m1_b64(__VA_ARGS__) |
| #define vmfne_vf_f64m1_b64_m | ( | ... | ) | __riscv_vmfne_vf_f64m1_b64_mu(__VA_ARGS__) |
| #define vmfne_vf_f64m2_b32 | ( | ... | ) | __riscv_vmfne_vf_f64m2_b32(__VA_ARGS__) |
| #define vmfne_vf_f64m2_b32_m | ( | ... | ) | __riscv_vmfne_vf_f64m2_b32_mu(__VA_ARGS__) |
| #define vmfne_vf_f64m4_b16 | ( | ... | ) | __riscv_vmfne_vf_f64m4_b16(__VA_ARGS__) |
| #define vmfne_vf_f64m4_b16_m | ( | ... | ) | __riscv_vmfne_vf_f64m4_b16_mu(__VA_ARGS__) |
| #define vmfne_vf_f64m8_b8 | ( | ... | ) | __riscv_vmfne_vf_f64m8_b8(__VA_ARGS__) |
| #define vmfne_vf_f64m8_b8_m | ( | ... | ) | __riscv_vmfne_vf_f64m8_b8_mu(__VA_ARGS__) |
| #define vmfne_vv_f16m1_b16 | ( | ... | ) | __riscv_vmfne_vv_f16m1_b16(__VA_ARGS__) |
| #define vmfne_vv_f16m1_b16_m | ( | ... | ) | __riscv_vmfne_vv_f16m1_b16_mu(__VA_ARGS__) |
| #define vmfne_vv_f16m2_b8 | ( | ... | ) | __riscv_vmfne_vv_f16m2_b8(__VA_ARGS__) |
| #define vmfne_vv_f16m2_b8_m | ( | ... | ) | __riscv_vmfne_vv_f16m2_b8_mu(__VA_ARGS__) |
| #define vmfne_vv_f16m4_b4 | ( | ... | ) | __riscv_vmfne_vv_f16m4_b4(__VA_ARGS__) |
| #define vmfne_vv_f16m4_b4_m | ( | ... | ) | __riscv_vmfne_vv_f16m4_b4_mu(__VA_ARGS__) |
| #define vmfne_vv_f16m8_b2 | ( | ... | ) | __riscv_vmfne_vv_f16m8_b2(__VA_ARGS__) |
| #define vmfne_vv_f16m8_b2_m | ( | ... | ) | __riscv_vmfne_vv_f16m8_b2_mu(__VA_ARGS__) |
| #define vmfne_vv_f16mf2_b32 | ( | ... | ) | __riscv_vmfne_vv_f16mf2_b32(__VA_ARGS__) |
| #define vmfne_vv_f16mf2_b32_m | ( | ... | ) | __riscv_vmfne_vv_f16mf2_b32_mu(__VA_ARGS__) |
| #define vmfne_vv_f16mf4_b64 | ( | ... | ) | __riscv_vmfne_vv_f16mf4_b64(__VA_ARGS__) |
| #define vmfne_vv_f16mf4_b64_m | ( | ... | ) | __riscv_vmfne_vv_f16mf4_b64_mu(__VA_ARGS__) |
| #define vmfne_vv_f32m1_b32 | ( | ... | ) | __riscv_vmfne_vv_f32m1_b32(__VA_ARGS__) |
| #define vmfne_vv_f32m1_b32_m | ( | ... | ) | __riscv_vmfne_vv_f32m1_b32_mu(__VA_ARGS__) |
| #define vmfne_vv_f32m2_b16 | ( | ... | ) | __riscv_vmfne_vv_f32m2_b16(__VA_ARGS__) |
| #define vmfne_vv_f32m2_b16_m | ( | ... | ) | __riscv_vmfne_vv_f32m2_b16_mu(__VA_ARGS__) |
| #define vmfne_vv_f32m4_b8 | ( | ... | ) | __riscv_vmfne_vv_f32m4_b8(__VA_ARGS__) |
| #define vmfne_vv_f32m4_b8_m | ( | ... | ) | __riscv_vmfne_vv_f32m4_b8_mu(__VA_ARGS__) |
| #define vmfne_vv_f32m8_b4 | ( | ... | ) | __riscv_vmfne_vv_f32m8_b4(__VA_ARGS__) |
| #define vmfne_vv_f32m8_b4_m | ( | ... | ) | __riscv_vmfne_vv_f32m8_b4_mu(__VA_ARGS__) |
| #define vmfne_vv_f32mf2_b64 | ( | ... | ) | __riscv_vmfne_vv_f32mf2_b64(__VA_ARGS__) |
| #define vmfne_vv_f32mf2_b64_m | ( | ... | ) | __riscv_vmfne_vv_f32mf2_b64_mu(__VA_ARGS__) |
| #define vmfne_vv_f64m1_b64 | ( | ... | ) | __riscv_vmfne_vv_f64m1_b64(__VA_ARGS__) |
| #define vmfne_vv_f64m1_b64_m | ( | ... | ) | __riscv_vmfne_vv_f64m1_b64_mu(__VA_ARGS__) |
| #define vmfne_vv_f64m2_b32 | ( | ... | ) | __riscv_vmfne_vv_f64m2_b32(__VA_ARGS__) |
| #define vmfne_vv_f64m2_b32_m | ( | ... | ) | __riscv_vmfne_vv_f64m2_b32_mu(__VA_ARGS__) |
| #define vmfne_vv_f64m4_b16 | ( | ... | ) | __riscv_vmfne_vv_f64m4_b16(__VA_ARGS__) |
| #define vmfne_vv_f64m4_b16_m | ( | ... | ) | __riscv_vmfne_vv_f64m4_b16_mu(__VA_ARGS__) |
| #define vmfne_vv_f64m8_b8 | ( | ... | ) | __riscv_vmfne_vv_f64m8_b8(__VA_ARGS__) |
| #define vmfne_vv_f64m8_b8_m | ( | ... | ) | __riscv_vmfne_vv_f64m8_b8_mu(__VA_ARGS__) |
| #define vmin_vv_i16m1 | ( | ... | ) | __riscv_vmin_vv_i16m1(__VA_ARGS__) |
| #define vmin_vv_i16m1_m | ( | ... | ) | __riscv_vmin_vv_i16m1_tumu(__VA_ARGS__) |
| #define vmin_vv_i16m2 | ( | ... | ) | __riscv_vmin_vv_i16m2(__VA_ARGS__) |
| #define vmin_vv_i16m2_m | ( | ... | ) | __riscv_vmin_vv_i16m2_tumu(__VA_ARGS__) |
| #define vmin_vv_i16m4 | ( | ... | ) | __riscv_vmin_vv_i16m4(__VA_ARGS__) |
| #define vmin_vv_i16m4_m | ( | ... | ) | __riscv_vmin_vv_i16m4_tumu(__VA_ARGS__) |
| #define vmin_vv_i16m8 | ( | ... | ) | __riscv_vmin_vv_i16m8(__VA_ARGS__) |
| #define vmin_vv_i16m8_m | ( | ... | ) | __riscv_vmin_vv_i16m8_tumu(__VA_ARGS__) |
| #define vmin_vv_i16mf2 | ( | ... | ) | __riscv_vmin_vv_i16mf2(__VA_ARGS__) |
| #define vmin_vv_i16mf2_m | ( | ... | ) | __riscv_vmin_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vmin_vv_i16mf4 | ( | ... | ) | __riscv_vmin_vv_i16mf4(__VA_ARGS__) |
| #define vmin_vv_i16mf4_m | ( | ... | ) | __riscv_vmin_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vmin_vv_i32m1 | ( | ... | ) | __riscv_vmin_vv_i32m1(__VA_ARGS__) |
| #define vmin_vv_i32m1_m | ( | ... | ) | __riscv_vmin_vv_i32m1_tumu(__VA_ARGS__) |
| #define vmin_vv_i32m2 | ( | ... | ) | __riscv_vmin_vv_i32m2(__VA_ARGS__) |
| #define vmin_vv_i32m2_m | ( | ... | ) | __riscv_vmin_vv_i32m2_tumu(__VA_ARGS__) |
| #define vmin_vv_i32m4 | ( | ... | ) | __riscv_vmin_vv_i32m4(__VA_ARGS__) |
| #define vmin_vv_i32m4_m | ( | ... | ) | __riscv_vmin_vv_i32m4_tumu(__VA_ARGS__) |
| #define vmin_vv_i32m8 | ( | ... | ) | __riscv_vmin_vv_i32m8(__VA_ARGS__) |
| #define vmin_vv_i32m8_m | ( | ... | ) | __riscv_vmin_vv_i32m8_tumu(__VA_ARGS__) |
| #define vmin_vv_i32mf2 | ( | ... | ) | __riscv_vmin_vv_i32mf2(__VA_ARGS__) |
| #define vmin_vv_i32mf2_m | ( | ... | ) | __riscv_vmin_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vmin_vv_i64m1 | ( | ... | ) | __riscv_vmin_vv_i64m1(__VA_ARGS__) |
| #define vmin_vv_i64m1_m | ( | ... | ) | __riscv_vmin_vv_i64m1_tumu(__VA_ARGS__) |
| #define vmin_vv_i64m2 | ( | ... | ) | __riscv_vmin_vv_i64m2(__VA_ARGS__) |
| #define vmin_vv_i64m2_m | ( | ... | ) | __riscv_vmin_vv_i64m2_tumu(__VA_ARGS__) |
| #define vmin_vv_i64m4 | ( | ... | ) | __riscv_vmin_vv_i64m4(__VA_ARGS__) |
| #define vmin_vv_i64m4_m | ( | ... | ) | __riscv_vmin_vv_i64m4_tumu(__VA_ARGS__) |
| #define vmin_vv_i64m8 | ( | ... | ) | __riscv_vmin_vv_i64m8(__VA_ARGS__) |
| #define vmin_vv_i64m8_m | ( | ... | ) | __riscv_vmin_vv_i64m8_tumu(__VA_ARGS__) |
| #define vmin_vv_i8m1 | ( | ... | ) | __riscv_vmin_vv_i8m1(__VA_ARGS__) |
| #define vmin_vv_i8m1_m | ( | ... | ) | __riscv_vmin_vv_i8m1_tumu(__VA_ARGS__) |
| #define vmin_vv_i8m2 | ( | ... | ) | __riscv_vmin_vv_i8m2(__VA_ARGS__) |
| #define vmin_vv_i8m2_m | ( | ... | ) | __riscv_vmin_vv_i8m2_tumu(__VA_ARGS__) |
| #define vmin_vv_i8m4 | ( | ... | ) | __riscv_vmin_vv_i8m4(__VA_ARGS__) |
| #define vmin_vv_i8m4_m | ( | ... | ) | __riscv_vmin_vv_i8m4_tumu(__VA_ARGS__) |
| #define vmin_vv_i8m8 | ( | ... | ) | __riscv_vmin_vv_i8m8(__VA_ARGS__) |
| #define vmin_vv_i8m8_m | ( | ... | ) | __riscv_vmin_vv_i8m8_tumu(__VA_ARGS__) |
| #define vmin_vv_i8mf2 | ( | ... | ) | __riscv_vmin_vv_i8mf2(__VA_ARGS__) |
| #define vmin_vv_i8mf2_m | ( | ... | ) | __riscv_vmin_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vmin_vv_i8mf4 | ( | ... | ) | __riscv_vmin_vv_i8mf4(__VA_ARGS__) |
| #define vmin_vv_i8mf4_m | ( | ... | ) | __riscv_vmin_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vmin_vv_i8mf8 | ( | ... | ) | __riscv_vmin_vv_i8mf8(__VA_ARGS__) |
| #define vmin_vv_i8mf8_m | ( | ... | ) | __riscv_vmin_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vmin_vx_i16m1 | ( | ... | ) | __riscv_vmin_vx_i16m1(__VA_ARGS__) |
| #define vmin_vx_i16m1_m | ( | ... | ) | __riscv_vmin_vx_i16m1_tumu(__VA_ARGS__) |
| #define vmin_vx_i16m2 | ( | ... | ) | __riscv_vmin_vx_i16m2(__VA_ARGS__) |
| #define vmin_vx_i16m2_m | ( | ... | ) | __riscv_vmin_vx_i16m2_tumu(__VA_ARGS__) |
| #define vmin_vx_i16m4 | ( | ... | ) | __riscv_vmin_vx_i16m4(__VA_ARGS__) |
| #define vmin_vx_i16m4_m | ( | ... | ) | __riscv_vmin_vx_i16m4_tumu(__VA_ARGS__) |
| #define vmin_vx_i16m8 | ( | ... | ) | __riscv_vmin_vx_i16m8(__VA_ARGS__) |
| #define vmin_vx_i16m8_m | ( | ... | ) | __riscv_vmin_vx_i16m8_tumu(__VA_ARGS__) |
| #define vmin_vx_i16mf2 | ( | ... | ) | __riscv_vmin_vx_i16mf2(__VA_ARGS__) |
| #define vmin_vx_i16mf2_m | ( | ... | ) | __riscv_vmin_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vmin_vx_i16mf4 | ( | ... | ) | __riscv_vmin_vx_i16mf4(__VA_ARGS__) |
| #define vmin_vx_i16mf4_m | ( | ... | ) | __riscv_vmin_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vmin_vx_i32m1 | ( | ... | ) | __riscv_vmin_vx_i32m1(__VA_ARGS__) |
| #define vmin_vx_i32m1_m | ( | ... | ) | __riscv_vmin_vx_i32m1_tumu(__VA_ARGS__) |
| #define vmin_vx_i32m2 | ( | ... | ) | __riscv_vmin_vx_i32m2(__VA_ARGS__) |
| #define vmin_vx_i32m2_m | ( | ... | ) | __riscv_vmin_vx_i32m2_tumu(__VA_ARGS__) |
| #define vmin_vx_i32m4 | ( | ... | ) | __riscv_vmin_vx_i32m4(__VA_ARGS__) |
| #define vmin_vx_i32m4_m | ( | ... | ) | __riscv_vmin_vx_i32m4_tumu(__VA_ARGS__) |
| #define vmin_vx_i32m8 | ( | ... | ) | __riscv_vmin_vx_i32m8(__VA_ARGS__) |
| #define vmin_vx_i32m8_m | ( | ... | ) | __riscv_vmin_vx_i32m8_tumu(__VA_ARGS__) |
| #define vmin_vx_i32mf2 | ( | ... | ) | __riscv_vmin_vx_i32mf2(__VA_ARGS__) |
| #define vmin_vx_i32mf2_m | ( | ... | ) | __riscv_vmin_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vmin_vx_i64m1 | ( | ... | ) | __riscv_vmin_vx_i64m1(__VA_ARGS__) |
| #define vmin_vx_i64m1_m | ( | ... | ) | __riscv_vmin_vx_i64m1_tumu(__VA_ARGS__) |
| #define vmin_vx_i64m2 | ( | ... | ) | __riscv_vmin_vx_i64m2(__VA_ARGS__) |
| #define vmin_vx_i64m2_m | ( | ... | ) | __riscv_vmin_vx_i64m2_tumu(__VA_ARGS__) |
| #define vmin_vx_i64m4 | ( | ... | ) | __riscv_vmin_vx_i64m4(__VA_ARGS__) |
| #define vmin_vx_i64m4_m | ( | ... | ) | __riscv_vmin_vx_i64m4_tumu(__VA_ARGS__) |
| #define vmin_vx_i64m8 | ( | ... | ) | __riscv_vmin_vx_i64m8(__VA_ARGS__) |
| #define vmin_vx_i64m8_m | ( | ... | ) | __riscv_vmin_vx_i64m8_tumu(__VA_ARGS__) |
| #define vmin_vx_i8m1 | ( | ... | ) | __riscv_vmin_vx_i8m1(__VA_ARGS__) |
| #define vmin_vx_i8m1_m | ( | ... | ) | __riscv_vmin_vx_i8m1_tumu(__VA_ARGS__) |
| #define vmin_vx_i8m2 | ( | ... | ) | __riscv_vmin_vx_i8m2(__VA_ARGS__) |
| #define vmin_vx_i8m2_m | ( | ... | ) | __riscv_vmin_vx_i8m2_tumu(__VA_ARGS__) |
| #define vmin_vx_i8m4 | ( | ... | ) | __riscv_vmin_vx_i8m4(__VA_ARGS__) |
| #define vmin_vx_i8m4_m | ( | ... | ) | __riscv_vmin_vx_i8m4_tumu(__VA_ARGS__) |
| #define vmin_vx_i8m8 | ( | ... | ) | __riscv_vmin_vx_i8m8(__VA_ARGS__) |
| #define vmin_vx_i8m8_m | ( | ... | ) | __riscv_vmin_vx_i8m8_tumu(__VA_ARGS__) |
| #define vmin_vx_i8mf2 | ( | ... | ) | __riscv_vmin_vx_i8mf2(__VA_ARGS__) |
| #define vmin_vx_i8mf2_m | ( | ... | ) | __riscv_vmin_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vmin_vx_i8mf4 | ( | ... | ) | __riscv_vmin_vx_i8mf4(__VA_ARGS__) |
| #define vmin_vx_i8mf4_m | ( | ... | ) | __riscv_vmin_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vmin_vx_i8mf8 | ( | ... | ) | __riscv_vmin_vx_i8mf8(__VA_ARGS__) |
| #define vmin_vx_i8mf8_m | ( | ... | ) | __riscv_vmin_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vminu_vv_u16m1 | ( | ... | ) | __riscv_vminu_vv_u16m1(__VA_ARGS__) |
| #define vminu_vv_u16m1_m | ( | ... | ) | __riscv_vminu_vv_u16m1_tumu(__VA_ARGS__) |
| #define vminu_vv_u16m2 | ( | ... | ) | __riscv_vminu_vv_u16m2(__VA_ARGS__) |
| #define vminu_vv_u16m2_m | ( | ... | ) | __riscv_vminu_vv_u16m2_tumu(__VA_ARGS__) |
| #define vminu_vv_u16m4 | ( | ... | ) | __riscv_vminu_vv_u16m4(__VA_ARGS__) |
| #define vminu_vv_u16m4_m | ( | ... | ) | __riscv_vminu_vv_u16m4_tumu(__VA_ARGS__) |
| #define vminu_vv_u16m8 | ( | ... | ) | __riscv_vminu_vv_u16m8(__VA_ARGS__) |
| #define vminu_vv_u16m8_m | ( | ... | ) | __riscv_vminu_vv_u16m8_tumu(__VA_ARGS__) |
| #define vminu_vv_u16mf2 | ( | ... | ) | __riscv_vminu_vv_u16mf2(__VA_ARGS__) |
| #define vminu_vv_u16mf2_m | ( | ... | ) | __riscv_vminu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vminu_vv_u16mf4 | ( | ... | ) | __riscv_vminu_vv_u16mf4(__VA_ARGS__) |
| #define vminu_vv_u16mf4_m | ( | ... | ) | __riscv_vminu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vminu_vv_u32m1 | ( | ... | ) | __riscv_vminu_vv_u32m1(__VA_ARGS__) |
| #define vminu_vv_u32m1_m | ( | ... | ) | __riscv_vminu_vv_u32m1_tumu(__VA_ARGS__) |
| #define vminu_vv_u32m2 | ( | ... | ) | __riscv_vminu_vv_u32m2(__VA_ARGS__) |
| #define vminu_vv_u32m2_m | ( | ... | ) | __riscv_vminu_vv_u32m2_tumu(__VA_ARGS__) |
| #define vminu_vv_u32m4 | ( | ... | ) | __riscv_vminu_vv_u32m4(__VA_ARGS__) |
| #define vminu_vv_u32m4_m | ( | ... | ) | __riscv_vminu_vv_u32m4_tumu(__VA_ARGS__) |
| #define vminu_vv_u32m8 | ( | ... | ) | __riscv_vminu_vv_u32m8(__VA_ARGS__) |
| #define vminu_vv_u32m8_m | ( | ... | ) | __riscv_vminu_vv_u32m8_tumu(__VA_ARGS__) |
| #define vminu_vv_u32mf2 | ( | ... | ) | __riscv_vminu_vv_u32mf2(__VA_ARGS__) |
| #define vminu_vv_u32mf2_m | ( | ... | ) | __riscv_vminu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vminu_vv_u64m1 | ( | ... | ) | __riscv_vminu_vv_u64m1(__VA_ARGS__) |
| #define vminu_vv_u64m1_m | ( | ... | ) | __riscv_vminu_vv_u64m1_tumu(__VA_ARGS__) |
| #define vminu_vv_u64m2 | ( | ... | ) | __riscv_vminu_vv_u64m2(__VA_ARGS__) |
| #define vminu_vv_u64m2_m | ( | ... | ) | __riscv_vminu_vv_u64m2_tumu(__VA_ARGS__) |
| #define vminu_vv_u64m4 | ( | ... | ) | __riscv_vminu_vv_u64m4(__VA_ARGS__) |
| #define vminu_vv_u64m4_m | ( | ... | ) | __riscv_vminu_vv_u64m4_tumu(__VA_ARGS__) |
| #define vminu_vv_u64m8 | ( | ... | ) | __riscv_vminu_vv_u64m8(__VA_ARGS__) |
| #define vminu_vv_u64m8_m | ( | ... | ) | __riscv_vminu_vv_u64m8_tumu(__VA_ARGS__) |
| #define vminu_vv_u8m1 | ( | ... | ) | __riscv_vminu_vv_u8m1(__VA_ARGS__) |
| #define vminu_vv_u8m1_m | ( | ... | ) | __riscv_vminu_vv_u8m1_tumu(__VA_ARGS__) |
| #define vminu_vv_u8m2 | ( | ... | ) | __riscv_vminu_vv_u8m2(__VA_ARGS__) |
| #define vminu_vv_u8m2_m | ( | ... | ) | __riscv_vminu_vv_u8m2_tumu(__VA_ARGS__) |
| #define vminu_vv_u8m4 | ( | ... | ) | __riscv_vminu_vv_u8m4(__VA_ARGS__) |
| #define vminu_vv_u8m4_m | ( | ... | ) | __riscv_vminu_vv_u8m4_tumu(__VA_ARGS__) |
| #define vminu_vv_u8m8 | ( | ... | ) | __riscv_vminu_vv_u8m8(__VA_ARGS__) |
| #define vminu_vv_u8m8_m | ( | ... | ) | __riscv_vminu_vv_u8m8_tumu(__VA_ARGS__) |
| #define vminu_vv_u8mf2 | ( | ... | ) | __riscv_vminu_vv_u8mf2(__VA_ARGS__) |
| #define vminu_vv_u8mf2_m | ( | ... | ) | __riscv_vminu_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vminu_vv_u8mf4 | ( | ... | ) | __riscv_vminu_vv_u8mf4(__VA_ARGS__) |
| #define vminu_vv_u8mf4_m | ( | ... | ) | __riscv_vminu_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vminu_vv_u8mf8 | ( | ... | ) | __riscv_vminu_vv_u8mf8(__VA_ARGS__) |
| #define vminu_vv_u8mf8_m | ( | ... | ) | __riscv_vminu_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vminu_vx_u16m1 | ( | ... | ) | __riscv_vminu_vx_u16m1(__VA_ARGS__) |
| #define vminu_vx_u16m1_m | ( | ... | ) | __riscv_vminu_vx_u16m1_tumu(__VA_ARGS__) |
| #define vminu_vx_u16m2 | ( | ... | ) | __riscv_vminu_vx_u16m2(__VA_ARGS__) |
| #define vminu_vx_u16m2_m | ( | ... | ) | __riscv_vminu_vx_u16m2_tumu(__VA_ARGS__) |
| #define vminu_vx_u16m4 | ( | ... | ) | __riscv_vminu_vx_u16m4(__VA_ARGS__) |
| #define vminu_vx_u16m4_m | ( | ... | ) | __riscv_vminu_vx_u16m4_tumu(__VA_ARGS__) |
| #define vminu_vx_u16m8 | ( | ... | ) | __riscv_vminu_vx_u16m8(__VA_ARGS__) |
| #define vminu_vx_u16m8_m | ( | ... | ) | __riscv_vminu_vx_u16m8_tumu(__VA_ARGS__) |
| #define vminu_vx_u16mf2 | ( | ... | ) | __riscv_vminu_vx_u16mf2(__VA_ARGS__) |
| #define vminu_vx_u16mf2_m | ( | ... | ) | __riscv_vminu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vminu_vx_u16mf4 | ( | ... | ) | __riscv_vminu_vx_u16mf4(__VA_ARGS__) |
| #define vminu_vx_u16mf4_m | ( | ... | ) | __riscv_vminu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vminu_vx_u32m1 | ( | ... | ) | __riscv_vminu_vx_u32m1(__VA_ARGS__) |
| #define vminu_vx_u32m1_m | ( | ... | ) | __riscv_vminu_vx_u32m1_tumu(__VA_ARGS__) |
| #define vminu_vx_u32m2 | ( | ... | ) | __riscv_vminu_vx_u32m2(__VA_ARGS__) |
| #define vminu_vx_u32m2_m | ( | ... | ) | __riscv_vminu_vx_u32m2_tumu(__VA_ARGS__) |
| #define vminu_vx_u32m4 | ( | ... | ) | __riscv_vminu_vx_u32m4(__VA_ARGS__) |
| #define vminu_vx_u32m4_m | ( | ... | ) | __riscv_vminu_vx_u32m4_tumu(__VA_ARGS__) |
| #define vminu_vx_u32m8 | ( | ... | ) | __riscv_vminu_vx_u32m8(__VA_ARGS__) |
| #define vminu_vx_u32m8_m | ( | ... | ) | __riscv_vminu_vx_u32m8_tumu(__VA_ARGS__) |
| #define vminu_vx_u32mf2 | ( | ... | ) | __riscv_vminu_vx_u32mf2(__VA_ARGS__) |
| #define vminu_vx_u32mf2_m | ( | ... | ) | __riscv_vminu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vminu_vx_u64m1 | ( | ... | ) | __riscv_vminu_vx_u64m1(__VA_ARGS__) |
| #define vminu_vx_u64m1_m | ( | ... | ) | __riscv_vminu_vx_u64m1_tumu(__VA_ARGS__) |
| #define vminu_vx_u64m2 | ( | ... | ) | __riscv_vminu_vx_u64m2(__VA_ARGS__) |
| #define vminu_vx_u64m2_m | ( | ... | ) | __riscv_vminu_vx_u64m2_tumu(__VA_ARGS__) |
| #define vminu_vx_u64m4 | ( | ... | ) | __riscv_vminu_vx_u64m4(__VA_ARGS__) |
| #define vminu_vx_u64m4_m | ( | ... | ) | __riscv_vminu_vx_u64m4_tumu(__VA_ARGS__) |
| #define vminu_vx_u64m8 | ( | ... | ) | __riscv_vminu_vx_u64m8(__VA_ARGS__) |
| #define vminu_vx_u64m8_m | ( | ... | ) | __riscv_vminu_vx_u64m8_tumu(__VA_ARGS__) |
| #define vminu_vx_u8m1 | ( | ... | ) | __riscv_vminu_vx_u8m1(__VA_ARGS__) |
| #define vminu_vx_u8m1_m | ( | ... | ) | __riscv_vminu_vx_u8m1_tumu(__VA_ARGS__) |
| #define vminu_vx_u8m2 | ( | ... | ) | __riscv_vminu_vx_u8m2(__VA_ARGS__) |
| #define vminu_vx_u8m2_m | ( | ... | ) | __riscv_vminu_vx_u8m2_tumu(__VA_ARGS__) |
| #define vminu_vx_u8m4 | ( | ... | ) | __riscv_vminu_vx_u8m4(__VA_ARGS__) |
| #define vminu_vx_u8m4_m | ( | ... | ) | __riscv_vminu_vx_u8m4_tumu(__VA_ARGS__) |
| #define vminu_vx_u8m8 | ( | ... | ) | __riscv_vminu_vx_u8m8(__VA_ARGS__) |
| #define vminu_vx_u8m8_m | ( | ... | ) | __riscv_vminu_vx_u8m8_tumu(__VA_ARGS__) |
| #define vminu_vx_u8mf2 | ( | ... | ) | __riscv_vminu_vx_u8mf2(__VA_ARGS__) |
| #define vminu_vx_u8mf2_m | ( | ... | ) | __riscv_vminu_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vminu_vx_u8mf4 | ( | ... | ) | __riscv_vminu_vx_u8mf4(__VA_ARGS__) |
| #define vminu_vx_u8mf4_m | ( | ... | ) | __riscv_vminu_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vminu_vx_u8mf8 | ( | ... | ) | __riscv_vminu_vx_u8mf8(__VA_ARGS__) |
| #define vminu_vx_u8mf8_m | ( | ... | ) | __riscv_vminu_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vmmv_m_b1 | ( | ... | ) | __riscv_vmmv_m_b1(__VA_ARGS__) |
| #define vmmv_m_b16 | ( | ... | ) | __riscv_vmmv_m_b16(__VA_ARGS__) |
| #define vmmv_m_b2 | ( | ... | ) | __riscv_vmmv_m_b2(__VA_ARGS__) |
| #define vmmv_m_b32 | ( | ... | ) | __riscv_vmmv_m_b32(__VA_ARGS__) |
| #define vmmv_m_b4 | ( | ... | ) | __riscv_vmmv_m_b4(__VA_ARGS__) |
| #define vmmv_m_b64 | ( | ... | ) | __riscv_vmmv_m_b64(__VA_ARGS__) |
| #define vmmv_m_b8 | ( | ... | ) | __riscv_vmmv_m_b8(__VA_ARGS__) |
| #define vmnand_mm_b1 | ( | ... | ) | __riscv_vmnand_mm_b1(__VA_ARGS__) |
| #define vmnand_mm_b16 | ( | ... | ) | __riscv_vmnand_mm_b16(__VA_ARGS__) |
| #define vmnand_mm_b2 | ( | ... | ) | __riscv_vmnand_mm_b2(__VA_ARGS__) |
| #define vmnand_mm_b32 | ( | ... | ) | __riscv_vmnand_mm_b32(__VA_ARGS__) |
| #define vmnand_mm_b4 | ( | ... | ) | __riscv_vmnand_mm_b4(__VA_ARGS__) |
| #define vmnand_mm_b64 | ( | ... | ) | __riscv_vmnand_mm_b64(__VA_ARGS__) |
| #define vmnand_mm_b8 | ( | ... | ) | __riscv_vmnand_mm_b8(__VA_ARGS__) |
| #define vmnor_mm_b1 | ( | ... | ) | __riscv_vmnor_mm_b1(__VA_ARGS__) |
| #define vmnor_mm_b16 | ( | ... | ) | __riscv_vmnor_mm_b16(__VA_ARGS__) |
| #define vmnor_mm_b2 | ( | ... | ) | __riscv_vmnor_mm_b2(__VA_ARGS__) |
| #define vmnor_mm_b32 | ( | ... | ) | __riscv_vmnor_mm_b32(__VA_ARGS__) |
| #define vmnor_mm_b4 | ( | ... | ) | __riscv_vmnor_mm_b4(__VA_ARGS__) |
| #define vmnor_mm_b64 | ( | ... | ) | __riscv_vmnor_mm_b64(__VA_ARGS__) |
| #define vmnor_mm_b8 | ( | ... | ) | __riscv_vmnor_mm_b8(__VA_ARGS__) |
| #define vmnot_m_b1 | ( | ... | ) | __riscv_vmnot_m_b1(__VA_ARGS__) |
| #define vmnot_m_b16 | ( | ... | ) | __riscv_vmnot_m_b16(__VA_ARGS__) |
| #define vmnot_m_b2 | ( | ... | ) | __riscv_vmnot_m_b2(__VA_ARGS__) |
| #define vmnot_m_b32 | ( | ... | ) | __riscv_vmnot_m_b32(__VA_ARGS__) |
| #define vmnot_m_b4 | ( | ... | ) | __riscv_vmnot_m_b4(__VA_ARGS__) |
| #define vmnot_m_b64 | ( | ... | ) | __riscv_vmnot_m_b64(__VA_ARGS__) |
| #define vmnot_m_b8 | ( | ... | ) | __riscv_vmnot_m_b8(__VA_ARGS__) |
| #define vmor_mm_b1 | ( | ... | ) | __riscv_vmor_mm_b1(__VA_ARGS__) |
| #define vmor_mm_b16 | ( | ... | ) | __riscv_vmor_mm_b16(__VA_ARGS__) |
| #define vmor_mm_b2 | ( | ... | ) | __riscv_vmor_mm_b2(__VA_ARGS__) |
| #define vmor_mm_b32 | ( | ... | ) | __riscv_vmor_mm_b32(__VA_ARGS__) |
| #define vmor_mm_b4 | ( | ... | ) | __riscv_vmor_mm_b4(__VA_ARGS__) |
| #define vmor_mm_b64 | ( | ... | ) | __riscv_vmor_mm_b64(__VA_ARGS__) |
| #define vmor_mm_b8 | ( | ... | ) | __riscv_vmor_mm_b8(__VA_ARGS__) |
| #define vmorn_mm_b1 | ( | ... | ) | __riscv_vmorn_mm_b1(__VA_ARGS__) |
| #define vmorn_mm_b16 | ( | ... | ) | __riscv_vmorn_mm_b16(__VA_ARGS__) |
| #define vmorn_mm_b2 | ( | ... | ) | __riscv_vmorn_mm_b2(__VA_ARGS__) |
| #define vmorn_mm_b32 | ( | ... | ) | __riscv_vmorn_mm_b32(__VA_ARGS__) |
| #define vmorn_mm_b4 | ( | ... | ) | __riscv_vmorn_mm_b4(__VA_ARGS__) |
| #define vmorn_mm_b64 | ( | ... | ) | __riscv_vmorn_mm_b64(__VA_ARGS__) |
| #define vmorn_mm_b8 | ( | ... | ) | __riscv_vmorn_mm_b8(__VA_ARGS__) |
| #define vmsbc_vv_i16m1_b16 | ( | ... | ) | __riscv_vmsbc_vv_i16m1_b16(__VA_ARGS__) |
| #define vmsbc_vv_i16m2_b8 | ( | ... | ) | __riscv_vmsbc_vv_i16m2_b8(__VA_ARGS__) |
| #define vmsbc_vv_i16m4_b4 | ( | ... | ) | __riscv_vmsbc_vv_i16m4_b4(__VA_ARGS__) |
| #define vmsbc_vv_i16m8_b2 | ( | ... | ) | __riscv_vmsbc_vv_i16m8_b2(__VA_ARGS__) |
| #define vmsbc_vv_i16mf2_b32 | ( | ... | ) | __riscv_vmsbc_vv_i16mf2_b32(__VA_ARGS__) |
| #define vmsbc_vv_i16mf4_b64 | ( | ... | ) | __riscv_vmsbc_vv_i16mf4_b64(__VA_ARGS__) |
| #define vmsbc_vv_i32m1_b32 | ( | ... | ) | __riscv_vmsbc_vv_i32m1_b32(__VA_ARGS__) |
| #define vmsbc_vv_i32m2_b16 | ( | ... | ) | __riscv_vmsbc_vv_i32m2_b16(__VA_ARGS__) |
| #define vmsbc_vv_i32m4_b8 | ( | ... | ) | __riscv_vmsbc_vv_i32m4_b8(__VA_ARGS__) |
| #define vmsbc_vv_i32m8_b4 | ( | ... | ) | __riscv_vmsbc_vv_i32m8_b4(__VA_ARGS__) |
| #define vmsbc_vv_i32mf2_b64 | ( | ... | ) | __riscv_vmsbc_vv_i32mf2_b64(__VA_ARGS__) |
| #define vmsbc_vv_i64m1_b64 | ( | ... | ) | __riscv_vmsbc_vv_i64m1_b64(__VA_ARGS__) |
| #define vmsbc_vv_i64m2_b32 | ( | ... | ) | __riscv_vmsbc_vv_i64m2_b32(__VA_ARGS__) |
| #define vmsbc_vv_i64m4_b16 | ( | ... | ) | __riscv_vmsbc_vv_i64m4_b16(__VA_ARGS__) |
| #define vmsbc_vv_i64m8_b8 | ( | ... | ) | __riscv_vmsbc_vv_i64m8_b8(__VA_ARGS__) |
| #define vmsbc_vv_i8m1_b8 | ( | ... | ) | __riscv_vmsbc_vv_i8m1_b8(__VA_ARGS__) |
| #define vmsbc_vv_i8m2_b4 | ( | ... | ) | __riscv_vmsbc_vv_i8m2_b4(__VA_ARGS__) |
| #define vmsbc_vv_i8m4_b2 | ( | ... | ) | __riscv_vmsbc_vv_i8m4_b2(__VA_ARGS__) |
| #define vmsbc_vv_i8m8_b1 | ( | ... | ) | __riscv_vmsbc_vv_i8m8_b1(__VA_ARGS__) |
| #define vmsbc_vv_i8mf2_b16 | ( | ... | ) | __riscv_vmsbc_vv_i8mf2_b16(__VA_ARGS__) |
| #define vmsbc_vv_i8mf4_b32 | ( | ... | ) | __riscv_vmsbc_vv_i8mf4_b32(__VA_ARGS__) |
| #define vmsbc_vv_i8mf8_b64 | ( | ... | ) | __riscv_vmsbc_vv_i8mf8_b64(__VA_ARGS__) |
| #define vmsbc_vv_u16m1_b16 | ( | ... | ) | __riscv_vmsbc_vv_u16m1_b16(__VA_ARGS__) |
| #define vmsbc_vv_u16m2_b8 | ( | ... | ) | __riscv_vmsbc_vv_u16m2_b8(__VA_ARGS__) |
| #define vmsbc_vv_u16m4_b4 | ( | ... | ) | __riscv_vmsbc_vv_u16m4_b4(__VA_ARGS__) |
| #define vmsbc_vv_u16m8_b2 | ( | ... | ) | __riscv_vmsbc_vv_u16m8_b2(__VA_ARGS__) |
| #define vmsbc_vv_u16mf2_b32 | ( | ... | ) | __riscv_vmsbc_vv_u16mf2_b32(__VA_ARGS__) |
| #define vmsbc_vv_u16mf4_b64 | ( | ... | ) | __riscv_vmsbc_vv_u16mf4_b64(__VA_ARGS__) |
| #define vmsbc_vv_u32m1_b32 | ( | ... | ) | __riscv_vmsbc_vv_u32m1_b32(__VA_ARGS__) |
| #define vmsbc_vv_u32m2_b16 | ( | ... | ) | __riscv_vmsbc_vv_u32m2_b16(__VA_ARGS__) |
| #define vmsbc_vv_u32m4_b8 | ( | ... | ) | __riscv_vmsbc_vv_u32m4_b8(__VA_ARGS__) |
| #define vmsbc_vv_u32m8_b4 | ( | ... | ) | __riscv_vmsbc_vv_u32m8_b4(__VA_ARGS__) |
| #define vmsbc_vv_u32mf2_b64 | ( | ... | ) | __riscv_vmsbc_vv_u32mf2_b64(__VA_ARGS__) |
| #define vmsbc_vv_u64m1_b64 | ( | ... | ) | __riscv_vmsbc_vv_u64m1_b64(__VA_ARGS__) |
| #define vmsbc_vv_u64m2_b32 | ( | ... | ) | __riscv_vmsbc_vv_u64m2_b32(__VA_ARGS__) |
| #define vmsbc_vv_u64m4_b16 | ( | ... | ) | __riscv_vmsbc_vv_u64m4_b16(__VA_ARGS__) |
| #define vmsbc_vv_u64m8_b8 | ( | ... | ) | __riscv_vmsbc_vv_u64m8_b8(__VA_ARGS__) |
| #define vmsbc_vv_u8m1_b8 | ( | ... | ) | __riscv_vmsbc_vv_u8m1_b8(__VA_ARGS__) |
| #define vmsbc_vv_u8m2_b4 | ( | ... | ) | __riscv_vmsbc_vv_u8m2_b4(__VA_ARGS__) |
| #define vmsbc_vv_u8m4_b2 | ( | ... | ) | __riscv_vmsbc_vv_u8m4_b2(__VA_ARGS__) |
| #define vmsbc_vv_u8m8_b1 | ( | ... | ) | __riscv_vmsbc_vv_u8m8_b1(__VA_ARGS__) |
| #define vmsbc_vv_u8mf2_b16 | ( | ... | ) | __riscv_vmsbc_vv_u8mf2_b16(__VA_ARGS__) |
| #define vmsbc_vv_u8mf4_b32 | ( | ... | ) | __riscv_vmsbc_vv_u8mf4_b32(__VA_ARGS__) |
| #define vmsbc_vv_u8mf8_b64 | ( | ... | ) | __riscv_vmsbc_vv_u8mf8_b64(__VA_ARGS__) |
| #define vmsbc_vvm_i16m1_b16 | ( | ... | ) | __riscv_vmsbc_vvm_i16m1_b16(__VA_ARGS__) |
| #define vmsbc_vvm_i16m2_b8 | ( | ... | ) | __riscv_vmsbc_vvm_i16m2_b8(__VA_ARGS__) |
| #define vmsbc_vvm_i16m4_b4 | ( | ... | ) | __riscv_vmsbc_vvm_i16m4_b4(__VA_ARGS__) |
| #define vmsbc_vvm_i16m8_b2 | ( | ... | ) | __riscv_vmsbc_vvm_i16m8_b2(__VA_ARGS__) |
| #define vmsbc_vvm_i16mf2_b32 | ( | ... | ) | __riscv_vmsbc_vvm_i16mf2_b32(__VA_ARGS__) |
| #define vmsbc_vvm_i16mf4_b64 | ( | ... | ) | __riscv_vmsbc_vvm_i16mf4_b64(__VA_ARGS__) |
| #define vmsbc_vvm_i32m1_b32 | ( | ... | ) | __riscv_vmsbc_vvm_i32m1_b32(__VA_ARGS__) |
| #define vmsbc_vvm_i32m2_b16 | ( | ... | ) | __riscv_vmsbc_vvm_i32m2_b16(__VA_ARGS__) |
| #define vmsbc_vvm_i32m4_b8 | ( | ... | ) | __riscv_vmsbc_vvm_i32m4_b8(__VA_ARGS__) |
| #define vmsbc_vvm_i32m8_b4 | ( | ... | ) | __riscv_vmsbc_vvm_i32m8_b4(__VA_ARGS__) |
| #define vmsbc_vvm_i32mf2_b64 | ( | ... | ) | __riscv_vmsbc_vvm_i32mf2_b64(__VA_ARGS__) |
| #define vmsbc_vvm_i64m1_b64 | ( | ... | ) | __riscv_vmsbc_vvm_i64m1_b64(__VA_ARGS__) |
| #define vmsbc_vvm_i64m2_b32 | ( | ... | ) | __riscv_vmsbc_vvm_i64m2_b32(__VA_ARGS__) |
| #define vmsbc_vvm_i64m4_b16 | ( | ... | ) | __riscv_vmsbc_vvm_i64m4_b16(__VA_ARGS__) |
| #define vmsbc_vvm_i64m8_b8 | ( | ... | ) | __riscv_vmsbc_vvm_i64m8_b8(__VA_ARGS__) |
| #define vmsbc_vvm_i8m1_b8 | ( | ... | ) | __riscv_vmsbc_vvm_i8m1_b8(__VA_ARGS__) |
| #define vmsbc_vvm_i8m2_b4 | ( | ... | ) | __riscv_vmsbc_vvm_i8m2_b4(__VA_ARGS__) |
| #define vmsbc_vvm_i8m4_b2 | ( | ... | ) | __riscv_vmsbc_vvm_i8m4_b2(__VA_ARGS__) |
| #define vmsbc_vvm_i8m8_b1 | ( | ... | ) | __riscv_vmsbc_vvm_i8m8_b1(__VA_ARGS__) |
| #define vmsbc_vvm_i8mf2_b16 | ( | ... | ) | __riscv_vmsbc_vvm_i8mf2_b16(__VA_ARGS__) |
| #define vmsbc_vvm_i8mf4_b32 | ( | ... | ) | __riscv_vmsbc_vvm_i8mf4_b32(__VA_ARGS__) |
| #define vmsbc_vvm_i8mf8_b64 | ( | ... | ) | __riscv_vmsbc_vvm_i8mf8_b64(__VA_ARGS__) |
| #define vmsbc_vvm_u16m1_b16 | ( | ... | ) | __riscv_vmsbc_vvm_u16m1_b16(__VA_ARGS__) |
| #define vmsbc_vvm_u16m2_b8 | ( | ... | ) | __riscv_vmsbc_vvm_u16m2_b8(__VA_ARGS__) |
| #define vmsbc_vvm_u16m4_b4 | ( | ... | ) | __riscv_vmsbc_vvm_u16m4_b4(__VA_ARGS__) |
| #define vmsbc_vvm_u16m8_b2 | ( | ... | ) | __riscv_vmsbc_vvm_u16m8_b2(__VA_ARGS__) |
| #define vmsbc_vvm_u16mf2_b32 | ( | ... | ) | __riscv_vmsbc_vvm_u16mf2_b32(__VA_ARGS__) |
| #define vmsbc_vvm_u16mf4_b64 | ( | ... | ) | __riscv_vmsbc_vvm_u16mf4_b64(__VA_ARGS__) |
| #define vmsbc_vvm_u32m1_b32 | ( | ... | ) | __riscv_vmsbc_vvm_u32m1_b32(__VA_ARGS__) |
| #define vmsbc_vvm_u32m2_b16 | ( | ... | ) | __riscv_vmsbc_vvm_u32m2_b16(__VA_ARGS__) |
| #define vmsbc_vvm_u32m4_b8 | ( | ... | ) | __riscv_vmsbc_vvm_u32m4_b8(__VA_ARGS__) |
| #define vmsbc_vvm_u32m8_b4 | ( | ... | ) | __riscv_vmsbc_vvm_u32m8_b4(__VA_ARGS__) |
| #define vmsbc_vvm_u32mf2_b64 | ( | ... | ) | __riscv_vmsbc_vvm_u32mf2_b64(__VA_ARGS__) |
| #define vmsbc_vvm_u64m1_b64 | ( | ... | ) | __riscv_vmsbc_vvm_u64m1_b64(__VA_ARGS__) |
| #define vmsbc_vvm_u64m2_b32 | ( | ... | ) | __riscv_vmsbc_vvm_u64m2_b32(__VA_ARGS__) |
| #define vmsbc_vvm_u64m4_b16 | ( | ... | ) | __riscv_vmsbc_vvm_u64m4_b16(__VA_ARGS__) |
| #define vmsbc_vvm_u64m8_b8 | ( | ... | ) | __riscv_vmsbc_vvm_u64m8_b8(__VA_ARGS__) |
| #define vmsbc_vvm_u8m1_b8 | ( | ... | ) | __riscv_vmsbc_vvm_u8m1_b8(__VA_ARGS__) |
| #define vmsbc_vvm_u8m2_b4 | ( | ... | ) | __riscv_vmsbc_vvm_u8m2_b4(__VA_ARGS__) |
| #define vmsbc_vvm_u8m4_b2 | ( | ... | ) | __riscv_vmsbc_vvm_u8m4_b2(__VA_ARGS__) |
| #define vmsbc_vvm_u8m8_b1 | ( | ... | ) | __riscv_vmsbc_vvm_u8m8_b1(__VA_ARGS__) |
| #define vmsbc_vvm_u8mf2_b16 | ( | ... | ) | __riscv_vmsbc_vvm_u8mf2_b16(__VA_ARGS__) |
| #define vmsbc_vvm_u8mf4_b32 | ( | ... | ) | __riscv_vmsbc_vvm_u8mf4_b32(__VA_ARGS__) |
| #define vmsbc_vvm_u8mf8_b64 | ( | ... | ) | __riscv_vmsbc_vvm_u8mf8_b64(__VA_ARGS__) |
| #define vmsbc_vx_i16m1_b16 | ( | ... | ) | __riscv_vmsbc_vx_i16m1_b16(__VA_ARGS__) |
| #define vmsbc_vx_i16m2_b8 | ( | ... | ) | __riscv_vmsbc_vx_i16m2_b8(__VA_ARGS__) |
| #define vmsbc_vx_i16m4_b4 | ( | ... | ) | __riscv_vmsbc_vx_i16m4_b4(__VA_ARGS__) |
| #define vmsbc_vx_i16m8_b2 | ( | ... | ) | __riscv_vmsbc_vx_i16m8_b2(__VA_ARGS__) |
| #define vmsbc_vx_i16mf2_b32 | ( | ... | ) | __riscv_vmsbc_vx_i16mf2_b32(__VA_ARGS__) |
| #define vmsbc_vx_i16mf4_b64 | ( | ... | ) | __riscv_vmsbc_vx_i16mf4_b64(__VA_ARGS__) |
| #define vmsbc_vx_i32m1_b32 | ( | ... | ) | __riscv_vmsbc_vx_i32m1_b32(__VA_ARGS__) |
| #define vmsbc_vx_i32m2_b16 | ( | ... | ) | __riscv_vmsbc_vx_i32m2_b16(__VA_ARGS__) |
| #define vmsbc_vx_i32m4_b8 | ( | ... | ) | __riscv_vmsbc_vx_i32m4_b8(__VA_ARGS__) |
| #define vmsbc_vx_i32m8_b4 | ( | ... | ) | __riscv_vmsbc_vx_i32m8_b4(__VA_ARGS__) |
| #define vmsbc_vx_i32mf2_b64 | ( | ... | ) | __riscv_vmsbc_vx_i32mf2_b64(__VA_ARGS__) |
| #define vmsbc_vx_i64m1_b64 | ( | ... | ) | __riscv_vmsbc_vx_i64m1_b64(__VA_ARGS__) |
| #define vmsbc_vx_i64m2_b32 | ( | ... | ) | __riscv_vmsbc_vx_i64m2_b32(__VA_ARGS__) |
| #define vmsbc_vx_i64m4_b16 | ( | ... | ) | __riscv_vmsbc_vx_i64m4_b16(__VA_ARGS__) |
| #define vmsbc_vx_i64m8_b8 | ( | ... | ) | __riscv_vmsbc_vx_i64m8_b8(__VA_ARGS__) |
| #define vmsbc_vx_i8m1_b8 | ( | ... | ) | __riscv_vmsbc_vx_i8m1_b8(__VA_ARGS__) |
| #define vmsbc_vx_i8m2_b4 | ( | ... | ) | __riscv_vmsbc_vx_i8m2_b4(__VA_ARGS__) |
| #define vmsbc_vx_i8m4_b2 | ( | ... | ) | __riscv_vmsbc_vx_i8m4_b2(__VA_ARGS__) |
| #define vmsbc_vx_i8m8_b1 | ( | ... | ) | __riscv_vmsbc_vx_i8m8_b1(__VA_ARGS__) |
| #define vmsbc_vx_i8mf2_b16 | ( | ... | ) | __riscv_vmsbc_vx_i8mf2_b16(__VA_ARGS__) |
| #define vmsbc_vx_i8mf4_b32 | ( | ... | ) | __riscv_vmsbc_vx_i8mf4_b32(__VA_ARGS__) |
| #define vmsbc_vx_i8mf8_b64 | ( | ... | ) | __riscv_vmsbc_vx_i8mf8_b64(__VA_ARGS__) |
| #define vmsbc_vx_u16m1_b16 | ( | ... | ) | __riscv_vmsbc_vx_u16m1_b16(__VA_ARGS__) |
| #define vmsbc_vx_u16m2_b8 | ( | ... | ) | __riscv_vmsbc_vx_u16m2_b8(__VA_ARGS__) |
| #define vmsbc_vx_u16m4_b4 | ( | ... | ) | __riscv_vmsbc_vx_u16m4_b4(__VA_ARGS__) |
| #define vmsbc_vx_u16m8_b2 | ( | ... | ) | __riscv_vmsbc_vx_u16m8_b2(__VA_ARGS__) |
| #define vmsbc_vx_u16mf2_b32 | ( | ... | ) | __riscv_vmsbc_vx_u16mf2_b32(__VA_ARGS__) |
| #define vmsbc_vx_u16mf4_b64 | ( | ... | ) | __riscv_vmsbc_vx_u16mf4_b64(__VA_ARGS__) |
| #define vmsbc_vx_u32m1_b32 | ( | ... | ) | __riscv_vmsbc_vx_u32m1_b32(__VA_ARGS__) |
| #define vmsbc_vx_u32m2_b16 | ( | ... | ) | __riscv_vmsbc_vx_u32m2_b16(__VA_ARGS__) |
| #define vmsbc_vx_u32m4_b8 | ( | ... | ) | __riscv_vmsbc_vx_u32m4_b8(__VA_ARGS__) |
| #define vmsbc_vx_u32m8_b4 | ( | ... | ) | __riscv_vmsbc_vx_u32m8_b4(__VA_ARGS__) |
| #define vmsbc_vx_u32mf2_b64 | ( | ... | ) | __riscv_vmsbc_vx_u32mf2_b64(__VA_ARGS__) |
| #define vmsbc_vx_u64m1_b64 | ( | ... | ) | __riscv_vmsbc_vx_u64m1_b64(__VA_ARGS__) |
| #define vmsbc_vx_u64m2_b32 | ( | ... | ) | __riscv_vmsbc_vx_u64m2_b32(__VA_ARGS__) |
| #define vmsbc_vx_u64m4_b16 | ( | ... | ) | __riscv_vmsbc_vx_u64m4_b16(__VA_ARGS__) |
| #define vmsbc_vx_u64m8_b8 | ( | ... | ) | __riscv_vmsbc_vx_u64m8_b8(__VA_ARGS__) |
| #define vmsbc_vx_u8m1_b8 | ( | ... | ) | __riscv_vmsbc_vx_u8m1_b8(__VA_ARGS__) |
| #define vmsbc_vx_u8m2_b4 | ( | ... | ) | __riscv_vmsbc_vx_u8m2_b4(__VA_ARGS__) |
| #define vmsbc_vx_u8m4_b2 | ( | ... | ) | __riscv_vmsbc_vx_u8m4_b2(__VA_ARGS__) |
| #define vmsbc_vx_u8m8_b1 | ( | ... | ) | __riscv_vmsbc_vx_u8m8_b1(__VA_ARGS__) |
| #define vmsbc_vx_u8mf2_b16 | ( | ... | ) | __riscv_vmsbc_vx_u8mf2_b16(__VA_ARGS__) |
| #define vmsbc_vx_u8mf4_b32 | ( | ... | ) | __riscv_vmsbc_vx_u8mf4_b32(__VA_ARGS__) |
| #define vmsbc_vx_u8mf8_b64 | ( | ... | ) | __riscv_vmsbc_vx_u8mf8_b64(__VA_ARGS__) |
| #define vmsbc_vxm_i16m1_b16 | ( | ... | ) | __riscv_vmsbc_vxm_i16m1_b16(__VA_ARGS__) |
| #define vmsbc_vxm_i16m2_b8 | ( | ... | ) | __riscv_vmsbc_vxm_i16m2_b8(__VA_ARGS__) |
| #define vmsbc_vxm_i16m4_b4 | ( | ... | ) | __riscv_vmsbc_vxm_i16m4_b4(__VA_ARGS__) |
| #define vmsbc_vxm_i16m8_b2 | ( | ... | ) | __riscv_vmsbc_vxm_i16m8_b2(__VA_ARGS__) |
| #define vmsbc_vxm_i16mf2_b32 | ( | ... | ) | __riscv_vmsbc_vxm_i16mf2_b32(__VA_ARGS__) |
| #define vmsbc_vxm_i16mf4_b64 | ( | ... | ) | __riscv_vmsbc_vxm_i16mf4_b64(__VA_ARGS__) |
| #define vmsbc_vxm_i32m1_b32 | ( | ... | ) | __riscv_vmsbc_vxm_i32m1_b32(__VA_ARGS__) |
| #define vmsbc_vxm_i32m2_b16 | ( | ... | ) | __riscv_vmsbc_vxm_i32m2_b16(__VA_ARGS__) |
| #define vmsbc_vxm_i32m4_b8 | ( | ... | ) | __riscv_vmsbc_vxm_i32m4_b8(__VA_ARGS__) |
| #define vmsbc_vxm_i32m8_b4 | ( | ... | ) | __riscv_vmsbc_vxm_i32m8_b4(__VA_ARGS__) |
| #define vmsbc_vxm_i32mf2_b64 | ( | ... | ) | __riscv_vmsbc_vxm_i32mf2_b64(__VA_ARGS__) |
| #define vmsbc_vxm_i64m1_b64 | ( | ... | ) | __riscv_vmsbc_vxm_i64m1_b64(__VA_ARGS__) |
| #define vmsbc_vxm_i64m2_b32 | ( | ... | ) | __riscv_vmsbc_vxm_i64m2_b32(__VA_ARGS__) |
| #define vmsbc_vxm_i64m4_b16 | ( | ... | ) | __riscv_vmsbc_vxm_i64m4_b16(__VA_ARGS__) |
| #define vmsbc_vxm_i64m8_b8 | ( | ... | ) | __riscv_vmsbc_vxm_i64m8_b8(__VA_ARGS__) |
| #define vmsbc_vxm_i8m1_b8 | ( | ... | ) | __riscv_vmsbc_vxm_i8m1_b8(__VA_ARGS__) |
| #define vmsbc_vxm_i8m2_b4 | ( | ... | ) | __riscv_vmsbc_vxm_i8m2_b4(__VA_ARGS__) |
| #define vmsbc_vxm_i8m4_b2 | ( | ... | ) | __riscv_vmsbc_vxm_i8m4_b2(__VA_ARGS__) |
| #define vmsbc_vxm_i8m8_b1 | ( | ... | ) | __riscv_vmsbc_vxm_i8m8_b1(__VA_ARGS__) |
| #define vmsbc_vxm_i8mf2_b16 | ( | ... | ) | __riscv_vmsbc_vxm_i8mf2_b16(__VA_ARGS__) |
| #define vmsbc_vxm_i8mf4_b32 | ( | ... | ) | __riscv_vmsbc_vxm_i8mf4_b32(__VA_ARGS__) |
| #define vmsbc_vxm_i8mf8_b64 | ( | ... | ) | __riscv_vmsbc_vxm_i8mf8_b64(__VA_ARGS__) |
| #define vmsbc_vxm_u16m1_b16 | ( | ... | ) | __riscv_vmsbc_vxm_u16m1_b16(__VA_ARGS__) |
| #define vmsbc_vxm_u16m2_b8 | ( | ... | ) | __riscv_vmsbc_vxm_u16m2_b8(__VA_ARGS__) |
| #define vmsbc_vxm_u16m4_b4 | ( | ... | ) | __riscv_vmsbc_vxm_u16m4_b4(__VA_ARGS__) |
| #define vmsbc_vxm_u16m8_b2 | ( | ... | ) | __riscv_vmsbc_vxm_u16m8_b2(__VA_ARGS__) |
| #define vmsbc_vxm_u16mf2_b32 | ( | ... | ) | __riscv_vmsbc_vxm_u16mf2_b32(__VA_ARGS__) |
| #define vmsbc_vxm_u16mf4_b64 | ( | ... | ) | __riscv_vmsbc_vxm_u16mf4_b64(__VA_ARGS__) |
| #define vmsbc_vxm_u32m1_b32 | ( | ... | ) | __riscv_vmsbc_vxm_u32m1_b32(__VA_ARGS__) |
| #define vmsbc_vxm_u32m2_b16 | ( | ... | ) | __riscv_vmsbc_vxm_u32m2_b16(__VA_ARGS__) |
| #define vmsbc_vxm_u32m4_b8 | ( | ... | ) | __riscv_vmsbc_vxm_u32m4_b8(__VA_ARGS__) |
| #define vmsbc_vxm_u32m8_b4 | ( | ... | ) | __riscv_vmsbc_vxm_u32m8_b4(__VA_ARGS__) |
| #define vmsbc_vxm_u32mf2_b64 | ( | ... | ) | __riscv_vmsbc_vxm_u32mf2_b64(__VA_ARGS__) |
| #define vmsbc_vxm_u64m1_b64 | ( | ... | ) | __riscv_vmsbc_vxm_u64m1_b64(__VA_ARGS__) |
| #define vmsbc_vxm_u64m2_b32 | ( | ... | ) | __riscv_vmsbc_vxm_u64m2_b32(__VA_ARGS__) |
| #define vmsbc_vxm_u64m4_b16 | ( | ... | ) | __riscv_vmsbc_vxm_u64m4_b16(__VA_ARGS__) |
| #define vmsbc_vxm_u64m8_b8 | ( | ... | ) | __riscv_vmsbc_vxm_u64m8_b8(__VA_ARGS__) |
| #define vmsbc_vxm_u8m1_b8 | ( | ... | ) | __riscv_vmsbc_vxm_u8m1_b8(__VA_ARGS__) |
| #define vmsbc_vxm_u8m2_b4 | ( | ... | ) | __riscv_vmsbc_vxm_u8m2_b4(__VA_ARGS__) |
| #define vmsbc_vxm_u8m4_b2 | ( | ... | ) | __riscv_vmsbc_vxm_u8m4_b2(__VA_ARGS__) |
| #define vmsbc_vxm_u8m8_b1 | ( | ... | ) | __riscv_vmsbc_vxm_u8m8_b1(__VA_ARGS__) |
| #define vmsbc_vxm_u8mf2_b16 | ( | ... | ) | __riscv_vmsbc_vxm_u8mf2_b16(__VA_ARGS__) |
| #define vmsbc_vxm_u8mf4_b32 | ( | ... | ) | __riscv_vmsbc_vxm_u8mf4_b32(__VA_ARGS__) |
| #define vmsbc_vxm_u8mf8_b64 | ( | ... | ) | __riscv_vmsbc_vxm_u8mf8_b64(__VA_ARGS__) |
| #define vmsbf_m_b1 | ( | ... | ) | __riscv_vmsbf_m_b1(__VA_ARGS__) |
| #define vmsbf_m_b16 | ( | ... | ) | __riscv_vmsbf_m_b16(__VA_ARGS__) |
| #define vmsbf_m_b16_m | ( | ... | ) | __riscv_vmsbf_m_b16_mu(__VA_ARGS__) |
| #define vmsbf_m_b1_m | ( | ... | ) | __riscv_vmsbf_m_b1_mu(__VA_ARGS__) |
| #define vmsbf_m_b2 | ( | ... | ) | __riscv_vmsbf_m_b2(__VA_ARGS__) |
| #define vmsbf_m_b2_m | ( | ... | ) | __riscv_vmsbf_m_b2_mu(__VA_ARGS__) |
| #define vmsbf_m_b32 | ( | ... | ) | __riscv_vmsbf_m_b32(__VA_ARGS__) |
| #define vmsbf_m_b32_m | ( | ... | ) | __riscv_vmsbf_m_b32_mu(__VA_ARGS__) |
| #define vmsbf_m_b4 | ( | ... | ) | __riscv_vmsbf_m_b4(__VA_ARGS__) |
| #define vmsbf_m_b4_m | ( | ... | ) | __riscv_vmsbf_m_b4_mu(__VA_ARGS__) |
| #define vmsbf_m_b64 | ( | ... | ) | __riscv_vmsbf_m_b64(__VA_ARGS__) |
| #define vmsbf_m_b64_m | ( | ... | ) | __riscv_vmsbf_m_b64_mu(__VA_ARGS__) |
| #define vmsbf_m_b8 | ( | ... | ) | __riscv_vmsbf_m_b8(__VA_ARGS__) |
| #define vmsbf_m_b8_m | ( | ... | ) | __riscv_vmsbf_m_b8_mu(__VA_ARGS__) |
| #define vmseq_vv_i16m1_b16 | ( | ... | ) | __riscv_vmseq_vv_i16m1_b16(__VA_ARGS__) |
| #define vmseq_vv_i16m1_b16_m | ( | ... | ) | __riscv_vmseq_vv_i16m1_b16_mu(__VA_ARGS__) |
| #define vmseq_vv_i16m2_b8 | ( | ... | ) | __riscv_vmseq_vv_i16m2_b8(__VA_ARGS__) |
| #define vmseq_vv_i16m2_b8_m | ( | ... | ) | __riscv_vmseq_vv_i16m2_b8_mu(__VA_ARGS__) |
| #define vmseq_vv_i16m4_b4 | ( | ... | ) | __riscv_vmseq_vv_i16m4_b4(__VA_ARGS__) |
| #define vmseq_vv_i16m4_b4_m | ( | ... | ) | __riscv_vmseq_vv_i16m4_b4_mu(__VA_ARGS__) |
| #define vmseq_vv_i16m8_b2 | ( | ... | ) | __riscv_vmseq_vv_i16m8_b2(__VA_ARGS__) |
| #define vmseq_vv_i16m8_b2_m | ( | ... | ) | __riscv_vmseq_vv_i16m8_b2_mu(__VA_ARGS__) |
| #define vmseq_vv_i16mf2_b32 | ( | ... | ) | __riscv_vmseq_vv_i16mf2_b32(__VA_ARGS__) |
| #define vmseq_vv_i16mf2_b32_m | ( | ... | ) | __riscv_vmseq_vv_i16mf2_b32_mu(__VA_ARGS__) |
| #define vmseq_vv_i16mf4_b64 | ( | ... | ) | __riscv_vmseq_vv_i16mf4_b64(__VA_ARGS__) |
| #define vmseq_vv_i16mf4_b64_m | ( | ... | ) | __riscv_vmseq_vv_i16mf4_b64_mu(__VA_ARGS__) |
| #define vmseq_vv_i32m1_b32 | ( | ... | ) | __riscv_vmseq_vv_i32m1_b32(__VA_ARGS__) |
| #define vmseq_vv_i32m1_b32_m | ( | ... | ) | __riscv_vmseq_vv_i32m1_b32_mu(__VA_ARGS__) |
| #define vmseq_vv_i32m2_b16 | ( | ... | ) | __riscv_vmseq_vv_i32m2_b16(__VA_ARGS__) |
| #define vmseq_vv_i32m2_b16_m | ( | ... | ) | __riscv_vmseq_vv_i32m2_b16_mu(__VA_ARGS__) |
| #define vmseq_vv_i32m4_b8 | ( | ... | ) | __riscv_vmseq_vv_i32m4_b8(__VA_ARGS__) |
| #define vmseq_vv_i32m4_b8_m | ( | ... | ) | __riscv_vmseq_vv_i32m4_b8_mu(__VA_ARGS__) |
| #define vmseq_vv_i32m8_b4 | ( | ... | ) | __riscv_vmseq_vv_i32m8_b4(__VA_ARGS__) |
| #define vmseq_vv_i32m8_b4_m | ( | ... | ) | __riscv_vmseq_vv_i32m8_b4_mu(__VA_ARGS__) |
| #define vmseq_vv_i32mf2_b64 | ( | ... | ) | __riscv_vmseq_vv_i32mf2_b64(__VA_ARGS__) |
| #define vmseq_vv_i32mf2_b64_m | ( | ... | ) | __riscv_vmseq_vv_i32mf2_b64_mu(__VA_ARGS__) |
| #define vmseq_vv_i64m1_b64 | ( | ... | ) | __riscv_vmseq_vv_i64m1_b64(__VA_ARGS__) |
| #define vmseq_vv_i64m1_b64_m | ( | ... | ) | __riscv_vmseq_vv_i64m1_b64_mu(__VA_ARGS__) |
| #define vmseq_vv_i64m2_b32 | ( | ... | ) | __riscv_vmseq_vv_i64m2_b32(__VA_ARGS__) |
| #define vmseq_vv_i64m2_b32_m | ( | ... | ) | __riscv_vmseq_vv_i64m2_b32_mu(__VA_ARGS__) |
| #define vmseq_vv_i64m4_b16 | ( | ... | ) | __riscv_vmseq_vv_i64m4_b16(__VA_ARGS__) |
| #define vmseq_vv_i64m4_b16_m | ( | ... | ) | __riscv_vmseq_vv_i64m4_b16_mu(__VA_ARGS__) |
| #define vmseq_vv_i64m8_b8 | ( | ... | ) | __riscv_vmseq_vv_i64m8_b8(__VA_ARGS__) |
| #define vmseq_vv_i64m8_b8_m | ( | ... | ) | __riscv_vmseq_vv_i64m8_b8_mu(__VA_ARGS__) |
| #define vmseq_vv_i8m1_b8 | ( | ... | ) | __riscv_vmseq_vv_i8m1_b8(__VA_ARGS__) |
| #define vmseq_vv_i8m1_b8_m | ( | ... | ) | __riscv_vmseq_vv_i8m1_b8_mu(__VA_ARGS__) |
| #define vmseq_vv_i8m2_b4 | ( | ... | ) | __riscv_vmseq_vv_i8m2_b4(__VA_ARGS__) |
| #define vmseq_vv_i8m2_b4_m | ( | ... | ) | __riscv_vmseq_vv_i8m2_b4_mu(__VA_ARGS__) |
| #define vmseq_vv_i8m4_b2 | ( | ... | ) | __riscv_vmseq_vv_i8m4_b2(__VA_ARGS__) |
| #define vmseq_vv_i8m4_b2_m | ( | ... | ) | __riscv_vmseq_vv_i8m4_b2_mu(__VA_ARGS__) |
| #define vmseq_vv_i8m8_b1 | ( | ... | ) | __riscv_vmseq_vv_i8m8_b1(__VA_ARGS__) |
| #define vmseq_vv_i8m8_b1_m | ( | ... | ) | __riscv_vmseq_vv_i8m8_b1_mu(__VA_ARGS__) |
| #define vmseq_vv_i8mf2_b16 | ( | ... | ) | __riscv_vmseq_vv_i8mf2_b16(__VA_ARGS__) |
| #define vmseq_vv_i8mf2_b16_m | ( | ... | ) | __riscv_vmseq_vv_i8mf2_b16_mu(__VA_ARGS__) |
| #define vmseq_vv_i8mf4_b32 | ( | ... | ) | __riscv_vmseq_vv_i8mf4_b32(__VA_ARGS__) |
| #define vmseq_vv_i8mf4_b32_m | ( | ... | ) | __riscv_vmseq_vv_i8mf4_b32_mu(__VA_ARGS__) |
| #define vmseq_vv_i8mf8_b64 | ( | ... | ) | __riscv_vmseq_vv_i8mf8_b64(__VA_ARGS__) |
| #define vmseq_vv_i8mf8_b64_m | ( | ... | ) | __riscv_vmseq_vv_i8mf8_b64_mu(__VA_ARGS__) |
| #define vmseq_vv_u16m1_b16 | ( | ... | ) | __riscv_vmseq_vv_u16m1_b16(__VA_ARGS__) |
| #define vmseq_vv_u16m1_b16_m | ( | ... | ) | __riscv_vmseq_vv_u16m1_b16_mu(__VA_ARGS__) |
| #define vmseq_vv_u16m2_b8 | ( | ... | ) | __riscv_vmseq_vv_u16m2_b8(__VA_ARGS__) |
| #define vmseq_vv_u16m2_b8_m | ( | ... | ) | __riscv_vmseq_vv_u16m2_b8_mu(__VA_ARGS__) |
| #define vmseq_vv_u16m4_b4 | ( | ... | ) | __riscv_vmseq_vv_u16m4_b4(__VA_ARGS__) |
| #define vmseq_vv_u16m4_b4_m | ( | ... | ) | __riscv_vmseq_vv_u16m4_b4_mu(__VA_ARGS__) |
| #define vmseq_vv_u16m8_b2 | ( | ... | ) | __riscv_vmseq_vv_u16m8_b2(__VA_ARGS__) |
| #define vmseq_vv_u16m8_b2_m | ( | ... | ) | __riscv_vmseq_vv_u16m8_b2_mu(__VA_ARGS__) |
| #define vmseq_vv_u16mf2_b32 | ( | ... | ) | __riscv_vmseq_vv_u16mf2_b32(__VA_ARGS__) |
| #define vmseq_vv_u16mf2_b32_m | ( | ... | ) | __riscv_vmseq_vv_u16mf2_b32_mu(__VA_ARGS__) |
| #define vmseq_vv_u16mf4_b64 | ( | ... | ) | __riscv_vmseq_vv_u16mf4_b64(__VA_ARGS__) |
| #define vmseq_vv_u16mf4_b64_m | ( | ... | ) | __riscv_vmseq_vv_u16mf4_b64_mu(__VA_ARGS__) |
| #define vmseq_vv_u32m1_b32 | ( | ... | ) | __riscv_vmseq_vv_u32m1_b32(__VA_ARGS__) |
| #define vmseq_vv_u32m1_b32_m | ( | ... | ) | __riscv_vmseq_vv_u32m1_b32_mu(__VA_ARGS__) |
| #define vmseq_vv_u32m2_b16 | ( | ... | ) | __riscv_vmseq_vv_u32m2_b16(__VA_ARGS__) |
| #define vmseq_vv_u32m2_b16_m | ( | ... | ) | __riscv_vmseq_vv_u32m2_b16_mu(__VA_ARGS__) |
| #define vmseq_vv_u32m4_b8 | ( | ... | ) | __riscv_vmseq_vv_u32m4_b8(__VA_ARGS__) |
| #define vmseq_vv_u32m4_b8_m | ( | ... | ) | __riscv_vmseq_vv_u32m4_b8_mu(__VA_ARGS__) |
| #define vmseq_vv_u32m8_b4 | ( | ... | ) | __riscv_vmseq_vv_u32m8_b4(__VA_ARGS__) |
| #define vmseq_vv_u32m8_b4_m | ( | ... | ) | __riscv_vmseq_vv_u32m8_b4_mu(__VA_ARGS__) |
| #define vmseq_vv_u32mf2_b64 | ( | ... | ) | __riscv_vmseq_vv_u32mf2_b64(__VA_ARGS__) |
| #define vmseq_vv_u32mf2_b64_m | ( | ... | ) | __riscv_vmseq_vv_u32mf2_b64_mu(__VA_ARGS__) |
| #define vmseq_vv_u64m1_b64 | ( | ... | ) | __riscv_vmseq_vv_u64m1_b64(__VA_ARGS__) |
| #define vmseq_vv_u64m1_b64_m | ( | ... | ) | __riscv_vmseq_vv_u64m1_b64_mu(__VA_ARGS__) |
| #define vmseq_vv_u64m2_b32 | ( | ... | ) | __riscv_vmseq_vv_u64m2_b32(__VA_ARGS__) |
| #define vmseq_vv_u64m2_b32_m | ( | ... | ) | __riscv_vmseq_vv_u64m2_b32_mu(__VA_ARGS__) |
| #define vmseq_vv_u64m4_b16 | ( | ... | ) | __riscv_vmseq_vv_u64m4_b16(__VA_ARGS__) |
| #define vmseq_vv_u64m4_b16_m | ( | ... | ) | __riscv_vmseq_vv_u64m4_b16_mu(__VA_ARGS__) |
| #define vmseq_vv_u64m8_b8 | ( | ... | ) | __riscv_vmseq_vv_u64m8_b8(__VA_ARGS__) |
| #define vmseq_vv_u64m8_b8_m | ( | ... | ) | __riscv_vmseq_vv_u64m8_b8_mu(__VA_ARGS__) |
| #define vmseq_vv_u8m1_b8 | ( | ... | ) | __riscv_vmseq_vv_u8m1_b8(__VA_ARGS__) |
| #define vmseq_vv_u8m1_b8_m | ( | ... | ) | __riscv_vmseq_vv_u8m1_b8_mu(__VA_ARGS__) |
| #define vmseq_vv_u8m2_b4 | ( | ... | ) | __riscv_vmseq_vv_u8m2_b4(__VA_ARGS__) |
| #define vmseq_vv_u8m2_b4_m | ( | ... | ) | __riscv_vmseq_vv_u8m2_b4_mu(__VA_ARGS__) |
| #define vmseq_vv_u8m4_b2 | ( | ... | ) | __riscv_vmseq_vv_u8m4_b2(__VA_ARGS__) |
| #define vmseq_vv_u8m4_b2_m | ( | ... | ) | __riscv_vmseq_vv_u8m4_b2_mu(__VA_ARGS__) |
| #define vmseq_vv_u8m8_b1 | ( | ... | ) | __riscv_vmseq_vv_u8m8_b1(__VA_ARGS__) |
| #define vmseq_vv_u8m8_b1_m | ( | ... | ) | __riscv_vmseq_vv_u8m8_b1_mu(__VA_ARGS__) |
| #define vmseq_vv_u8mf2_b16 | ( | ... | ) | __riscv_vmseq_vv_u8mf2_b16(__VA_ARGS__) |
| #define vmseq_vv_u8mf2_b16_m | ( | ... | ) | __riscv_vmseq_vv_u8mf2_b16_mu(__VA_ARGS__) |
| #define vmseq_vv_u8mf4_b32 | ( | ... | ) | __riscv_vmseq_vv_u8mf4_b32(__VA_ARGS__) |
| #define vmseq_vv_u8mf4_b32_m | ( | ... | ) | __riscv_vmseq_vv_u8mf4_b32_mu(__VA_ARGS__) |
| #define vmseq_vv_u8mf8_b64 | ( | ... | ) | __riscv_vmseq_vv_u8mf8_b64(__VA_ARGS__) |
| #define vmseq_vv_u8mf8_b64_m | ( | ... | ) | __riscv_vmseq_vv_u8mf8_b64_mu(__VA_ARGS__) |
| #define vmseq_vx_i16m1_b16 | ( | ... | ) | __riscv_vmseq_vx_i16m1_b16(__VA_ARGS__) |
| #define vmseq_vx_i16m1_b16_m | ( | ... | ) | __riscv_vmseq_vx_i16m1_b16_mu(__VA_ARGS__) |
| #define vmseq_vx_i16m2_b8 | ( | ... | ) | __riscv_vmseq_vx_i16m2_b8(__VA_ARGS__) |
| #define vmseq_vx_i16m2_b8_m | ( | ... | ) | __riscv_vmseq_vx_i16m2_b8_mu(__VA_ARGS__) |
| #define vmseq_vx_i16m4_b4 | ( | ... | ) | __riscv_vmseq_vx_i16m4_b4(__VA_ARGS__) |
| #define vmseq_vx_i16m4_b4_m | ( | ... | ) | __riscv_vmseq_vx_i16m4_b4_mu(__VA_ARGS__) |
| #define vmseq_vx_i16m8_b2 | ( | ... | ) | __riscv_vmseq_vx_i16m8_b2(__VA_ARGS__) |
| #define vmseq_vx_i16m8_b2_m | ( | ... | ) | __riscv_vmseq_vx_i16m8_b2_mu(__VA_ARGS__) |
| #define vmseq_vx_i16mf2_b32 | ( | ... | ) | __riscv_vmseq_vx_i16mf2_b32(__VA_ARGS__) |
| #define vmseq_vx_i16mf2_b32_m | ( | ... | ) | __riscv_vmseq_vx_i16mf2_b32_mu(__VA_ARGS__) |
| #define vmseq_vx_i16mf4_b64 | ( | ... | ) | __riscv_vmseq_vx_i16mf4_b64(__VA_ARGS__) |
| #define vmseq_vx_i16mf4_b64_m | ( | ... | ) | __riscv_vmseq_vx_i16mf4_b64_mu(__VA_ARGS__) |
| #define vmseq_vx_i32m1_b32 | ( | ... | ) | __riscv_vmseq_vx_i32m1_b32(__VA_ARGS__) |
| #define vmseq_vx_i32m1_b32_m | ( | ... | ) | __riscv_vmseq_vx_i32m1_b32_mu(__VA_ARGS__) |
| #define vmseq_vx_i32m2_b16 | ( | ... | ) | __riscv_vmseq_vx_i32m2_b16(__VA_ARGS__) |
| #define vmseq_vx_i32m2_b16_m | ( | ... | ) | __riscv_vmseq_vx_i32m2_b16_mu(__VA_ARGS__) |
| #define vmseq_vx_i32m4_b8 | ( | ... | ) | __riscv_vmseq_vx_i32m4_b8(__VA_ARGS__) |
| #define vmseq_vx_i32m4_b8_m | ( | ... | ) | __riscv_vmseq_vx_i32m4_b8_mu(__VA_ARGS__) |
| #define vmseq_vx_i32m8_b4 | ( | ... | ) | __riscv_vmseq_vx_i32m8_b4(__VA_ARGS__) |
| #define vmseq_vx_i32m8_b4_m | ( | ... | ) | __riscv_vmseq_vx_i32m8_b4_mu(__VA_ARGS__) |
| #define vmseq_vx_i32mf2_b64 | ( | ... | ) | __riscv_vmseq_vx_i32mf2_b64(__VA_ARGS__) |
| #define vmseq_vx_i32mf2_b64_m | ( | ... | ) | __riscv_vmseq_vx_i32mf2_b64_mu(__VA_ARGS__) |
| #define vmseq_vx_i64m1_b64 | ( | ... | ) | __riscv_vmseq_vx_i64m1_b64(__VA_ARGS__) |
| #define vmseq_vx_i64m1_b64_m | ( | ... | ) | __riscv_vmseq_vx_i64m1_b64_mu(__VA_ARGS__) |
| #define vmseq_vx_i64m2_b32 | ( | ... | ) | __riscv_vmseq_vx_i64m2_b32(__VA_ARGS__) |
| #define vmseq_vx_i64m2_b32_m | ( | ... | ) | __riscv_vmseq_vx_i64m2_b32_mu(__VA_ARGS__) |
| #define vmseq_vx_i64m4_b16 | ( | ... | ) | __riscv_vmseq_vx_i64m4_b16(__VA_ARGS__) |
| #define vmseq_vx_i64m4_b16_m | ( | ... | ) | __riscv_vmseq_vx_i64m4_b16_mu(__VA_ARGS__) |
| #define vmseq_vx_i64m8_b8 | ( | ... | ) | __riscv_vmseq_vx_i64m8_b8(__VA_ARGS__) |
| #define vmseq_vx_i64m8_b8_m | ( | ... | ) | __riscv_vmseq_vx_i64m8_b8_mu(__VA_ARGS__) |
| #define vmseq_vx_i8m1_b8 | ( | ... | ) | __riscv_vmseq_vx_i8m1_b8(__VA_ARGS__) |
| #define vmseq_vx_i8m1_b8_m | ( | ... | ) | __riscv_vmseq_vx_i8m1_b8_mu(__VA_ARGS__) |
| #define vmseq_vx_i8m2_b4 | ( | ... | ) | __riscv_vmseq_vx_i8m2_b4(__VA_ARGS__) |
| #define vmseq_vx_i8m2_b4_m | ( | ... | ) | __riscv_vmseq_vx_i8m2_b4_mu(__VA_ARGS__) |
| #define vmseq_vx_i8m4_b2 | ( | ... | ) | __riscv_vmseq_vx_i8m4_b2(__VA_ARGS__) |
| #define vmseq_vx_i8m4_b2_m | ( | ... | ) | __riscv_vmseq_vx_i8m4_b2_mu(__VA_ARGS__) |
| #define vmseq_vx_i8m8_b1 | ( | ... | ) | __riscv_vmseq_vx_i8m8_b1(__VA_ARGS__) |
| #define vmseq_vx_i8m8_b1_m | ( | ... | ) | __riscv_vmseq_vx_i8m8_b1_mu(__VA_ARGS__) |
| #define vmseq_vx_i8mf2_b16 | ( | ... | ) | __riscv_vmseq_vx_i8mf2_b16(__VA_ARGS__) |
| #define vmseq_vx_i8mf2_b16_m | ( | ... | ) | __riscv_vmseq_vx_i8mf2_b16_mu(__VA_ARGS__) |
| #define vmseq_vx_i8mf4_b32 | ( | ... | ) | __riscv_vmseq_vx_i8mf4_b32(__VA_ARGS__) |
| #define vmseq_vx_i8mf4_b32_m | ( | ... | ) | __riscv_vmseq_vx_i8mf4_b32_mu(__VA_ARGS__) |
| #define vmseq_vx_i8mf8_b64 | ( | ... | ) | __riscv_vmseq_vx_i8mf8_b64(__VA_ARGS__) |
| #define vmseq_vx_i8mf8_b64_m | ( | ... | ) | __riscv_vmseq_vx_i8mf8_b64_mu(__VA_ARGS__) |
| #define vmseq_vx_u16m1_b16 | ( | ... | ) | __riscv_vmseq_vx_u16m1_b16(__VA_ARGS__) |
| #define vmseq_vx_u16m1_b16_m | ( | ... | ) | __riscv_vmseq_vx_u16m1_b16_mu(__VA_ARGS__) |
| #define vmseq_vx_u16m2_b8 | ( | ... | ) | __riscv_vmseq_vx_u16m2_b8(__VA_ARGS__) |
| #define vmseq_vx_u16m2_b8_m | ( | ... | ) | __riscv_vmseq_vx_u16m2_b8_mu(__VA_ARGS__) |
| #define vmseq_vx_u16m4_b4 | ( | ... | ) | __riscv_vmseq_vx_u16m4_b4(__VA_ARGS__) |
| #define vmseq_vx_u16m4_b4_m | ( | ... | ) | __riscv_vmseq_vx_u16m4_b4_mu(__VA_ARGS__) |
| #define vmseq_vx_u16m8_b2 | ( | ... | ) | __riscv_vmseq_vx_u16m8_b2(__VA_ARGS__) |
| #define vmseq_vx_u16m8_b2_m | ( | ... | ) | __riscv_vmseq_vx_u16m8_b2_mu(__VA_ARGS__) |
| #define vmseq_vx_u16mf2_b32 | ( | ... | ) | __riscv_vmseq_vx_u16mf2_b32(__VA_ARGS__) |
| #define vmseq_vx_u16mf2_b32_m | ( | ... | ) | __riscv_vmseq_vx_u16mf2_b32_mu(__VA_ARGS__) |
| #define vmseq_vx_u16mf4_b64 | ( | ... | ) | __riscv_vmseq_vx_u16mf4_b64(__VA_ARGS__) |
| #define vmseq_vx_u16mf4_b64_m | ( | ... | ) | __riscv_vmseq_vx_u16mf4_b64_mu(__VA_ARGS__) |
| #define vmseq_vx_u32m1_b32 | ( | ... | ) | __riscv_vmseq_vx_u32m1_b32(__VA_ARGS__) |
| #define vmseq_vx_u32m1_b32_m | ( | ... | ) | __riscv_vmseq_vx_u32m1_b32_mu(__VA_ARGS__) |
| #define vmseq_vx_u32m2_b16 | ( | ... | ) | __riscv_vmseq_vx_u32m2_b16(__VA_ARGS__) |
| #define vmseq_vx_u32m2_b16_m | ( | ... | ) | __riscv_vmseq_vx_u32m2_b16_mu(__VA_ARGS__) |
| #define vmseq_vx_u32m4_b8 | ( | ... | ) | __riscv_vmseq_vx_u32m4_b8(__VA_ARGS__) |
| #define vmseq_vx_u32m4_b8_m | ( | ... | ) | __riscv_vmseq_vx_u32m4_b8_mu(__VA_ARGS__) |
| #define vmseq_vx_u32m8_b4 | ( | ... | ) | __riscv_vmseq_vx_u32m8_b4(__VA_ARGS__) |
| #define vmseq_vx_u32m8_b4_m | ( | ... | ) | __riscv_vmseq_vx_u32m8_b4_mu(__VA_ARGS__) |
| #define vmseq_vx_u32mf2_b64 | ( | ... | ) | __riscv_vmseq_vx_u32mf2_b64(__VA_ARGS__) |
| #define vmseq_vx_u32mf2_b64_m | ( | ... | ) | __riscv_vmseq_vx_u32mf2_b64_mu(__VA_ARGS__) |
| #define vmseq_vx_u64m1_b64 | ( | ... | ) | __riscv_vmseq_vx_u64m1_b64(__VA_ARGS__) |
| #define vmseq_vx_u64m1_b64_m | ( | ... | ) | __riscv_vmseq_vx_u64m1_b64_mu(__VA_ARGS__) |
| #define vmseq_vx_u64m2_b32 | ( | ... | ) | __riscv_vmseq_vx_u64m2_b32(__VA_ARGS__) |
| #define vmseq_vx_u64m2_b32_m | ( | ... | ) | __riscv_vmseq_vx_u64m2_b32_mu(__VA_ARGS__) |
| #define vmseq_vx_u64m4_b16 | ( | ... | ) | __riscv_vmseq_vx_u64m4_b16(__VA_ARGS__) |
| #define vmseq_vx_u64m4_b16_m | ( | ... | ) | __riscv_vmseq_vx_u64m4_b16_mu(__VA_ARGS__) |
| #define vmseq_vx_u64m8_b8 | ( | ... | ) | __riscv_vmseq_vx_u64m8_b8(__VA_ARGS__) |
| #define vmseq_vx_u64m8_b8_m | ( | ... | ) | __riscv_vmseq_vx_u64m8_b8_mu(__VA_ARGS__) |
| #define vmseq_vx_u8m1_b8 | ( | ... | ) | __riscv_vmseq_vx_u8m1_b8(__VA_ARGS__) |
| #define vmseq_vx_u8m1_b8_m | ( | ... | ) | __riscv_vmseq_vx_u8m1_b8_mu(__VA_ARGS__) |
| #define vmseq_vx_u8m2_b4 | ( | ... | ) | __riscv_vmseq_vx_u8m2_b4(__VA_ARGS__) |
| #define vmseq_vx_u8m2_b4_m | ( | ... | ) | __riscv_vmseq_vx_u8m2_b4_mu(__VA_ARGS__) |
| #define vmseq_vx_u8m4_b2 | ( | ... | ) | __riscv_vmseq_vx_u8m4_b2(__VA_ARGS__) |
| #define vmseq_vx_u8m4_b2_m | ( | ... | ) | __riscv_vmseq_vx_u8m4_b2_mu(__VA_ARGS__) |
| #define vmseq_vx_u8m8_b1 | ( | ... | ) | __riscv_vmseq_vx_u8m8_b1(__VA_ARGS__) |
| #define vmseq_vx_u8m8_b1_m | ( | ... | ) | __riscv_vmseq_vx_u8m8_b1_mu(__VA_ARGS__) |
| #define vmseq_vx_u8mf2_b16 | ( | ... | ) | __riscv_vmseq_vx_u8mf2_b16(__VA_ARGS__) |
| #define vmseq_vx_u8mf2_b16_m | ( | ... | ) | __riscv_vmseq_vx_u8mf2_b16_mu(__VA_ARGS__) |
| #define vmseq_vx_u8mf4_b32 | ( | ... | ) | __riscv_vmseq_vx_u8mf4_b32(__VA_ARGS__) |
| #define vmseq_vx_u8mf4_b32_m | ( | ... | ) | __riscv_vmseq_vx_u8mf4_b32_mu(__VA_ARGS__) |
| #define vmseq_vx_u8mf8_b64 | ( | ... | ) | __riscv_vmseq_vx_u8mf8_b64(__VA_ARGS__) |
| #define vmseq_vx_u8mf8_b64_m | ( | ... | ) | __riscv_vmseq_vx_u8mf8_b64_mu(__VA_ARGS__) |
| #define vmset_m_b1 | ( | ... | ) | __riscv_vmset_m_b1(__VA_ARGS__) |
| #define vmset_m_b16 | ( | ... | ) | __riscv_vmset_m_b16(__VA_ARGS__) |
| #define vmset_m_b2 | ( | ... | ) | __riscv_vmset_m_b2(__VA_ARGS__) |
| #define vmset_m_b32 | ( | ... | ) | __riscv_vmset_m_b32(__VA_ARGS__) |
| #define vmset_m_b4 | ( | ... | ) | __riscv_vmset_m_b4(__VA_ARGS__) |
| #define vmset_m_b64 | ( | ... | ) | __riscv_vmset_m_b64(__VA_ARGS__) |
| #define vmset_m_b8 | ( | ... | ) | __riscv_vmset_m_b8(__VA_ARGS__) |
| #define vmsge_vv_i16m1_b16 | ( | ... | ) | __riscv_vmsge_vv_i16m1_b16(__VA_ARGS__) |
| #define vmsge_vv_i16m1_b16_m | ( | ... | ) | __riscv_vmsge_vv_i16m1_b16_mu(__VA_ARGS__) |
| #define vmsge_vv_i16m2_b8 | ( | ... | ) | __riscv_vmsge_vv_i16m2_b8(__VA_ARGS__) |
| #define vmsge_vv_i16m2_b8_m | ( | ... | ) | __riscv_vmsge_vv_i16m2_b8_mu(__VA_ARGS__) |
| #define vmsge_vv_i16m4_b4 | ( | ... | ) | __riscv_vmsge_vv_i16m4_b4(__VA_ARGS__) |
| #define vmsge_vv_i16m4_b4_m | ( | ... | ) | __riscv_vmsge_vv_i16m4_b4_mu(__VA_ARGS__) |
| #define vmsge_vv_i16m8_b2 | ( | ... | ) | __riscv_vmsge_vv_i16m8_b2(__VA_ARGS__) |
| #define vmsge_vv_i16m8_b2_m | ( | ... | ) | __riscv_vmsge_vv_i16m8_b2_mu(__VA_ARGS__) |
| #define vmsge_vv_i16mf2_b32 | ( | ... | ) | __riscv_vmsge_vv_i16mf2_b32(__VA_ARGS__) |
| #define vmsge_vv_i16mf2_b32_m | ( | ... | ) | __riscv_vmsge_vv_i16mf2_b32_mu(__VA_ARGS__) |
| #define vmsge_vv_i16mf4_b64 | ( | ... | ) | __riscv_vmsge_vv_i16mf4_b64(__VA_ARGS__) |
| #define vmsge_vv_i16mf4_b64_m | ( | ... | ) | __riscv_vmsge_vv_i16mf4_b64_mu(__VA_ARGS__) |
| #define vmsge_vv_i32m1_b32 | ( | ... | ) | __riscv_vmsge_vv_i32m1_b32(__VA_ARGS__) |
| #define vmsge_vv_i32m1_b32_m | ( | ... | ) | __riscv_vmsge_vv_i32m1_b32_mu(__VA_ARGS__) |
| #define vmsge_vv_i32m2_b16 | ( | ... | ) | __riscv_vmsge_vv_i32m2_b16(__VA_ARGS__) |
| #define vmsge_vv_i32m2_b16_m | ( | ... | ) | __riscv_vmsge_vv_i32m2_b16_mu(__VA_ARGS__) |
| #define vmsge_vv_i32m4_b8 | ( | ... | ) | __riscv_vmsge_vv_i32m4_b8(__VA_ARGS__) |
| #define vmsge_vv_i32m4_b8_m | ( | ... | ) | __riscv_vmsge_vv_i32m4_b8_mu(__VA_ARGS__) |
| #define vmsge_vv_i32m8_b4 | ( | ... | ) | __riscv_vmsge_vv_i32m8_b4(__VA_ARGS__) |
| #define vmsge_vv_i32m8_b4_m | ( | ... | ) | __riscv_vmsge_vv_i32m8_b4_mu(__VA_ARGS__) |
| #define vmsge_vv_i32mf2_b64 | ( | ... | ) | __riscv_vmsge_vv_i32mf2_b64(__VA_ARGS__) |
| #define vmsge_vv_i32mf2_b64_m | ( | ... | ) | __riscv_vmsge_vv_i32mf2_b64_mu(__VA_ARGS__) |
| #define vmsge_vv_i64m1_b64 | ( | ... | ) | __riscv_vmsge_vv_i64m1_b64(__VA_ARGS__) |
| #define vmsge_vv_i64m1_b64_m | ( | ... | ) | __riscv_vmsge_vv_i64m1_b64_mu(__VA_ARGS__) |
| #define vmsge_vv_i64m2_b32 | ( | ... | ) | __riscv_vmsge_vv_i64m2_b32(__VA_ARGS__) |
| #define vmsge_vv_i64m2_b32_m | ( | ... | ) | __riscv_vmsge_vv_i64m2_b32_mu(__VA_ARGS__) |
| #define vmsge_vv_i64m4_b16 | ( | ... | ) | __riscv_vmsge_vv_i64m4_b16(__VA_ARGS__) |
| #define vmsge_vv_i64m4_b16_m | ( | ... | ) | __riscv_vmsge_vv_i64m4_b16_mu(__VA_ARGS__) |
| #define vmsge_vv_i64m8_b8 | ( | ... | ) | __riscv_vmsge_vv_i64m8_b8(__VA_ARGS__) |
| #define vmsge_vv_i64m8_b8_m | ( | ... | ) | __riscv_vmsge_vv_i64m8_b8_mu(__VA_ARGS__) |
| #define vmsge_vv_i8m1_b8 | ( | ... | ) | __riscv_vmsge_vv_i8m1_b8(__VA_ARGS__) |
| #define vmsge_vv_i8m1_b8_m | ( | ... | ) | __riscv_vmsge_vv_i8m1_b8_mu(__VA_ARGS__) |
| #define vmsge_vv_i8m2_b4 | ( | ... | ) | __riscv_vmsge_vv_i8m2_b4(__VA_ARGS__) |
| #define vmsge_vv_i8m2_b4_m | ( | ... | ) | __riscv_vmsge_vv_i8m2_b4_mu(__VA_ARGS__) |
| #define vmsge_vv_i8m4_b2 | ( | ... | ) | __riscv_vmsge_vv_i8m4_b2(__VA_ARGS__) |
| #define vmsge_vv_i8m4_b2_m | ( | ... | ) | __riscv_vmsge_vv_i8m4_b2_mu(__VA_ARGS__) |
| #define vmsge_vv_i8m8_b1 | ( | ... | ) | __riscv_vmsge_vv_i8m8_b1(__VA_ARGS__) |
| #define vmsge_vv_i8m8_b1_m | ( | ... | ) | __riscv_vmsge_vv_i8m8_b1_mu(__VA_ARGS__) |
| #define vmsge_vv_i8mf2_b16 | ( | ... | ) | __riscv_vmsge_vv_i8mf2_b16(__VA_ARGS__) |
| #define vmsge_vv_i8mf2_b16_m | ( | ... | ) | __riscv_vmsge_vv_i8mf2_b16_mu(__VA_ARGS__) |
| #define vmsge_vv_i8mf4_b32 | ( | ... | ) | __riscv_vmsge_vv_i8mf4_b32(__VA_ARGS__) |
| #define vmsge_vv_i8mf4_b32_m | ( | ... | ) | __riscv_vmsge_vv_i8mf4_b32_mu(__VA_ARGS__) |
| #define vmsge_vv_i8mf8_b64 | ( | ... | ) | __riscv_vmsge_vv_i8mf8_b64(__VA_ARGS__) |
| #define vmsge_vv_i8mf8_b64_m | ( | ... | ) | __riscv_vmsge_vv_i8mf8_b64_mu(__VA_ARGS__) |
| #define vmsge_vx_i16m1_b16 | ( | ... | ) | __riscv_vmsge_vx_i16m1_b16(__VA_ARGS__) |
| #define vmsge_vx_i16m1_b16_m | ( | ... | ) | __riscv_vmsge_vx_i16m1_b16_mu(__VA_ARGS__) |
| #define vmsge_vx_i16m2_b8 | ( | ... | ) | __riscv_vmsge_vx_i16m2_b8(__VA_ARGS__) |
| #define vmsge_vx_i16m2_b8_m | ( | ... | ) | __riscv_vmsge_vx_i16m2_b8_mu(__VA_ARGS__) |
| #define vmsge_vx_i16m4_b4 | ( | ... | ) | __riscv_vmsge_vx_i16m4_b4(__VA_ARGS__) |
| #define vmsge_vx_i16m4_b4_m | ( | ... | ) | __riscv_vmsge_vx_i16m4_b4_mu(__VA_ARGS__) |
| #define vmsge_vx_i16m8_b2 | ( | ... | ) | __riscv_vmsge_vx_i16m8_b2(__VA_ARGS__) |
| #define vmsge_vx_i16m8_b2_m | ( | ... | ) | __riscv_vmsge_vx_i16m8_b2_mu(__VA_ARGS__) |
| #define vmsge_vx_i16mf2_b32 | ( | ... | ) | __riscv_vmsge_vx_i16mf2_b32(__VA_ARGS__) |
| #define vmsge_vx_i16mf2_b32_m | ( | ... | ) | __riscv_vmsge_vx_i16mf2_b32_mu(__VA_ARGS__) |
| #define vmsge_vx_i16mf4_b64 | ( | ... | ) | __riscv_vmsge_vx_i16mf4_b64(__VA_ARGS__) |
| #define vmsge_vx_i16mf4_b64_m | ( | ... | ) | __riscv_vmsge_vx_i16mf4_b64_mu(__VA_ARGS__) |
| #define vmsge_vx_i32m1_b32 | ( | ... | ) | __riscv_vmsge_vx_i32m1_b32(__VA_ARGS__) |
| #define vmsge_vx_i32m1_b32_m | ( | ... | ) | __riscv_vmsge_vx_i32m1_b32_mu(__VA_ARGS__) |
| #define vmsge_vx_i32m2_b16 | ( | ... | ) | __riscv_vmsge_vx_i32m2_b16(__VA_ARGS__) |
| #define vmsge_vx_i32m2_b16_m | ( | ... | ) | __riscv_vmsge_vx_i32m2_b16_mu(__VA_ARGS__) |
| #define vmsge_vx_i32m4_b8 | ( | ... | ) | __riscv_vmsge_vx_i32m4_b8(__VA_ARGS__) |
| #define vmsge_vx_i32m4_b8_m | ( | ... | ) | __riscv_vmsge_vx_i32m4_b8_mu(__VA_ARGS__) |
| #define vmsge_vx_i32m8_b4 | ( | ... | ) | __riscv_vmsge_vx_i32m8_b4(__VA_ARGS__) |
| #define vmsge_vx_i32m8_b4_m | ( | ... | ) | __riscv_vmsge_vx_i32m8_b4_mu(__VA_ARGS__) |
| #define vmsge_vx_i32mf2_b64 | ( | ... | ) | __riscv_vmsge_vx_i32mf2_b64(__VA_ARGS__) |
| #define vmsge_vx_i32mf2_b64_m | ( | ... | ) | __riscv_vmsge_vx_i32mf2_b64_mu(__VA_ARGS__) |
| #define vmsge_vx_i64m1_b64 | ( | ... | ) | __riscv_vmsge_vx_i64m1_b64(__VA_ARGS__) |
| #define vmsge_vx_i64m1_b64_m | ( | ... | ) | __riscv_vmsge_vx_i64m1_b64_mu(__VA_ARGS__) |
| #define vmsge_vx_i64m2_b32 | ( | ... | ) | __riscv_vmsge_vx_i64m2_b32(__VA_ARGS__) |
| #define vmsge_vx_i64m2_b32_m | ( | ... | ) | __riscv_vmsge_vx_i64m2_b32_mu(__VA_ARGS__) |
| #define vmsge_vx_i64m4_b16 | ( | ... | ) | __riscv_vmsge_vx_i64m4_b16(__VA_ARGS__) |
| #define vmsge_vx_i64m4_b16_m | ( | ... | ) | __riscv_vmsge_vx_i64m4_b16_mu(__VA_ARGS__) |
| #define vmsge_vx_i64m8_b8 | ( | ... | ) | __riscv_vmsge_vx_i64m8_b8(__VA_ARGS__) |
| #define vmsge_vx_i64m8_b8_m | ( | ... | ) | __riscv_vmsge_vx_i64m8_b8_mu(__VA_ARGS__) |
| #define vmsge_vx_i8m1_b8 | ( | ... | ) | __riscv_vmsge_vx_i8m1_b8(__VA_ARGS__) |
| #define vmsge_vx_i8m1_b8_m | ( | ... | ) | __riscv_vmsge_vx_i8m1_b8_mu(__VA_ARGS__) |
| #define vmsge_vx_i8m2_b4 | ( | ... | ) | __riscv_vmsge_vx_i8m2_b4(__VA_ARGS__) |
| #define vmsge_vx_i8m2_b4_m | ( | ... | ) | __riscv_vmsge_vx_i8m2_b4_mu(__VA_ARGS__) |
| #define vmsge_vx_i8m4_b2 | ( | ... | ) | __riscv_vmsge_vx_i8m4_b2(__VA_ARGS__) |
| #define vmsge_vx_i8m4_b2_m | ( | ... | ) | __riscv_vmsge_vx_i8m4_b2_mu(__VA_ARGS__) |
| #define vmsge_vx_i8m8_b1 | ( | ... | ) | __riscv_vmsge_vx_i8m8_b1(__VA_ARGS__) |
| #define vmsge_vx_i8m8_b1_m | ( | ... | ) | __riscv_vmsge_vx_i8m8_b1_mu(__VA_ARGS__) |
| #define vmsge_vx_i8mf2_b16 | ( | ... | ) | __riscv_vmsge_vx_i8mf2_b16(__VA_ARGS__) |
| #define vmsge_vx_i8mf2_b16_m | ( | ... | ) | __riscv_vmsge_vx_i8mf2_b16_mu(__VA_ARGS__) |
| #define vmsge_vx_i8mf4_b32 | ( | ... | ) | __riscv_vmsge_vx_i8mf4_b32(__VA_ARGS__) |
| #define vmsge_vx_i8mf4_b32_m | ( | ... | ) | __riscv_vmsge_vx_i8mf4_b32_mu(__VA_ARGS__) |
| #define vmsge_vx_i8mf8_b64 | ( | ... | ) | __riscv_vmsge_vx_i8mf8_b64(__VA_ARGS__) |
| #define vmsge_vx_i8mf8_b64_m | ( | ... | ) | __riscv_vmsge_vx_i8mf8_b64_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u16m1_b16 | ( | ... | ) | __riscv_vmsgeu_vv_u16m1_b16(__VA_ARGS__) |
| #define vmsgeu_vv_u16m1_b16_m | ( | ... | ) | __riscv_vmsgeu_vv_u16m1_b16_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u16m2_b8 | ( | ... | ) | __riscv_vmsgeu_vv_u16m2_b8(__VA_ARGS__) |
| #define vmsgeu_vv_u16m2_b8_m | ( | ... | ) | __riscv_vmsgeu_vv_u16m2_b8_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u16m4_b4 | ( | ... | ) | __riscv_vmsgeu_vv_u16m4_b4(__VA_ARGS__) |
| #define vmsgeu_vv_u16m4_b4_m | ( | ... | ) | __riscv_vmsgeu_vv_u16m4_b4_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u16m8_b2 | ( | ... | ) | __riscv_vmsgeu_vv_u16m8_b2(__VA_ARGS__) |
| #define vmsgeu_vv_u16m8_b2_m | ( | ... | ) | __riscv_vmsgeu_vv_u16m8_b2_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u16mf2_b32 | ( | ... | ) | __riscv_vmsgeu_vv_u16mf2_b32(__VA_ARGS__) |
| #define vmsgeu_vv_u16mf2_b32_m | ( | ... | ) | __riscv_vmsgeu_vv_u16mf2_b32_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u16mf4_b64 | ( | ... | ) | __riscv_vmsgeu_vv_u16mf4_b64(__VA_ARGS__) |
| #define vmsgeu_vv_u16mf4_b64_m | ( | ... | ) | __riscv_vmsgeu_vv_u16mf4_b64_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u32m1_b32 | ( | ... | ) | __riscv_vmsgeu_vv_u32m1_b32(__VA_ARGS__) |
| #define vmsgeu_vv_u32m1_b32_m | ( | ... | ) | __riscv_vmsgeu_vv_u32m1_b32_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u32m2_b16 | ( | ... | ) | __riscv_vmsgeu_vv_u32m2_b16(__VA_ARGS__) |
| #define vmsgeu_vv_u32m2_b16_m | ( | ... | ) | __riscv_vmsgeu_vv_u32m2_b16_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u32m4_b8 | ( | ... | ) | __riscv_vmsgeu_vv_u32m4_b8(__VA_ARGS__) |
| #define vmsgeu_vv_u32m4_b8_m | ( | ... | ) | __riscv_vmsgeu_vv_u32m4_b8_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u32m8_b4 | ( | ... | ) | __riscv_vmsgeu_vv_u32m8_b4(__VA_ARGS__) |
| #define vmsgeu_vv_u32m8_b4_m | ( | ... | ) | __riscv_vmsgeu_vv_u32m8_b4_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u32mf2_b64 | ( | ... | ) | __riscv_vmsgeu_vv_u32mf2_b64(__VA_ARGS__) |
| #define vmsgeu_vv_u32mf2_b64_m | ( | ... | ) | __riscv_vmsgeu_vv_u32mf2_b64_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u64m1_b64 | ( | ... | ) | __riscv_vmsgeu_vv_u64m1_b64(__VA_ARGS__) |
| #define vmsgeu_vv_u64m1_b64_m | ( | ... | ) | __riscv_vmsgeu_vv_u64m1_b64_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u64m2_b32 | ( | ... | ) | __riscv_vmsgeu_vv_u64m2_b32(__VA_ARGS__) |
| #define vmsgeu_vv_u64m2_b32_m | ( | ... | ) | __riscv_vmsgeu_vv_u64m2_b32_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u64m4_b16 | ( | ... | ) | __riscv_vmsgeu_vv_u64m4_b16(__VA_ARGS__) |
| #define vmsgeu_vv_u64m4_b16_m | ( | ... | ) | __riscv_vmsgeu_vv_u64m4_b16_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u64m8_b8 | ( | ... | ) | __riscv_vmsgeu_vv_u64m8_b8(__VA_ARGS__) |
| #define vmsgeu_vv_u64m8_b8_m | ( | ... | ) | __riscv_vmsgeu_vv_u64m8_b8_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u8m1_b8 | ( | ... | ) | __riscv_vmsgeu_vv_u8m1_b8(__VA_ARGS__) |
| #define vmsgeu_vv_u8m1_b8_m | ( | ... | ) | __riscv_vmsgeu_vv_u8m1_b8_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u8m2_b4 | ( | ... | ) | __riscv_vmsgeu_vv_u8m2_b4(__VA_ARGS__) |
| #define vmsgeu_vv_u8m2_b4_m | ( | ... | ) | __riscv_vmsgeu_vv_u8m2_b4_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u8m4_b2 | ( | ... | ) | __riscv_vmsgeu_vv_u8m4_b2(__VA_ARGS__) |
| #define vmsgeu_vv_u8m4_b2_m | ( | ... | ) | __riscv_vmsgeu_vv_u8m4_b2_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u8m8_b1 | ( | ... | ) | __riscv_vmsgeu_vv_u8m8_b1(__VA_ARGS__) |
| #define vmsgeu_vv_u8m8_b1_m | ( | ... | ) | __riscv_vmsgeu_vv_u8m8_b1_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u8mf2_b16 | ( | ... | ) | __riscv_vmsgeu_vv_u8mf2_b16(__VA_ARGS__) |
| #define vmsgeu_vv_u8mf2_b16_m | ( | ... | ) | __riscv_vmsgeu_vv_u8mf2_b16_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u8mf4_b32 | ( | ... | ) | __riscv_vmsgeu_vv_u8mf4_b32(__VA_ARGS__) |
| #define vmsgeu_vv_u8mf4_b32_m | ( | ... | ) | __riscv_vmsgeu_vv_u8mf4_b32_mu(__VA_ARGS__) |
| #define vmsgeu_vv_u8mf8_b64 | ( | ... | ) | __riscv_vmsgeu_vv_u8mf8_b64(__VA_ARGS__) |
| #define vmsgeu_vv_u8mf8_b64_m | ( | ... | ) | __riscv_vmsgeu_vv_u8mf8_b64_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u16m1_b16 | ( | ... | ) | __riscv_vmsgeu_vx_u16m1_b16(__VA_ARGS__) |
| #define vmsgeu_vx_u16m1_b16_m | ( | ... | ) | __riscv_vmsgeu_vx_u16m1_b16_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u16m2_b8 | ( | ... | ) | __riscv_vmsgeu_vx_u16m2_b8(__VA_ARGS__) |
| #define vmsgeu_vx_u16m2_b8_m | ( | ... | ) | __riscv_vmsgeu_vx_u16m2_b8_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u16m4_b4 | ( | ... | ) | __riscv_vmsgeu_vx_u16m4_b4(__VA_ARGS__) |
| #define vmsgeu_vx_u16m4_b4_m | ( | ... | ) | __riscv_vmsgeu_vx_u16m4_b4_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u16m8_b2 | ( | ... | ) | __riscv_vmsgeu_vx_u16m8_b2(__VA_ARGS__) |
| #define vmsgeu_vx_u16m8_b2_m | ( | ... | ) | __riscv_vmsgeu_vx_u16m8_b2_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u16mf2_b32 | ( | ... | ) | __riscv_vmsgeu_vx_u16mf2_b32(__VA_ARGS__) |
| #define vmsgeu_vx_u16mf2_b32_m | ( | ... | ) | __riscv_vmsgeu_vx_u16mf2_b32_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u16mf4_b64 | ( | ... | ) | __riscv_vmsgeu_vx_u16mf4_b64(__VA_ARGS__) |
| #define vmsgeu_vx_u16mf4_b64_m | ( | ... | ) | __riscv_vmsgeu_vx_u16mf4_b64_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u32m1_b32 | ( | ... | ) | __riscv_vmsgeu_vx_u32m1_b32(__VA_ARGS__) |
| #define vmsgeu_vx_u32m1_b32_m | ( | ... | ) | __riscv_vmsgeu_vx_u32m1_b32_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u32m2_b16 | ( | ... | ) | __riscv_vmsgeu_vx_u32m2_b16(__VA_ARGS__) |
| #define vmsgeu_vx_u32m2_b16_m | ( | ... | ) | __riscv_vmsgeu_vx_u32m2_b16_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u32m4_b8 | ( | ... | ) | __riscv_vmsgeu_vx_u32m4_b8(__VA_ARGS__) |
| #define vmsgeu_vx_u32m4_b8_m | ( | ... | ) | __riscv_vmsgeu_vx_u32m4_b8_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u32m8_b4 | ( | ... | ) | __riscv_vmsgeu_vx_u32m8_b4(__VA_ARGS__) |
| #define vmsgeu_vx_u32m8_b4_m | ( | ... | ) | __riscv_vmsgeu_vx_u32m8_b4_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u32mf2_b64 | ( | ... | ) | __riscv_vmsgeu_vx_u32mf2_b64(__VA_ARGS__) |
| #define vmsgeu_vx_u32mf2_b64_m | ( | ... | ) | __riscv_vmsgeu_vx_u32mf2_b64_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u64m1_b64 | ( | ... | ) | __riscv_vmsgeu_vx_u64m1_b64(__VA_ARGS__) |
| #define vmsgeu_vx_u64m1_b64_m | ( | ... | ) | __riscv_vmsgeu_vx_u64m1_b64_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u64m2_b32 | ( | ... | ) | __riscv_vmsgeu_vx_u64m2_b32(__VA_ARGS__) |
| #define vmsgeu_vx_u64m2_b32_m | ( | ... | ) | __riscv_vmsgeu_vx_u64m2_b32_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u64m4_b16 | ( | ... | ) | __riscv_vmsgeu_vx_u64m4_b16(__VA_ARGS__) |
| #define vmsgeu_vx_u64m4_b16_m | ( | ... | ) | __riscv_vmsgeu_vx_u64m4_b16_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u64m8_b8 | ( | ... | ) | __riscv_vmsgeu_vx_u64m8_b8(__VA_ARGS__) |
| #define vmsgeu_vx_u64m8_b8_m | ( | ... | ) | __riscv_vmsgeu_vx_u64m8_b8_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u8m1_b8 | ( | ... | ) | __riscv_vmsgeu_vx_u8m1_b8(__VA_ARGS__) |
| #define vmsgeu_vx_u8m1_b8_m | ( | ... | ) | __riscv_vmsgeu_vx_u8m1_b8_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u8m2_b4 | ( | ... | ) | __riscv_vmsgeu_vx_u8m2_b4(__VA_ARGS__) |
| #define vmsgeu_vx_u8m2_b4_m | ( | ... | ) | __riscv_vmsgeu_vx_u8m2_b4_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u8m4_b2 | ( | ... | ) | __riscv_vmsgeu_vx_u8m4_b2(__VA_ARGS__) |
| #define vmsgeu_vx_u8m4_b2_m | ( | ... | ) | __riscv_vmsgeu_vx_u8m4_b2_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u8m8_b1 | ( | ... | ) | __riscv_vmsgeu_vx_u8m8_b1(__VA_ARGS__) |
| #define vmsgeu_vx_u8m8_b1_m | ( | ... | ) | __riscv_vmsgeu_vx_u8m8_b1_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u8mf2_b16 | ( | ... | ) | __riscv_vmsgeu_vx_u8mf2_b16(__VA_ARGS__) |
| #define vmsgeu_vx_u8mf2_b16_m | ( | ... | ) | __riscv_vmsgeu_vx_u8mf2_b16_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u8mf4_b32 | ( | ... | ) | __riscv_vmsgeu_vx_u8mf4_b32(__VA_ARGS__) |
| #define vmsgeu_vx_u8mf4_b32_m | ( | ... | ) | __riscv_vmsgeu_vx_u8mf4_b32_mu(__VA_ARGS__) |
| #define vmsgeu_vx_u8mf8_b64 | ( | ... | ) | __riscv_vmsgeu_vx_u8mf8_b64(__VA_ARGS__) |
| #define vmsgeu_vx_u8mf8_b64_m | ( | ... | ) | __riscv_vmsgeu_vx_u8mf8_b64_mu(__VA_ARGS__) |
| #define vmsgt_vv_i16m1_b16 | ( | ... | ) | __riscv_vmsgt_vv_i16m1_b16(__VA_ARGS__) |
| #define vmsgt_vv_i16m1_b16_m | ( | ... | ) | __riscv_vmsgt_vv_i16m1_b16_mu(__VA_ARGS__) |
| #define vmsgt_vv_i16m2_b8 | ( | ... | ) | __riscv_vmsgt_vv_i16m2_b8(__VA_ARGS__) |
| #define vmsgt_vv_i16m2_b8_m | ( | ... | ) | __riscv_vmsgt_vv_i16m2_b8_mu(__VA_ARGS__) |
| #define vmsgt_vv_i16m4_b4 | ( | ... | ) | __riscv_vmsgt_vv_i16m4_b4(__VA_ARGS__) |
| #define vmsgt_vv_i16m4_b4_m | ( | ... | ) | __riscv_vmsgt_vv_i16m4_b4_mu(__VA_ARGS__) |
| #define vmsgt_vv_i16m8_b2 | ( | ... | ) | __riscv_vmsgt_vv_i16m8_b2(__VA_ARGS__) |
| #define vmsgt_vv_i16m8_b2_m | ( | ... | ) | __riscv_vmsgt_vv_i16m8_b2_mu(__VA_ARGS__) |
| #define vmsgt_vv_i16mf2_b32 | ( | ... | ) | __riscv_vmsgt_vv_i16mf2_b32(__VA_ARGS__) |
| #define vmsgt_vv_i16mf2_b32_m | ( | ... | ) | __riscv_vmsgt_vv_i16mf2_b32_mu(__VA_ARGS__) |
| #define vmsgt_vv_i16mf4_b64 | ( | ... | ) | __riscv_vmsgt_vv_i16mf4_b64(__VA_ARGS__) |
| #define vmsgt_vv_i16mf4_b64_m | ( | ... | ) | __riscv_vmsgt_vv_i16mf4_b64_mu(__VA_ARGS__) |
| #define vmsgt_vv_i32m1_b32 | ( | ... | ) | __riscv_vmsgt_vv_i32m1_b32(__VA_ARGS__) |
| #define vmsgt_vv_i32m1_b32_m | ( | ... | ) | __riscv_vmsgt_vv_i32m1_b32_mu(__VA_ARGS__) |
| #define vmsgt_vv_i32m2_b16 | ( | ... | ) | __riscv_vmsgt_vv_i32m2_b16(__VA_ARGS__) |
| #define vmsgt_vv_i32m2_b16_m | ( | ... | ) | __riscv_vmsgt_vv_i32m2_b16_mu(__VA_ARGS__) |
| #define vmsgt_vv_i32m4_b8 | ( | ... | ) | __riscv_vmsgt_vv_i32m4_b8(__VA_ARGS__) |
| #define vmsgt_vv_i32m4_b8_m | ( | ... | ) | __riscv_vmsgt_vv_i32m4_b8_mu(__VA_ARGS__) |
| #define vmsgt_vv_i32m8_b4 | ( | ... | ) | __riscv_vmsgt_vv_i32m8_b4(__VA_ARGS__) |
| #define vmsgt_vv_i32m8_b4_m | ( | ... | ) | __riscv_vmsgt_vv_i32m8_b4_mu(__VA_ARGS__) |
| #define vmsgt_vv_i32mf2_b64 | ( | ... | ) | __riscv_vmsgt_vv_i32mf2_b64(__VA_ARGS__) |
| #define vmsgt_vv_i32mf2_b64_m | ( | ... | ) | __riscv_vmsgt_vv_i32mf2_b64_mu(__VA_ARGS__) |
| #define vmsgt_vv_i64m1_b64 | ( | ... | ) | __riscv_vmsgt_vv_i64m1_b64(__VA_ARGS__) |
| #define vmsgt_vv_i64m1_b64_m | ( | ... | ) | __riscv_vmsgt_vv_i64m1_b64_mu(__VA_ARGS__) |
| #define vmsgt_vv_i64m2_b32 | ( | ... | ) | __riscv_vmsgt_vv_i64m2_b32(__VA_ARGS__) |
| #define vmsgt_vv_i64m2_b32_m | ( | ... | ) | __riscv_vmsgt_vv_i64m2_b32_mu(__VA_ARGS__) |
| #define vmsgt_vv_i64m4_b16 | ( | ... | ) | __riscv_vmsgt_vv_i64m4_b16(__VA_ARGS__) |
| #define vmsgt_vv_i64m4_b16_m | ( | ... | ) | __riscv_vmsgt_vv_i64m4_b16_mu(__VA_ARGS__) |
| #define vmsgt_vv_i64m8_b8 | ( | ... | ) | __riscv_vmsgt_vv_i64m8_b8(__VA_ARGS__) |
| #define vmsgt_vv_i64m8_b8_m | ( | ... | ) | __riscv_vmsgt_vv_i64m8_b8_mu(__VA_ARGS__) |
| #define vmsgt_vv_i8m1_b8 | ( | ... | ) | __riscv_vmsgt_vv_i8m1_b8(__VA_ARGS__) |
| #define vmsgt_vv_i8m1_b8_m | ( | ... | ) | __riscv_vmsgt_vv_i8m1_b8_mu(__VA_ARGS__) |
| #define vmsgt_vv_i8m2_b4 | ( | ... | ) | __riscv_vmsgt_vv_i8m2_b4(__VA_ARGS__) |
| #define vmsgt_vv_i8m2_b4_m | ( | ... | ) | __riscv_vmsgt_vv_i8m2_b4_mu(__VA_ARGS__) |
| #define vmsgt_vv_i8m4_b2 | ( | ... | ) | __riscv_vmsgt_vv_i8m4_b2(__VA_ARGS__) |
| #define vmsgt_vv_i8m4_b2_m | ( | ... | ) | __riscv_vmsgt_vv_i8m4_b2_mu(__VA_ARGS__) |
| #define vmsgt_vv_i8m8_b1 | ( | ... | ) | __riscv_vmsgt_vv_i8m8_b1(__VA_ARGS__) |
| #define vmsgt_vv_i8m8_b1_m | ( | ... | ) | __riscv_vmsgt_vv_i8m8_b1_mu(__VA_ARGS__) |
| #define vmsgt_vv_i8mf2_b16 | ( | ... | ) | __riscv_vmsgt_vv_i8mf2_b16(__VA_ARGS__) |
| #define vmsgt_vv_i8mf2_b16_m | ( | ... | ) | __riscv_vmsgt_vv_i8mf2_b16_mu(__VA_ARGS__) |
| #define vmsgt_vv_i8mf4_b32 | ( | ... | ) | __riscv_vmsgt_vv_i8mf4_b32(__VA_ARGS__) |
| #define vmsgt_vv_i8mf4_b32_m | ( | ... | ) | __riscv_vmsgt_vv_i8mf4_b32_mu(__VA_ARGS__) |
| #define vmsgt_vv_i8mf8_b64 | ( | ... | ) | __riscv_vmsgt_vv_i8mf8_b64(__VA_ARGS__) |
| #define vmsgt_vv_i8mf8_b64_m | ( | ... | ) | __riscv_vmsgt_vv_i8mf8_b64_mu(__VA_ARGS__) |
| #define vmsgt_vx_i16m1_b16 | ( | ... | ) | __riscv_vmsgt_vx_i16m1_b16(__VA_ARGS__) |
| #define vmsgt_vx_i16m1_b16_m | ( | ... | ) | __riscv_vmsgt_vx_i16m1_b16_mu(__VA_ARGS__) |
| #define vmsgt_vx_i16m2_b8 | ( | ... | ) | __riscv_vmsgt_vx_i16m2_b8(__VA_ARGS__) |
| #define vmsgt_vx_i16m2_b8_m | ( | ... | ) | __riscv_vmsgt_vx_i16m2_b8_mu(__VA_ARGS__) |
| #define vmsgt_vx_i16m4_b4 | ( | ... | ) | __riscv_vmsgt_vx_i16m4_b4(__VA_ARGS__) |
| #define vmsgt_vx_i16m4_b4_m | ( | ... | ) | __riscv_vmsgt_vx_i16m4_b4_mu(__VA_ARGS__) |
| #define vmsgt_vx_i16m8_b2 | ( | ... | ) | __riscv_vmsgt_vx_i16m8_b2(__VA_ARGS__) |
| #define vmsgt_vx_i16m8_b2_m | ( | ... | ) | __riscv_vmsgt_vx_i16m8_b2_mu(__VA_ARGS__) |
| #define vmsgt_vx_i16mf2_b32 | ( | ... | ) | __riscv_vmsgt_vx_i16mf2_b32(__VA_ARGS__) |
| #define vmsgt_vx_i16mf2_b32_m | ( | ... | ) | __riscv_vmsgt_vx_i16mf2_b32_mu(__VA_ARGS__) |
| #define vmsgt_vx_i16mf4_b64 | ( | ... | ) | __riscv_vmsgt_vx_i16mf4_b64(__VA_ARGS__) |
| #define vmsgt_vx_i16mf4_b64_m | ( | ... | ) | __riscv_vmsgt_vx_i16mf4_b64_mu(__VA_ARGS__) |
| #define vmsgt_vx_i32m1_b32 | ( | ... | ) | __riscv_vmsgt_vx_i32m1_b32(__VA_ARGS__) |
| #define vmsgt_vx_i32m1_b32_m | ( | ... | ) | __riscv_vmsgt_vx_i32m1_b32_mu(__VA_ARGS__) |
| #define vmsgt_vx_i32m2_b16 | ( | ... | ) | __riscv_vmsgt_vx_i32m2_b16(__VA_ARGS__) |
| #define vmsgt_vx_i32m2_b16_m | ( | ... | ) | __riscv_vmsgt_vx_i32m2_b16_mu(__VA_ARGS__) |
| #define vmsgt_vx_i32m4_b8 | ( | ... | ) | __riscv_vmsgt_vx_i32m4_b8(__VA_ARGS__) |
| #define vmsgt_vx_i32m4_b8_m | ( | ... | ) | __riscv_vmsgt_vx_i32m4_b8_mu(__VA_ARGS__) |
| #define vmsgt_vx_i32m8_b4 | ( | ... | ) | __riscv_vmsgt_vx_i32m8_b4(__VA_ARGS__) |
| #define vmsgt_vx_i32m8_b4_m | ( | ... | ) | __riscv_vmsgt_vx_i32m8_b4_mu(__VA_ARGS__) |
| #define vmsgt_vx_i32mf2_b64 | ( | ... | ) | __riscv_vmsgt_vx_i32mf2_b64(__VA_ARGS__) |
| #define vmsgt_vx_i32mf2_b64_m | ( | ... | ) | __riscv_vmsgt_vx_i32mf2_b64_mu(__VA_ARGS__) |
| #define vmsgt_vx_i64m1_b64 | ( | ... | ) | __riscv_vmsgt_vx_i64m1_b64(__VA_ARGS__) |
| #define vmsgt_vx_i64m1_b64_m | ( | ... | ) | __riscv_vmsgt_vx_i64m1_b64_mu(__VA_ARGS__) |
| #define vmsgt_vx_i64m2_b32 | ( | ... | ) | __riscv_vmsgt_vx_i64m2_b32(__VA_ARGS__) |
| #define vmsgt_vx_i64m2_b32_m | ( | ... | ) | __riscv_vmsgt_vx_i64m2_b32_mu(__VA_ARGS__) |
| #define vmsgt_vx_i64m4_b16 | ( | ... | ) | __riscv_vmsgt_vx_i64m4_b16(__VA_ARGS__) |
| #define vmsgt_vx_i64m4_b16_m | ( | ... | ) | __riscv_vmsgt_vx_i64m4_b16_mu(__VA_ARGS__) |
| #define vmsgt_vx_i64m8_b8 | ( | ... | ) | __riscv_vmsgt_vx_i64m8_b8(__VA_ARGS__) |
| #define vmsgt_vx_i64m8_b8_m | ( | ... | ) | __riscv_vmsgt_vx_i64m8_b8_mu(__VA_ARGS__) |
| #define vmsgt_vx_i8m1_b8 | ( | ... | ) | __riscv_vmsgt_vx_i8m1_b8(__VA_ARGS__) |
| #define vmsgt_vx_i8m1_b8_m | ( | ... | ) | __riscv_vmsgt_vx_i8m1_b8_mu(__VA_ARGS__) |
| #define vmsgt_vx_i8m2_b4 | ( | ... | ) | __riscv_vmsgt_vx_i8m2_b4(__VA_ARGS__) |
| #define vmsgt_vx_i8m2_b4_m | ( | ... | ) | __riscv_vmsgt_vx_i8m2_b4_mu(__VA_ARGS__) |
| #define vmsgt_vx_i8m4_b2 | ( | ... | ) | __riscv_vmsgt_vx_i8m4_b2(__VA_ARGS__) |
| #define vmsgt_vx_i8m4_b2_m | ( | ... | ) | __riscv_vmsgt_vx_i8m4_b2_mu(__VA_ARGS__) |
| #define vmsgt_vx_i8m8_b1 | ( | ... | ) | __riscv_vmsgt_vx_i8m8_b1(__VA_ARGS__) |
| #define vmsgt_vx_i8m8_b1_m | ( | ... | ) | __riscv_vmsgt_vx_i8m8_b1_mu(__VA_ARGS__) |
| #define vmsgt_vx_i8mf2_b16 | ( | ... | ) | __riscv_vmsgt_vx_i8mf2_b16(__VA_ARGS__) |
| #define vmsgt_vx_i8mf2_b16_m | ( | ... | ) | __riscv_vmsgt_vx_i8mf2_b16_mu(__VA_ARGS__) |
| #define vmsgt_vx_i8mf4_b32 | ( | ... | ) | __riscv_vmsgt_vx_i8mf4_b32(__VA_ARGS__) |
| #define vmsgt_vx_i8mf4_b32_m | ( | ... | ) | __riscv_vmsgt_vx_i8mf4_b32_mu(__VA_ARGS__) |
| #define vmsgt_vx_i8mf8_b64 | ( | ... | ) | __riscv_vmsgt_vx_i8mf8_b64(__VA_ARGS__) |
| #define vmsgt_vx_i8mf8_b64_m | ( | ... | ) | __riscv_vmsgt_vx_i8mf8_b64_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u16m1_b16 | ( | ... | ) | __riscv_vmsgtu_vv_u16m1_b16(__VA_ARGS__) |
| #define vmsgtu_vv_u16m1_b16_m | ( | ... | ) | __riscv_vmsgtu_vv_u16m1_b16_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u16m2_b8 | ( | ... | ) | __riscv_vmsgtu_vv_u16m2_b8(__VA_ARGS__) |
| #define vmsgtu_vv_u16m2_b8_m | ( | ... | ) | __riscv_vmsgtu_vv_u16m2_b8_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u16m4_b4 | ( | ... | ) | __riscv_vmsgtu_vv_u16m4_b4(__VA_ARGS__) |
| #define vmsgtu_vv_u16m4_b4_m | ( | ... | ) | __riscv_vmsgtu_vv_u16m4_b4_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u16m8_b2 | ( | ... | ) | __riscv_vmsgtu_vv_u16m8_b2(__VA_ARGS__) |
| #define vmsgtu_vv_u16m8_b2_m | ( | ... | ) | __riscv_vmsgtu_vv_u16m8_b2_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u16mf2_b32 | ( | ... | ) | __riscv_vmsgtu_vv_u16mf2_b32(__VA_ARGS__) |
| #define vmsgtu_vv_u16mf2_b32_m | ( | ... | ) | __riscv_vmsgtu_vv_u16mf2_b32_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u16mf4_b64 | ( | ... | ) | __riscv_vmsgtu_vv_u16mf4_b64(__VA_ARGS__) |
| #define vmsgtu_vv_u16mf4_b64_m | ( | ... | ) | __riscv_vmsgtu_vv_u16mf4_b64_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u32m1_b32 | ( | ... | ) | __riscv_vmsgtu_vv_u32m1_b32(__VA_ARGS__) |
| #define vmsgtu_vv_u32m1_b32_m | ( | ... | ) | __riscv_vmsgtu_vv_u32m1_b32_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u32m2_b16 | ( | ... | ) | __riscv_vmsgtu_vv_u32m2_b16(__VA_ARGS__) |
| #define vmsgtu_vv_u32m2_b16_m | ( | ... | ) | __riscv_vmsgtu_vv_u32m2_b16_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u32m4_b8 | ( | ... | ) | __riscv_vmsgtu_vv_u32m4_b8(__VA_ARGS__) |
| #define vmsgtu_vv_u32m4_b8_m | ( | ... | ) | __riscv_vmsgtu_vv_u32m4_b8_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u32m8_b4 | ( | ... | ) | __riscv_vmsgtu_vv_u32m8_b4(__VA_ARGS__) |
| #define vmsgtu_vv_u32m8_b4_m | ( | ... | ) | __riscv_vmsgtu_vv_u32m8_b4_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u32mf2_b64 | ( | ... | ) | __riscv_vmsgtu_vv_u32mf2_b64(__VA_ARGS__) |
| #define vmsgtu_vv_u32mf2_b64_m | ( | ... | ) | __riscv_vmsgtu_vv_u32mf2_b64_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u64m1_b64 | ( | ... | ) | __riscv_vmsgtu_vv_u64m1_b64(__VA_ARGS__) |
| #define vmsgtu_vv_u64m1_b64_m | ( | ... | ) | __riscv_vmsgtu_vv_u64m1_b64_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u64m2_b32 | ( | ... | ) | __riscv_vmsgtu_vv_u64m2_b32(__VA_ARGS__) |
| #define vmsgtu_vv_u64m2_b32_m | ( | ... | ) | __riscv_vmsgtu_vv_u64m2_b32_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u64m4_b16 | ( | ... | ) | __riscv_vmsgtu_vv_u64m4_b16(__VA_ARGS__) |
| #define vmsgtu_vv_u64m4_b16_m | ( | ... | ) | __riscv_vmsgtu_vv_u64m4_b16_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u64m8_b8 | ( | ... | ) | __riscv_vmsgtu_vv_u64m8_b8(__VA_ARGS__) |
| #define vmsgtu_vv_u64m8_b8_m | ( | ... | ) | __riscv_vmsgtu_vv_u64m8_b8_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u8m1_b8 | ( | ... | ) | __riscv_vmsgtu_vv_u8m1_b8(__VA_ARGS__) |
| #define vmsgtu_vv_u8m1_b8_m | ( | ... | ) | __riscv_vmsgtu_vv_u8m1_b8_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u8m2_b4 | ( | ... | ) | __riscv_vmsgtu_vv_u8m2_b4(__VA_ARGS__) |
| #define vmsgtu_vv_u8m2_b4_m | ( | ... | ) | __riscv_vmsgtu_vv_u8m2_b4_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u8m4_b2 | ( | ... | ) | __riscv_vmsgtu_vv_u8m4_b2(__VA_ARGS__) |
| #define vmsgtu_vv_u8m4_b2_m | ( | ... | ) | __riscv_vmsgtu_vv_u8m4_b2_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u8m8_b1 | ( | ... | ) | __riscv_vmsgtu_vv_u8m8_b1(__VA_ARGS__) |
| #define vmsgtu_vv_u8m8_b1_m | ( | ... | ) | __riscv_vmsgtu_vv_u8m8_b1_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u8mf2_b16 | ( | ... | ) | __riscv_vmsgtu_vv_u8mf2_b16(__VA_ARGS__) |
| #define vmsgtu_vv_u8mf2_b16_m | ( | ... | ) | __riscv_vmsgtu_vv_u8mf2_b16_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u8mf4_b32 | ( | ... | ) | __riscv_vmsgtu_vv_u8mf4_b32(__VA_ARGS__) |
| #define vmsgtu_vv_u8mf4_b32_m | ( | ... | ) | __riscv_vmsgtu_vv_u8mf4_b32_mu(__VA_ARGS__) |
| #define vmsgtu_vv_u8mf8_b64 | ( | ... | ) | __riscv_vmsgtu_vv_u8mf8_b64(__VA_ARGS__) |
| #define vmsgtu_vv_u8mf8_b64_m | ( | ... | ) | __riscv_vmsgtu_vv_u8mf8_b64_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u16m1_b16 | ( | ... | ) | __riscv_vmsgtu_vx_u16m1_b16(__VA_ARGS__) |
| #define vmsgtu_vx_u16m1_b16_m | ( | ... | ) | __riscv_vmsgtu_vx_u16m1_b16_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u16m2_b8 | ( | ... | ) | __riscv_vmsgtu_vx_u16m2_b8(__VA_ARGS__) |
| #define vmsgtu_vx_u16m2_b8_m | ( | ... | ) | __riscv_vmsgtu_vx_u16m2_b8_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u16m4_b4 | ( | ... | ) | __riscv_vmsgtu_vx_u16m4_b4(__VA_ARGS__) |
| #define vmsgtu_vx_u16m4_b4_m | ( | ... | ) | __riscv_vmsgtu_vx_u16m4_b4_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u16m8_b2 | ( | ... | ) | __riscv_vmsgtu_vx_u16m8_b2(__VA_ARGS__) |
| #define vmsgtu_vx_u16m8_b2_m | ( | ... | ) | __riscv_vmsgtu_vx_u16m8_b2_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u16mf2_b32 | ( | ... | ) | __riscv_vmsgtu_vx_u16mf2_b32(__VA_ARGS__) |
| #define vmsgtu_vx_u16mf2_b32_m | ( | ... | ) | __riscv_vmsgtu_vx_u16mf2_b32_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u16mf4_b64 | ( | ... | ) | __riscv_vmsgtu_vx_u16mf4_b64(__VA_ARGS__) |
| #define vmsgtu_vx_u16mf4_b64_m | ( | ... | ) | __riscv_vmsgtu_vx_u16mf4_b64_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u32m1_b32 | ( | ... | ) | __riscv_vmsgtu_vx_u32m1_b32(__VA_ARGS__) |
| #define vmsgtu_vx_u32m1_b32_m | ( | ... | ) | __riscv_vmsgtu_vx_u32m1_b32_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u32m2_b16 | ( | ... | ) | __riscv_vmsgtu_vx_u32m2_b16(__VA_ARGS__) |
| #define vmsgtu_vx_u32m2_b16_m | ( | ... | ) | __riscv_vmsgtu_vx_u32m2_b16_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u32m4_b8 | ( | ... | ) | __riscv_vmsgtu_vx_u32m4_b8(__VA_ARGS__) |
| #define vmsgtu_vx_u32m4_b8_m | ( | ... | ) | __riscv_vmsgtu_vx_u32m4_b8_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u32m8_b4 | ( | ... | ) | __riscv_vmsgtu_vx_u32m8_b4(__VA_ARGS__) |
| #define vmsgtu_vx_u32m8_b4_m | ( | ... | ) | __riscv_vmsgtu_vx_u32m8_b4_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u32mf2_b64 | ( | ... | ) | __riscv_vmsgtu_vx_u32mf2_b64(__VA_ARGS__) |
| #define vmsgtu_vx_u32mf2_b64_m | ( | ... | ) | __riscv_vmsgtu_vx_u32mf2_b64_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u64m1_b64 | ( | ... | ) | __riscv_vmsgtu_vx_u64m1_b64(__VA_ARGS__) |
| #define vmsgtu_vx_u64m1_b64_m | ( | ... | ) | __riscv_vmsgtu_vx_u64m1_b64_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u64m2_b32 | ( | ... | ) | __riscv_vmsgtu_vx_u64m2_b32(__VA_ARGS__) |
| #define vmsgtu_vx_u64m2_b32_m | ( | ... | ) | __riscv_vmsgtu_vx_u64m2_b32_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u64m4_b16 | ( | ... | ) | __riscv_vmsgtu_vx_u64m4_b16(__VA_ARGS__) |
| #define vmsgtu_vx_u64m4_b16_m | ( | ... | ) | __riscv_vmsgtu_vx_u64m4_b16_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u64m8_b8 | ( | ... | ) | __riscv_vmsgtu_vx_u64m8_b8(__VA_ARGS__) |
| #define vmsgtu_vx_u64m8_b8_m | ( | ... | ) | __riscv_vmsgtu_vx_u64m8_b8_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u8m1_b8 | ( | ... | ) | __riscv_vmsgtu_vx_u8m1_b8(__VA_ARGS__) |
| #define vmsgtu_vx_u8m1_b8_m | ( | ... | ) | __riscv_vmsgtu_vx_u8m1_b8_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u8m2_b4 | ( | ... | ) | __riscv_vmsgtu_vx_u8m2_b4(__VA_ARGS__) |
| #define vmsgtu_vx_u8m2_b4_m | ( | ... | ) | __riscv_vmsgtu_vx_u8m2_b4_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u8m4_b2 | ( | ... | ) | __riscv_vmsgtu_vx_u8m4_b2(__VA_ARGS__) |
| #define vmsgtu_vx_u8m4_b2_m | ( | ... | ) | __riscv_vmsgtu_vx_u8m4_b2_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u8m8_b1 | ( | ... | ) | __riscv_vmsgtu_vx_u8m8_b1(__VA_ARGS__) |
| #define vmsgtu_vx_u8m8_b1_m | ( | ... | ) | __riscv_vmsgtu_vx_u8m8_b1_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u8mf2_b16 | ( | ... | ) | __riscv_vmsgtu_vx_u8mf2_b16(__VA_ARGS__) |
| #define vmsgtu_vx_u8mf2_b16_m | ( | ... | ) | __riscv_vmsgtu_vx_u8mf2_b16_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u8mf4_b32 | ( | ... | ) | __riscv_vmsgtu_vx_u8mf4_b32(__VA_ARGS__) |
| #define vmsgtu_vx_u8mf4_b32_m | ( | ... | ) | __riscv_vmsgtu_vx_u8mf4_b32_mu(__VA_ARGS__) |
| #define vmsgtu_vx_u8mf8_b64 | ( | ... | ) | __riscv_vmsgtu_vx_u8mf8_b64(__VA_ARGS__) |
| #define vmsgtu_vx_u8mf8_b64_m | ( | ... | ) | __riscv_vmsgtu_vx_u8mf8_b64_mu(__VA_ARGS__) |
| #define vmsif_m_b1 | ( | ... | ) | __riscv_vmsif_m_b1(__VA_ARGS__) |
| #define vmsif_m_b16 | ( | ... | ) | __riscv_vmsif_m_b16(__VA_ARGS__) |
| #define vmsif_m_b16_m | ( | ... | ) | __riscv_vmsif_m_b16_mu(__VA_ARGS__) |
| #define vmsif_m_b1_m | ( | ... | ) | __riscv_vmsif_m_b1_mu(__VA_ARGS__) |
| #define vmsif_m_b2 | ( | ... | ) | __riscv_vmsif_m_b2(__VA_ARGS__) |
| #define vmsif_m_b2_m | ( | ... | ) | __riscv_vmsif_m_b2_mu(__VA_ARGS__) |
| #define vmsif_m_b32 | ( | ... | ) | __riscv_vmsif_m_b32(__VA_ARGS__) |
| #define vmsif_m_b32_m | ( | ... | ) | __riscv_vmsif_m_b32_mu(__VA_ARGS__) |
| #define vmsif_m_b4 | ( | ... | ) | __riscv_vmsif_m_b4(__VA_ARGS__) |
| #define vmsif_m_b4_m | ( | ... | ) | __riscv_vmsif_m_b4_mu(__VA_ARGS__) |
| #define vmsif_m_b64 | ( | ... | ) | __riscv_vmsif_m_b64(__VA_ARGS__) |
| #define vmsif_m_b64_m | ( | ... | ) | __riscv_vmsif_m_b64_mu(__VA_ARGS__) |
| #define vmsif_m_b8 | ( | ... | ) | __riscv_vmsif_m_b8(__VA_ARGS__) |
| #define vmsif_m_b8_m | ( | ... | ) | __riscv_vmsif_m_b8_mu(__VA_ARGS__) |
| #define vmsle_vv_i16m1_b16 | ( | ... | ) | __riscv_vmsle_vv_i16m1_b16(__VA_ARGS__) |
| #define vmsle_vv_i16m1_b16_m | ( | ... | ) | __riscv_vmsle_vv_i16m1_b16_mu(__VA_ARGS__) |
| #define vmsle_vv_i16m2_b8 | ( | ... | ) | __riscv_vmsle_vv_i16m2_b8(__VA_ARGS__) |
| #define vmsle_vv_i16m2_b8_m | ( | ... | ) | __riscv_vmsle_vv_i16m2_b8_mu(__VA_ARGS__) |
| #define vmsle_vv_i16m4_b4 | ( | ... | ) | __riscv_vmsle_vv_i16m4_b4(__VA_ARGS__) |
| #define vmsle_vv_i16m4_b4_m | ( | ... | ) | __riscv_vmsle_vv_i16m4_b4_mu(__VA_ARGS__) |
| #define vmsle_vv_i16m8_b2 | ( | ... | ) | __riscv_vmsle_vv_i16m8_b2(__VA_ARGS__) |
| #define vmsle_vv_i16m8_b2_m | ( | ... | ) | __riscv_vmsle_vv_i16m8_b2_mu(__VA_ARGS__) |
| #define vmsle_vv_i16mf2_b32 | ( | ... | ) | __riscv_vmsle_vv_i16mf2_b32(__VA_ARGS__) |
| #define vmsle_vv_i16mf2_b32_m | ( | ... | ) | __riscv_vmsle_vv_i16mf2_b32_mu(__VA_ARGS__) |
| #define vmsle_vv_i16mf4_b64 | ( | ... | ) | __riscv_vmsle_vv_i16mf4_b64(__VA_ARGS__) |
| #define vmsle_vv_i16mf4_b64_m | ( | ... | ) | __riscv_vmsle_vv_i16mf4_b64_mu(__VA_ARGS__) |
| #define vmsle_vv_i32m1_b32 | ( | ... | ) | __riscv_vmsle_vv_i32m1_b32(__VA_ARGS__) |
| #define vmsle_vv_i32m1_b32_m | ( | ... | ) | __riscv_vmsle_vv_i32m1_b32_mu(__VA_ARGS__) |
| #define vmsle_vv_i32m2_b16 | ( | ... | ) | __riscv_vmsle_vv_i32m2_b16(__VA_ARGS__) |
| #define vmsle_vv_i32m2_b16_m | ( | ... | ) | __riscv_vmsle_vv_i32m2_b16_mu(__VA_ARGS__) |
| #define vmsle_vv_i32m4_b8 | ( | ... | ) | __riscv_vmsle_vv_i32m4_b8(__VA_ARGS__) |
| #define vmsle_vv_i32m4_b8_m | ( | ... | ) | __riscv_vmsle_vv_i32m4_b8_mu(__VA_ARGS__) |
| #define vmsle_vv_i32m8_b4 | ( | ... | ) | __riscv_vmsle_vv_i32m8_b4(__VA_ARGS__) |
| #define vmsle_vv_i32m8_b4_m | ( | ... | ) | __riscv_vmsle_vv_i32m8_b4_mu(__VA_ARGS__) |
| #define vmsle_vv_i32mf2_b64 | ( | ... | ) | __riscv_vmsle_vv_i32mf2_b64(__VA_ARGS__) |
| #define vmsle_vv_i32mf2_b64_m | ( | ... | ) | __riscv_vmsle_vv_i32mf2_b64_mu(__VA_ARGS__) |
| #define vmsle_vv_i64m1_b64 | ( | ... | ) | __riscv_vmsle_vv_i64m1_b64(__VA_ARGS__) |
| #define vmsle_vv_i64m1_b64_m | ( | ... | ) | __riscv_vmsle_vv_i64m1_b64_mu(__VA_ARGS__) |
| #define vmsle_vv_i64m2_b32 | ( | ... | ) | __riscv_vmsle_vv_i64m2_b32(__VA_ARGS__) |
| #define vmsle_vv_i64m2_b32_m | ( | ... | ) | __riscv_vmsle_vv_i64m2_b32_mu(__VA_ARGS__) |
| #define vmsle_vv_i64m4_b16 | ( | ... | ) | __riscv_vmsle_vv_i64m4_b16(__VA_ARGS__) |
| #define vmsle_vv_i64m4_b16_m | ( | ... | ) | __riscv_vmsle_vv_i64m4_b16_mu(__VA_ARGS__) |
| #define vmsle_vv_i64m8_b8 | ( | ... | ) | __riscv_vmsle_vv_i64m8_b8(__VA_ARGS__) |
| #define vmsle_vv_i64m8_b8_m | ( | ... | ) | __riscv_vmsle_vv_i64m8_b8_mu(__VA_ARGS__) |
| #define vmsle_vv_i8m1_b8 | ( | ... | ) | __riscv_vmsle_vv_i8m1_b8(__VA_ARGS__) |
| #define vmsle_vv_i8m1_b8_m | ( | ... | ) | __riscv_vmsle_vv_i8m1_b8_mu(__VA_ARGS__) |
| #define vmsle_vv_i8m2_b4 | ( | ... | ) | __riscv_vmsle_vv_i8m2_b4(__VA_ARGS__) |
| #define vmsle_vv_i8m2_b4_m | ( | ... | ) | __riscv_vmsle_vv_i8m2_b4_mu(__VA_ARGS__) |
| #define vmsle_vv_i8m4_b2 | ( | ... | ) | __riscv_vmsle_vv_i8m4_b2(__VA_ARGS__) |
| #define vmsle_vv_i8m4_b2_m | ( | ... | ) | __riscv_vmsle_vv_i8m4_b2_mu(__VA_ARGS__) |
| #define vmsle_vv_i8m8_b1 | ( | ... | ) | __riscv_vmsle_vv_i8m8_b1(__VA_ARGS__) |
| #define vmsle_vv_i8m8_b1_m | ( | ... | ) | __riscv_vmsle_vv_i8m8_b1_mu(__VA_ARGS__) |
| #define vmsle_vv_i8mf2_b16 | ( | ... | ) | __riscv_vmsle_vv_i8mf2_b16(__VA_ARGS__) |
| #define vmsle_vv_i8mf2_b16_m | ( | ... | ) | __riscv_vmsle_vv_i8mf2_b16_mu(__VA_ARGS__) |
| #define vmsle_vv_i8mf4_b32 | ( | ... | ) | __riscv_vmsle_vv_i8mf4_b32(__VA_ARGS__) |
| #define vmsle_vv_i8mf4_b32_m | ( | ... | ) | __riscv_vmsle_vv_i8mf4_b32_mu(__VA_ARGS__) |
| #define vmsle_vv_i8mf8_b64 | ( | ... | ) | __riscv_vmsle_vv_i8mf8_b64(__VA_ARGS__) |
| #define vmsle_vv_i8mf8_b64_m | ( | ... | ) | __riscv_vmsle_vv_i8mf8_b64_mu(__VA_ARGS__) |
| #define vmsle_vx_i16m1_b16 | ( | ... | ) | __riscv_vmsle_vx_i16m1_b16(__VA_ARGS__) |
| #define vmsle_vx_i16m1_b16_m | ( | ... | ) | __riscv_vmsle_vx_i16m1_b16_mu(__VA_ARGS__) |
| #define vmsle_vx_i16m2_b8 | ( | ... | ) | __riscv_vmsle_vx_i16m2_b8(__VA_ARGS__) |
| #define vmsle_vx_i16m2_b8_m | ( | ... | ) | __riscv_vmsle_vx_i16m2_b8_mu(__VA_ARGS__) |
| #define vmsle_vx_i16m4_b4 | ( | ... | ) | __riscv_vmsle_vx_i16m4_b4(__VA_ARGS__) |
| #define vmsle_vx_i16m4_b4_m | ( | ... | ) | __riscv_vmsle_vx_i16m4_b4_mu(__VA_ARGS__) |
| #define vmsle_vx_i16m8_b2 | ( | ... | ) | __riscv_vmsle_vx_i16m8_b2(__VA_ARGS__) |
| #define vmsle_vx_i16m8_b2_m | ( | ... | ) | __riscv_vmsle_vx_i16m8_b2_mu(__VA_ARGS__) |
| #define vmsle_vx_i16mf2_b32 | ( | ... | ) | __riscv_vmsle_vx_i16mf2_b32(__VA_ARGS__) |
| #define vmsle_vx_i16mf2_b32_m | ( | ... | ) | __riscv_vmsle_vx_i16mf2_b32_mu(__VA_ARGS__) |
| #define vmsle_vx_i16mf4_b64 | ( | ... | ) | __riscv_vmsle_vx_i16mf4_b64(__VA_ARGS__) |
| #define vmsle_vx_i16mf4_b64_m | ( | ... | ) | __riscv_vmsle_vx_i16mf4_b64_mu(__VA_ARGS__) |
| #define vmsle_vx_i32m1_b32 | ( | ... | ) | __riscv_vmsle_vx_i32m1_b32(__VA_ARGS__) |
| #define vmsle_vx_i32m1_b32_m | ( | ... | ) | __riscv_vmsle_vx_i32m1_b32_mu(__VA_ARGS__) |
| #define vmsle_vx_i32m2_b16 | ( | ... | ) | __riscv_vmsle_vx_i32m2_b16(__VA_ARGS__) |
| #define vmsle_vx_i32m2_b16_m | ( | ... | ) | __riscv_vmsle_vx_i32m2_b16_mu(__VA_ARGS__) |
| #define vmsle_vx_i32m4_b8 | ( | ... | ) | __riscv_vmsle_vx_i32m4_b8(__VA_ARGS__) |
| #define vmsle_vx_i32m4_b8_m | ( | ... | ) | __riscv_vmsle_vx_i32m4_b8_mu(__VA_ARGS__) |
| #define vmsle_vx_i32m8_b4 | ( | ... | ) | __riscv_vmsle_vx_i32m8_b4(__VA_ARGS__) |
| #define vmsle_vx_i32m8_b4_m | ( | ... | ) | __riscv_vmsle_vx_i32m8_b4_mu(__VA_ARGS__) |
| #define vmsle_vx_i32mf2_b64 | ( | ... | ) | __riscv_vmsle_vx_i32mf2_b64(__VA_ARGS__) |
| #define vmsle_vx_i32mf2_b64_m | ( | ... | ) | __riscv_vmsle_vx_i32mf2_b64_mu(__VA_ARGS__) |
| #define vmsle_vx_i64m1_b64 | ( | ... | ) | __riscv_vmsle_vx_i64m1_b64(__VA_ARGS__) |
| #define vmsle_vx_i64m1_b64_m | ( | ... | ) | __riscv_vmsle_vx_i64m1_b64_mu(__VA_ARGS__) |
| #define vmsle_vx_i64m2_b32 | ( | ... | ) | __riscv_vmsle_vx_i64m2_b32(__VA_ARGS__) |
| #define vmsle_vx_i64m2_b32_m | ( | ... | ) | __riscv_vmsle_vx_i64m2_b32_mu(__VA_ARGS__) |
| #define vmsle_vx_i64m4_b16 | ( | ... | ) | __riscv_vmsle_vx_i64m4_b16(__VA_ARGS__) |
| #define vmsle_vx_i64m4_b16_m | ( | ... | ) | __riscv_vmsle_vx_i64m4_b16_mu(__VA_ARGS__) |
| #define vmsle_vx_i64m8_b8 | ( | ... | ) | __riscv_vmsle_vx_i64m8_b8(__VA_ARGS__) |
| #define vmsle_vx_i64m8_b8_m | ( | ... | ) | __riscv_vmsle_vx_i64m8_b8_mu(__VA_ARGS__) |
| #define vmsle_vx_i8m1_b8 | ( | ... | ) | __riscv_vmsle_vx_i8m1_b8(__VA_ARGS__) |
| #define vmsle_vx_i8m1_b8_m | ( | ... | ) | __riscv_vmsle_vx_i8m1_b8_mu(__VA_ARGS__) |
| #define vmsle_vx_i8m2_b4 | ( | ... | ) | __riscv_vmsle_vx_i8m2_b4(__VA_ARGS__) |
| #define vmsle_vx_i8m2_b4_m | ( | ... | ) | __riscv_vmsle_vx_i8m2_b4_mu(__VA_ARGS__) |
| #define vmsle_vx_i8m4_b2 | ( | ... | ) | __riscv_vmsle_vx_i8m4_b2(__VA_ARGS__) |
| #define vmsle_vx_i8m4_b2_m | ( | ... | ) | __riscv_vmsle_vx_i8m4_b2_mu(__VA_ARGS__) |
| #define vmsle_vx_i8m8_b1 | ( | ... | ) | __riscv_vmsle_vx_i8m8_b1(__VA_ARGS__) |
| #define vmsle_vx_i8m8_b1_m | ( | ... | ) | __riscv_vmsle_vx_i8m8_b1_mu(__VA_ARGS__) |
| #define vmsle_vx_i8mf2_b16 | ( | ... | ) | __riscv_vmsle_vx_i8mf2_b16(__VA_ARGS__) |
| #define vmsle_vx_i8mf2_b16_m | ( | ... | ) | __riscv_vmsle_vx_i8mf2_b16_mu(__VA_ARGS__) |
| #define vmsle_vx_i8mf4_b32 | ( | ... | ) | __riscv_vmsle_vx_i8mf4_b32(__VA_ARGS__) |
| #define vmsle_vx_i8mf4_b32_m | ( | ... | ) | __riscv_vmsle_vx_i8mf4_b32_mu(__VA_ARGS__) |
| #define vmsle_vx_i8mf8_b64 | ( | ... | ) | __riscv_vmsle_vx_i8mf8_b64(__VA_ARGS__) |
| #define vmsle_vx_i8mf8_b64_m | ( | ... | ) | __riscv_vmsle_vx_i8mf8_b64_mu(__VA_ARGS__) |
| #define vmsleu_vv_u16m1_b16 | ( | ... | ) | __riscv_vmsleu_vv_u16m1_b16(__VA_ARGS__) |
| #define vmsleu_vv_u16m1_b16_m | ( | ... | ) | __riscv_vmsleu_vv_u16m1_b16_mu(__VA_ARGS__) |
| #define vmsleu_vv_u16m2_b8 | ( | ... | ) | __riscv_vmsleu_vv_u16m2_b8(__VA_ARGS__) |
| #define vmsleu_vv_u16m2_b8_m | ( | ... | ) | __riscv_vmsleu_vv_u16m2_b8_mu(__VA_ARGS__) |
| #define vmsleu_vv_u16m4_b4 | ( | ... | ) | __riscv_vmsleu_vv_u16m4_b4(__VA_ARGS__) |
| #define vmsleu_vv_u16m4_b4_m | ( | ... | ) | __riscv_vmsleu_vv_u16m4_b4_mu(__VA_ARGS__) |
| #define vmsleu_vv_u16m8_b2 | ( | ... | ) | __riscv_vmsleu_vv_u16m8_b2(__VA_ARGS__) |
| #define vmsleu_vv_u16m8_b2_m | ( | ... | ) | __riscv_vmsleu_vv_u16m8_b2_mu(__VA_ARGS__) |
| #define vmsleu_vv_u16mf2_b32 | ( | ... | ) | __riscv_vmsleu_vv_u16mf2_b32(__VA_ARGS__) |
| #define vmsleu_vv_u16mf2_b32_m | ( | ... | ) | __riscv_vmsleu_vv_u16mf2_b32_mu(__VA_ARGS__) |
| #define vmsleu_vv_u16mf4_b64 | ( | ... | ) | __riscv_vmsleu_vv_u16mf4_b64(__VA_ARGS__) |
| #define vmsleu_vv_u16mf4_b64_m | ( | ... | ) | __riscv_vmsleu_vv_u16mf4_b64_mu(__VA_ARGS__) |
| #define vmsleu_vv_u32m1_b32 | ( | ... | ) | __riscv_vmsleu_vv_u32m1_b32(__VA_ARGS__) |
| #define vmsleu_vv_u32m1_b32_m | ( | ... | ) | __riscv_vmsleu_vv_u32m1_b32_mu(__VA_ARGS__) |
| #define vmsleu_vv_u32m2_b16 | ( | ... | ) | __riscv_vmsleu_vv_u32m2_b16(__VA_ARGS__) |
| #define vmsleu_vv_u32m2_b16_m | ( | ... | ) | __riscv_vmsleu_vv_u32m2_b16_mu(__VA_ARGS__) |
| #define vmsleu_vv_u32m4_b8 | ( | ... | ) | __riscv_vmsleu_vv_u32m4_b8(__VA_ARGS__) |
| #define vmsleu_vv_u32m4_b8_m | ( | ... | ) | __riscv_vmsleu_vv_u32m4_b8_mu(__VA_ARGS__) |
| #define vmsleu_vv_u32m8_b4 | ( | ... | ) | __riscv_vmsleu_vv_u32m8_b4(__VA_ARGS__) |
| #define vmsleu_vv_u32m8_b4_m | ( | ... | ) | __riscv_vmsleu_vv_u32m8_b4_mu(__VA_ARGS__) |
| #define vmsleu_vv_u32mf2_b64 | ( | ... | ) | __riscv_vmsleu_vv_u32mf2_b64(__VA_ARGS__) |
| #define vmsleu_vv_u32mf2_b64_m | ( | ... | ) | __riscv_vmsleu_vv_u32mf2_b64_mu(__VA_ARGS__) |
| #define vmsleu_vv_u64m1_b64 | ( | ... | ) | __riscv_vmsleu_vv_u64m1_b64(__VA_ARGS__) |
| #define vmsleu_vv_u64m1_b64_m | ( | ... | ) | __riscv_vmsleu_vv_u64m1_b64_mu(__VA_ARGS__) |
| #define vmsleu_vv_u64m2_b32 | ( | ... | ) | __riscv_vmsleu_vv_u64m2_b32(__VA_ARGS__) |
| #define vmsleu_vv_u64m2_b32_m | ( | ... | ) | __riscv_vmsleu_vv_u64m2_b32_mu(__VA_ARGS__) |
| #define vmsleu_vv_u64m4_b16 | ( | ... | ) | __riscv_vmsleu_vv_u64m4_b16(__VA_ARGS__) |
| #define vmsleu_vv_u64m4_b16_m | ( | ... | ) | __riscv_vmsleu_vv_u64m4_b16_mu(__VA_ARGS__) |
| #define vmsleu_vv_u64m8_b8 | ( | ... | ) | __riscv_vmsleu_vv_u64m8_b8(__VA_ARGS__) |
| #define vmsleu_vv_u64m8_b8_m | ( | ... | ) | __riscv_vmsleu_vv_u64m8_b8_mu(__VA_ARGS__) |
| #define vmsleu_vv_u8m1_b8 | ( | ... | ) | __riscv_vmsleu_vv_u8m1_b8(__VA_ARGS__) |
| #define vmsleu_vv_u8m1_b8_m | ( | ... | ) | __riscv_vmsleu_vv_u8m1_b8_mu(__VA_ARGS__) |
| #define vmsleu_vv_u8m2_b4 | ( | ... | ) | __riscv_vmsleu_vv_u8m2_b4(__VA_ARGS__) |
| #define vmsleu_vv_u8m2_b4_m | ( | ... | ) | __riscv_vmsleu_vv_u8m2_b4_mu(__VA_ARGS__) |
| #define vmsleu_vv_u8m4_b2 | ( | ... | ) | __riscv_vmsleu_vv_u8m4_b2(__VA_ARGS__) |
| #define vmsleu_vv_u8m4_b2_m | ( | ... | ) | __riscv_vmsleu_vv_u8m4_b2_mu(__VA_ARGS__) |
| #define vmsleu_vv_u8m8_b1 | ( | ... | ) | __riscv_vmsleu_vv_u8m8_b1(__VA_ARGS__) |
| #define vmsleu_vv_u8m8_b1_m | ( | ... | ) | __riscv_vmsleu_vv_u8m8_b1_mu(__VA_ARGS__) |
| #define vmsleu_vv_u8mf2_b16 | ( | ... | ) | __riscv_vmsleu_vv_u8mf2_b16(__VA_ARGS__) |
| #define vmsleu_vv_u8mf2_b16_m | ( | ... | ) | __riscv_vmsleu_vv_u8mf2_b16_mu(__VA_ARGS__) |
| #define vmsleu_vv_u8mf4_b32 | ( | ... | ) | __riscv_vmsleu_vv_u8mf4_b32(__VA_ARGS__) |
| #define vmsleu_vv_u8mf4_b32_m | ( | ... | ) | __riscv_vmsleu_vv_u8mf4_b32_mu(__VA_ARGS__) |
| #define vmsleu_vv_u8mf8_b64 | ( | ... | ) | __riscv_vmsleu_vv_u8mf8_b64(__VA_ARGS__) |
| #define vmsleu_vv_u8mf8_b64_m | ( | ... | ) | __riscv_vmsleu_vv_u8mf8_b64_mu(__VA_ARGS__) |
| #define vmsleu_vx_u16m1_b16 | ( | ... | ) | __riscv_vmsleu_vx_u16m1_b16(__VA_ARGS__) |
| #define vmsleu_vx_u16m1_b16_m | ( | ... | ) | __riscv_vmsleu_vx_u16m1_b16_mu(__VA_ARGS__) |
| #define vmsleu_vx_u16m2_b8 | ( | ... | ) | __riscv_vmsleu_vx_u16m2_b8(__VA_ARGS__) |
| #define vmsleu_vx_u16m2_b8_m | ( | ... | ) | __riscv_vmsleu_vx_u16m2_b8_mu(__VA_ARGS__) |
| #define vmsleu_vx_u16m4_b4 | ( | ... | ) | __riscv_vmsleu_vx_u16m4_b4(__VA_ARGS__) |
| #define vmsleu_vx_u16m4_b4_m | ( | ... | ) | __riscv_vmsleu_vx_u16m4_b4_mu(__VA_ARGS__) |
| #define vmsleu_vx_u16m8_b2 | ( | ... | ) | __riscv_vmsleu_vx_u16m8_b2(__VA_ARGS__) |
| #define vmsleu_vx_u16m8_b2_m | ( | ... | ) | __riscv_vmsleu_vx_u16m8_b2_mu(__VA_ARGS__) |
| #define vmsleu_vx_u16mf2_b32 | ( | ... | ) | __riscv_vmsleu_vx_u16mf2_b32(__VA_ARGS__) |
| #define vmsleu_vx_u16mf2_b32_m | ( | ... | ) | __riscv_vmsleu_vx_u16mf2_b32_mu(__VA_ARGS__) |
| #define vmsleu_vx_u16mf4_b64 | ( | ... | ) | __riscv_vmsleu_vx_u16mf4_b64(__VA_ARGS__) |
| #define vmsleu_vx_u16mf4_b64_m | ( | ... | ) | __riscv_vmsleu_vx_u16mf4_b64_mu(__VA_ARGS__) |
| #define vmsleu_vx_u32m1_b32 | ( | ... | ) | __riscv_vmsleu_vx_u32m1_b32(__VA_ARGS__) |
| #define vmsleu_vx_u32m1_b32_m | ( | ... | ) | __riscv_vmsleu_vx_u32m1_b32_mu(__VA_ARGS__) |
| #define vmsleu_vx_u32m2_b16 | ( | ... | ) | __riscv_vmsleu_vx_u32m2_b16(__VA_ARGS__) |
| #define vmsleu_vx_u32m2_b16_m | ( | ... | ) | __riscv_vmsleu_vx_u32m2_b16_mu(__VA_ARGS__) |
| #define vmsleu_vx_u32m4_b8 | ( | ... | ) | __riscv_vmsleu_vx_u32m4_b8(__VA_ARGS__) |
| #define vmsleu_vx_u32m4_b8_m | ( | ... | ) | __riscv_vmsleu_vx_u32m4_b8_mu(__VA_ARGS__) |
| #define vmsleu_vx_u32m8_b4 | ( | ... | ) | __riscv_vmsleu_vx_u32m8_b4(__VA_ARGS__) |
| #define vmsleu_vx_u32m8_b4_m | ( | ... | ) | __riscv_vmsleu_vx_u32m8_b4_mu(__VA_ARGS__) |
| #define vmsleu_vx_u32mf2_b64 | ( | ... | ) | __riscv_vmsleu_vx_u32mf2_b64(__VA_ARGS__) |
| #define vmsleu_vx_u32mf2_b64_m | ( | ... | ) | __riscv_vmsleu_vx_u32mf2_b64_mu(__VA_ARGS__) |
| #define vmsleu_vx_u64m1_b64 | ( | ... | ) | __riscv_vmsleu_vx_u64m1_b64(__VA_ARGS__) |
| #define vmsleu_vx_u64m1_b64_m | ( | ... | ) | __riscv_vmsleu_vx_u64m1_b64_mu(__VA_ARGS__) |
| #define vmsleu_vx_u64m2_b32 | ( | ... | ) | __riscv_vmsleu_vx_u64m2_b32(__VA_ARGS__) |
| #define vmsleu_vx_u64m2_b32_m | ( | ... | ) | __riscv_vmsleu_vx_u64m2_b32_mu(__VA_ARGS__) |
| #define vmsleu_vx_u64m4_b16 | ( | ... | ) | __riscv_vmsleu_vx_u64m4_b16(__VA_ARGS__) |
| #define vmsleu_vx_u64m4_b16_m | ( | ... | ) | __riscv_vmsleu_vx_u64m4_b16_mu(__VA_ARGS__) |
| #define vmsleu_vx_u64m8_b8 | ( | ... | ) | __riscv_vmsleu_vx_u64m8_b8(__VA_ARGS__) |
| #define vmsleu_vx_u64m8_b8_m | ( | ... | ) | __riscv_vmsleu_vx_u64m8_b8_mu(__VA_ARGS__) |
| #define vmsleu_vx_u8m1_b8 | ( | ... | ) | __riscv_vmsleu_vx_u8m1_b8(__VA_ARGS__) |
| #define vmsleu_vx_u8m1_b8_m | ( | ... | ) | __riscv_vmsleu_vx_u8m1_b8_mu(__VA_ARGS__) |
| #define vmsleu_vx_u8m2_b4 | ( | ... | ) | __riscv_vmsleu_vx_u8m2_b4(__VA_ARGS__) |
| #define vmsleu_vx_u8m2_b4_m | ( | ... | ) | __riscv_vmsleu_vx_u8m2_b4_mu(__VA_ARGS__) |
| #define vmsleu_vx_u8m4_b2 | ( | ... | ) | __riscv_vmsleu_vx_u8m4_b2(__VA_ARGS__) |
| #define vmsleu_vx_u8m4_b2_m | ( | ... | ) | __riscv_vmsleu_vx_u8m4_b2_mu(__VA_ARGS__) |
| #define vmsleu_vx_u8m8_b1 | ( | ... | ) | __riscv_vmsleu_vx_u8m8_b1(__VA_ARGS__) |
| #define vmsleu_vx_u8m8_b1_m | ( | ... | ) | __riscv_vmsleu_vx_u8m8_b1_mu(__VA_ARGS__) |
| #define vmsleu_vx_u8mf2_b16 | ( | ... | ) | __riscv_vmsleu_vx_u8mf2_b16(__VA_ARGS__) |
| #define vmsleu_vx_u8mf2_b16_m | ( | ... | ) | __riscv_vmsleu_vx_u8mf2_b16_mu(__VA_ARGS__) |
| #define vmsleu_vx_u8mf4_b32 | ( | ... | ) | __riscv_vmsleu_vx_u8mf4_b32(__VA_ARGS__) |
| #define vmsleu_vx_u8mf4_b32_m | ( | ... | ) | __riscv_vmsleu_vx_u8mf4_b32_mu(__VA_ARGS__) |
| #define vmsleu_vx_u8mf8_b64 | ( | ... | ) | __riscv_vmsleu_vx_u8mf8_b64(__VA_ARGS__) |
| #define vmsleu_vx_u8mf8_b64_m | ( | ... | ) | __riscv_vmsleu_vx_u8mf8_b64_mu(__VA_ARGS__) |
| #define vmslt_vv_i16m1_b16 | ( | ... | ) | __riscv_vmslt_vv_i16m1_b16(__VA_ARGS__) |
| #define vmslt_vv_i16m1_b16_m | ( | ... | ) | __riscv_vmslt_vv_i16m1_b16_mu(__VA_ARGS__) |
| #define vmslt_vv_i16m2_b8 | ( | ... | ) | __riscv_vmslt_vv_i16m2_b8(__VA_ARGS__) |
| #define vmslt_vv_i16m2_b8_m | ( | ... | ) | __riscv_vmslt_vv_i16m2_b8_mu(__VA_ARGS__) |
| #define vmslt_vv_i16m4_b4 | ( | ... | ) | __riscv_vmslt_vv_i16m4_b4(__VA_ARGS__) |
| #define vmslt_vv_i16m4_b4_m | ( | ... | ) | __riscv_vmslt_vv_i16m4_b4_mu(__VA_ARGS__) |
| #define vmslt_vv_i16m8_b2 | ( | ... | ) | __riscv_vmslt_vv_i16m8_b2(__VA_ARGS__) |
| #define vmslt_vv_i16m8_b2_m | ( | ... | ) | __riscv_vmslt_vv_i16m8_b2_mu(__VA_ARGS__) |
| #define vmslt_vv_i16mf2_b32 | ( | ... | ) | __riscv_vmslt_vv_i16mf2_b32(__VA_ARGS__) |
| #define vmslt_vv_i16mf2_b32_m | ( | ... | ) | __riscv_vmslt_vv_i16mf2_b32_mu(__VA_ARGS__) |
| #define vmslt_vv_i16mf4_b64 | ( | ... | ) | __riscv_vmslt_vv_i16mf4_b64(__VA_ARGS__) |
| #define vmslt_vv_i16mf4_b64_m | ( | ... | ) | __riscv_vmslt_vv_i16mf4_b64_mu(__VA_ARGS__) |
| #define vmslt_vv_i32m1_b32 | ( | ... | ) | __riscv_vmslt_vv_i32m1_b32(__VA_ARGS__) |
| #define vmslt_vv_i32m1_b32_m | ( | ... | ) | __riscv_vmslt_vv_i32m1_b32_mu(__VA_ARGS__) |
| #define vmslt_vv_i32m2_b16 | ( | ... | ) | __riscv_vmslt_vv_i32m2_b16(__VA_ARGS__) |
| #define vmslt_vv_i32m2_b16_m | ( | ... | ) | __riscv_vmslt_vv_i32m2_b16_mu(__VA_ARGS__) |
| #define vmslt_vv_i32m4_b8 | ( | ... | ) | __riscv_vmslt_vv_i32m4_b8(__VA_ARGS__) |
| #define vmslt_vv_i32m4_b8_m | ( | ... | ) | __riscv_vmslt_vv_i32m4_b8_mu(__VA_ARGS__) |
| #define vmslt_vv_i32m8_b4 | ( | ... | ) | __riscv_vmslt_vv_i32m8_b4(__VA_ARGS__) |
| #define vmslt_vv_i32m8_b4_m | ( | ... | ) | __riscv_vmslt_vv_i32m8_b4_mu(__VA_ARGS__) |
| #define vmslt_vv_i32mf2_b64 | ( | ... | ) | __riscv_vmslt_vv_i32mf2_b64(__VA_ARGS__) |
| #define vmslt_vv_i32mf2_b64_m | ( | ... | ) | __riscv_vmslt_vv_i32mf2_b64_mu(__VA_ARGS__) |
| #define vmslt_vv_i64m1_b64 | ( | ... | ) | __riscv_vmslt_vv_i64m1_b64(__VA_ARGS__) |
| #define vmslt_vv_i64m1_b64_m | ( | ... | ) | __riscv_vmslt_vv_i64m1_b64_mu(__VA_ARGS__) |
| #define vmslt_vv_i64m2_b32 | ( | ... | ) | __riscv_vmslt_vv_i64m2_b32(__VA_ARGS__) |
| #define vmslt_vv_i64m2_b32_m | ( | ... | ) | __riscv_vmslt_vv_i64m2_b32_mu(__VA_ARGS__) |
| #define vmslt_vv_i64m4_b16 | ( | ... | ) | __riscv_vmslt_vv_i64m4_b16(__VA_ARGS__) |
| #define vmslt_vv_i64m4_b16_m | ( | ... | ) | __riscv_vmslt_vv_i64m4_b16_mu(__VA_ARGS__) |
| #define vmslt_vv_i64m8_b8 | ( | ... | ) | __riscv_vmslt_vv_i64m8_b8(__VA_ARGS__) |
| #define vmslt_vv_i64m8_b8_m | ( | ... | ) | __riscv_vmslt_vv_i64m8_b8_mu(__VA_ARGS__) |
| #define vmslt_vv_i8m1_b8 | ( | ... | ) | __riscv_vmslt_vv_i8m1_b8(__VA_ARGS__) |
| #define vmslt_vv_i8m1_b8_m | ( | ... | ) | __riscv_vmslt_vv_i8m1_b8_mu(__VA_ARGS__) |
| #define vmslt_vv_i8m2_b4 | ( | ... | ) | __riscv_vmslt_vv_i8m2_b4(__VA_ARGS__) |
| #define vmslt_vv_i8m2_b4_m | ( | ... | ) | __riscv_vmslt_vv_i8m2_b4_mu(__VA_ARGS__) |
| #define vmslt_vv_i8m4_b2 | ( | ... | ) | __riscv_vmslt_vv_i8m4_b2(__VA_ARGS__) |
| #define vmslt_vv_i8m4_b2_m | ( | ... | ) | __riscv_vmslt_vv_i8m4_b2_mu(__VA_ARGS__) |
| #define vmslt_vv_i8m8_b1 | ( | ... | ) | __riscv_vmslt_vv_i8m8_b1(__VA_ARGS__) |
| #define vmslt_vv_i8m8_b1_m | ( | ... | ) | __riscv_vmslt_vv_i8m8_b1_mu(__VA_ARGS__) |
| #define vmslt_vv_i8mf2_b16 | ( | ... | ) | __riscv_vmslt_vv_i8mf2_b16(__VA_ARGS__) |
| #define vmslt_vv_i8mf2_b16_m | ( | ... | ) | __riscv_vmslt_vv_i8mf2_b16_mu(__VA_ARGS__) |
| #define vmslt_vv_i8mf4_b32 | ( | ... | ) | __riscv_vmslt_vv_i8mf4_b32(__VA_ARGS__) |
| #define vmslt_vv_i8mf4_b32_m | ( | ... | ) | __riscv_vmslt_vv_i8mf4_b32_mu(__VA_ARGS__) |
| #define vmslt_vv_i8mf8_b64 | ( | ... | ) | __riscv_vmslt_vv_i8mf8_b64(__VA_ARGS__) |
| #define vmslt_vv_i8mf8_b64_m | ( | ... | ) | __riscv_vmslt_vv_i8mf8_b64_mu(__VA_ARGS__) |
| #define vmslt_vx_i16m1_b16 | ( | ... | ) | __riscv_vmslt_vx_i16m1_b16(__VA_ARGS__) |
| #define vmslt_vx_i16m1_b16_m | ( | ... | ) | __riscv_vmslt_vx_i16m1_b16_mu(__VA_ARGS__) |
| #define vmslt_vx_i16m2_b8 | ( | ... | ) | __riscv_vmslt_vx_i16m2_b8(__VA_ARGS__) |
| #define vmslt_vx_i16m2_b8_m | ( | ... | ) | __riscv_vmslt_vx_i16m2_b8_mu(__VA_ARGS__) |
| #define vmslt_vx_i16m4_b4 | ( | ... | ) | __riscv_vmslt_vx_i16m4_b4(__VA_ARGS__) |
| #define vmslt_vx_i16m4_b4_m | ( | ... | ) | __riscv_vmslt_vx_i16m4_b4_mu(__VA_ARGS__) |
| #define vmslt_vx_i16m8_b2 | ( | ... | ) | __riscv_vmslt_vx_i16m8_b2(__VA_ARGS__) |
| #define vmslt_vx_i16m8_b2_m | ( | ... | ) | __riscv_vmslt_vx_i16m8_b2_mu(__VA_ARGS__) |
| #define vmslt_vx_i16mf2_b32 | ( | ... | ) | __riscv_vmslt_vx_i16mf2_b32(__VA_ARGS__) |
| #define vmslt_vx_i16mf2_b32_m | ( | ... | ) | __riscv_vmslt_vx_i16mf2_b32_mu(__VA_ARGS__) |
| #define vmslt_vx_i16mf4_b64 | ( | ... | ) | __riscv_vmslt_vx_i16mf4_b64(__VA_ARGS__) |
| #define vmslt_vx_i16mf4_b64_m | ( | ... | ) | __riscv_vmslt_vx_i16mf4_b64_mu(__VA_ARGS__) |
| #define vmslt_vx_i32m1_b32 | ( | ... | ) | __riscv_vmslt_vx_i32m1_b32(__VA_ARGS__) |
| #define vmslt_vx_i32m1_b32_m | ( | ... | ) | __riscv_vmslt_vx_i32m1_b32_mu(__VA_ARGS__) |
| #define vmslt_vx_i32m2_b16 | ( | ... | ) | __riscv_vmslt_vx_i32m2_b16(__VA_ARGS__) |
| #define vmslt_vx_i32m2_b16_m | ( | ... | ) | __riscv_vmslt_vx_i32m2_b16_mu(__VA_ARGS__) |
| #define vmslt_vx_i32m4_b8 | ( | ... | ) | __riscv_vmslt_vx_i32m4_b8(__VA_ARGS__) |
| #define vmslt_vx_i32m4_b8_m | ( | ... | ) | __riscv_vmslt_vx_i32m4_b8_mu(__VA_ARGS__) |
| #define vmslt_vx_i32m8_b4 | ( | ... | ) | __riscv_vmslt_vx_i32m8_b4(__VA_ARGS__) |
| #define vmslt_vx_i32m8_b4_m | ( | ... | ) | __riscv_vmslt_vx_i32m8_b4_mu(__VA_ARGS__) |
| #define vmslt_vx_i32mf2_b64 | ( | ... | ) | __riscv_vmslt_vx_i32mf2_b64(__VA_ARGS__) |
| #define vmslt_vx_i32mf2_b64_m | ( | ... | ) | __riscv_vmslt_vx_i32mf2_b64_mu(__VA_ARGS__) |
| #define vmslt_vx_i64m1_b64 | ( | ... | ) | __riscv_vmslt_vx_i64m1_b64(__VA_ARGS__) |
| #define vmslt_vx_i64m1_b64_m | ( | ... | ) | __riscv_vmslt_vx_i64m1_b64_mu(__VA_ARGS__) |
| #define vmslt_vx_i64m2_b32 | ( | ... | ) | __riscv_vmslt_vx_i64m2_b32(__VA_ARGS__) |
| #define vmslt_vx_i64m2_b32_m | ( | ... | ) | __riscv_vmslt_vx_i64m2_b32_mu(__VA_ARGS__) |
| #define vmslt_vx_i64m4_b16 | ( | ... | ) | __riscv_vmslt_vx_i64m4_b16(__VA_ARGS__) |
| #define vmslt_vx_i64m4_b16_m | ( | ... | ) | __riscv_vmslt_vx_i64m4_b16_mu(__VA_ARGS__) |
| #define vmslt_vx_i64m8_b8 | ( | ... | ) | __riscv_vmslt_vx_i64m8_b8(__VA_ARGS__) |
| #define vmslt_vx_i64m8_b8_m | ( | ... | ) | __riscv_vmslt_vx_i64m8_b8_mu(__VA_ARGS__) |
| #define vmslt_vx_i8m1_b8 | ( | ... | ) | __riscv_vmslt_vx_i8m1_b8(__VA_ARGS__) |
| #define vmslt_vx_i8m1_b8_m | ( | ... | ) | __riscv_vmslt_vx_i8m1_b8_mu(__VA_ARGS__) |
| #define vmslt_vx_i8m2_b4 | ( | ... | ) | __riscv_vmslt_vx_i8m2_b4(__VA_ARGS__) |
| #define vmslt_vx_i8m2_b4_m | ( | ... | ) | __riscv_vmslt_vx_i8m2_b4_mu(__VA_ARGS__) |
| #define vmslt_vx_i8m4_b2 | ( | ... | ) | __riscv_vmslt_vx_i8m4_b2(__VA_ARGS__) |
| #define vmslt_vx_i8m4_b2_m | ( | ... | ) | __riscv_vmslt_vx_i8m4_b2_mu(__VA_ARGS__) |
| #define vmslt_vx_i8m8_b1 | ( | ... | ) | __riscv_vmslt_vx_i8m8_b1(__VA_ARGS__) |
| #define vmslt_vx_i8m8_b1_m | ( | ... | ) | __riscv_vmslt_vx_i8m8_b1_mu(__VA_ARGS__) |
| #define vmslt_vx_i8mf2_b16 | ( | ... | ) | __riscv_vmslt_vx_i8mf2_b16(__VA_ARGS__) |
| #define vmslt_vx_i8mf2_b16_m | ( | ... | ) | __riscv_vmslt_vx_i8mf2_b16_mu(__VA_ARGS__) |
| #define vmslt_vx_i8mf4_b32 | ( | ... | ) | __riscv_vmslt_vx_i8mf4_b32(__VA_ARGS__) |
| #define vmslt_vx_i8mf4_b32_m | ( | ... | ) | __riscv_vmslt_vx_i8mf4_b32_mu(__VA_ARGS__) |
| #define vmslt_vx_i8mf8_b64 | ( | ... | ) | __riscv_vmslt_vx_i8mf8_b64(__VA_ARGS__) |
| #define vmslt_vx_i8mf8_b64_m | ( | ... | ) | __riscv_vmslt_vx_i8mf8_b64_mu(__VA_ARGS__) |
| #define vmsltu_vv_u16m1_b16 | ( | ... | ) | __riscv_vmsltu_vv_u16m1_b16(__VA_ARGS__) |
| #define vmsltu_vv_u16m1_b16_m | ( | ... | ) | __riscv_vmsltu_vv_u16m1_b16_mu(__VA_ARGS__) |
| #define vmsltu_vv_u16m2_b8 | ( | ... | ) | __riscv_vmsltu_vv_u16m2_b8(__VA_ARGS__) |
| #define vmsltu_vv_u16m2_b8_m | ( | ... | ) | __riscv_vmsltu_vv_u16m2_b8_mu(__VA_ARGS__) |
| #define vmsltu_vv_u16m4_b4 | ( | ... | ) | __riscv_vmsltu_vv_u16m4_b4(__VA_ARGS__) |
| #define vmsltu_vv_u16m4_b4_m | ( | ... | ) | __riscv_vmsltu_vv_u16m4_b4_mu(__VA_ARGS__) |
| #define vmsltu_vv_u16m8_b2 | ( | ... | ) | __riscv_vmsltu_vv_u16m8_b2(__VA_ARGS__) |
| #define vmsltu_vv_u16m8_b2_m | ( | ... | ) | __riscv_vmsltu_vv_u16m8_b2_mu(__VA_ARGS__) |
| #define vmsltu_vv_u16mf2_b32 | ( | ... | ) | __riscv_vmsltu_vv_u16mf2_b32(__VA_ARGS__) |
| #define vmsltu_vv_u16mf2_b32_m | ( | ... | ) | __riscv_vmsltu_vv_u16mf2_b32_mu(__VA_ARGS__) |
| #define vmsltu_vv_u16mf4_b64 | ( | ... | ) | __riscv_vmsltu_vv_u16mf4_b64(__VA_ARGS__) |
| #define vmsltu_vv_u16mf4_b64_m | ( | ... | ) | __riscv_vmsltu_vv_u16mf4_b64_mu(__VA_ARGS__) |
| #define vmsltu_vv_u32m1_b32 | ( | ... | ) | __riscv_vmsltu_vv_u32m1_b32(__VA_ARGS__) |
| #define vmsltu_vv_u32m1_b32_m | ( | ... | ) | __riscv_vmsltu_vv_u32m1_b32_mu(__VA_ARGS__) |
| #define vmsltu_vv_u32m2_b16 | ( | ... | ) | __riscv_vmsltu_vv_u32m2_b16(__VA_ARGS__) |
| #define vmsltu_vv_u32m2_b16_m | ( | ... | ) | __riscv_vmsltu_vv_u32m2_b16_mu(__VA_ARGS__) |
| #define vmsltu_vv_u32m4_b8 | ( | ... | ) | __riscv_vmsltu_vv_u32m4_b8(__VA_ARGS__) |
| #define vmsltu_vv_u32m4_b8_m | ( | ... | ) | __riscv_vmsltu_vv_u32m4_b8_mu(__VA_ARGS__) |
| #define vmsltu_vv_u32m8_b4 | ( | ... | ) | __riscv_vmsltu_vv_u32m8_b4(__VA_ARGS__) |
| #define vmsltu_vv_u32m8_b4_m | ( | ... | ) | __riscv_vmsltu_vv_u32m8_b4_mu(__VA_ARGS__) |
| #define vmsltu_vv_u32mf2_b64 | ( | ... | ) | __riscv_vmsltu_vv_u32mf2_b64(__VA_ARGS__) |
| #define vmsltu_vv_u32mf2_b64_m | ( | ... | ) | __riscv_vmsltu_vv_u32mf2_b64_mu(__VA_ARGS__) |
| #define vmsltu_vv_u64m1_b64 | ( | ... | ) | __riscv_vmsltu_vv_u64m1_b64(__VA_ARGS__) |
| #define vmsltu_vv_u64m1_b64_m | ( | ... | ) | __riscv_vmsltu_vv_u64m1_b64_mu(__VA_ARGS__) |
| #define vmsltu_vv_u64m2_b32 | ( | ... | ) | __riscv_vmsltu_vv_u64m2_b32(__VA_ARGS__) |
| #define vmsltu_vv_u64m2_b32_m | ( | ... | ) | __riscv_vmsltu_vv_u64m2_b32_mu(__VA_ARGS__) |
| #define vmsltu_vv_u64m4_b16 | ( | ... | ) | __riscv_vmsltu_vv_u64m4_b16(__VA_ARGS__) |
| #define vmsltu_vv_u64m4_b16_m | ( | ... | ) | __riscv_vmsltu_vv_u64m4_b16_mu(__VA_ARGS__) |
| #define vmsltu_vv_u64m8_b8 | ( | ... | ) | __riscv_vmsltu_vv_u64m8_b8(__VA_ARGS__) |
| #define vmsltu_vv_u64m8_b8_m | ( | ... | ) | __riscv_vmsltu_vv_u64m8_b8_mu(__VA_ARGS__) |
| #define vmsltu_vv_u8m1_b8 | ( | ... | ) | __riscv_vmsltu_vv_u8m1_b8(__VA_ARGS__) |
| #define vmsltu_vv_u8m1_b8_m | ( | ... | ) | __riscv_vmsltu_vv_u8m1_b8_mu(__VA_ARGS__) |
| #define vmsltu_vv_u8m2_b4 | ( | ... | ) | __riscv_vmsltu_vv_u8m2_b4(__VA_ARGS__) |
| #define vmsltu_vv_u8m2_b4_m | ( | ... | ) | __riscv_vmsltu_vv_u8m2_b4_mu(__VA_ARGS__) |
| #define vmsltu_vv_u8m4_b2 | ( | ... | ) | __riscv_vmsltu_vv_u8m4_b2(__VA_ARGS__) |
| #define vmsltu_vv_u8m4_b2_m | ( | ... | ) | __riscv_vmsltu_vv_u8m4_b2_mu(__VA_ARGS__) |
| #define vmsltu_vv_u8m8_b1 | ( | ... | ) | __riscv_vmsltu_vv_u8m8_b1(__VA_ARGS__) |
| #define vmsltu_vv_u8m8_b1_m | ( | ... | ) | __riscv_vmsltu_vv_u8m8_b1_mu(__VA_ARGS__) |
| #define vmsltu_vv_u8mf2_b16 | ( | ... | ) | __riscv_vmsltu_vv_u8mf2_b16(__VA_ARGS__) |
| #define vmsltu_vv_u8mf2_b16_m | ( | ... | ) | __riscv_vmsltu_vv_u8mf2_b16_mu(__VA_ARGS__) |
| #define vmsltu_vv_u8mf4_b32 | ( | ... | ) | __riscv_vmsltu_vv_u8mf4_b32(__VA_ARGS__) |
| #define vmsltu_vv_u8mf4_b32_m | ( | ... | ) | __riscv_vmsltu_vv_u8mf4_b32_mu(__VA_ARGS__) |
| #define vmsltu_vv_u8mf8_b64 | ( | ... | ) | __riscv_vmsltu_vv_u8mf8_b64(__VA_ARGS__) |
| #define vmsltu_vv_u8mf8_b64_m | ( | ... | ) | __riscv_vmsltu_vv_u8mf8_b64_mu(__VA_ARGS__) |
| #define vmsltu_vx_u16m1_b16 | ( | ... | ) | __riscv_vmsltu_vx_u16m1_b16(__VA_ARGS__) |
| #define vmsltu_vx_u16m1_b16_m | ( | ... | ) | __riscv_vmsltu_vx_u16m1_b16_mu(__VA_ARGS__) |
| #define vmsltu_vx_u16m2_b8 | ( | ... | ) | __riscv_vmsltu_vx_u16m2_b8(__VA_ARGS__) |
| #define vmsltu_vx_u16m2_b8_m | ( | ... | ) | __riscv_vmsltu_vx_u16m2_b8_mu(__VA_ARGS__) |
| #define vmsltu_vx_u16m4_b4 | ( | ... | ) | __riscv_vmsltu_vx_u16m4_b4(__VA_ARGS__) |
| #define vmsltu_vx_u16m4_b4_m | ( | ... | ) | __riscv_vmsltu_vx_u16m4_b4_mu(__VA_ARGS__) |
| #define vmsltu_vx_u16m8_b2 | ( | ... | ) | __riscv_vmsltu_vx_u16m8_b2(__VA_ARGS__) |
| #define vmsltu_vx_u16m8_b2_m | ( | ... | ) | __riscv_vmsltu_vx_u16m8_b2_mu(__VA_ARGS__) |
| #define vmsltu_vx_u16mf2_b32 | ( | ... | ) | __riscv_vmsltu_vx_u16mf2_b32(__VA_ARGS__) |
| #define vmsltu_vx_u16mf2_b32_m | ( | ... | ) | __riscv_vmsltu_vx_u16mf2_b32_mu(__VA_ARGS__) |
| #define vmsltu_vx_u16mf4_b64 | ( | ... | ) | __riscv_vmsltu_vx_u16mf4_b64(__VA_ARGS__) |
| #define vmsltu_vx_u16mf4_b64_m | ( | ... | ) | __riscv_vmsltu_vx_u16mf4_b64_mu(__VA_ARGS__) |
| #define vmsltu_vx_u32m1_b32 | ( | ... | ) | __riscv_vmsltu_vx_u32m1_b32(__VA_ARGS__) |
| #define vmsltu_vx_u32m1_b32_m | ( | ... | ) | __riscv_vmsltu_vx_u32m1_b32_mu(__VA_ARGS__) |
| #define vmsltu_vx_u32m2_b16 | ( | ... | ) | __riscv_vmsltu_vx_u32m2_b16(__VA_ARGS__) |
| #define vmsltu_vx_u32m2_b16_m | ( | ... | ) | __riscv_vmsltu_vx_u32m2_b16_mu(__VA_ARGS__) |
| #define vmsltu_vx_u32m4_b8 | ( | ... | ) | __riscv_vmsltu_vx_u32m4_b8(__VA_ARGS__) |
| #define vmsltu_vx_u32m4_b8_m | ( | ... | ) | __riscv_vmsltu_vx_u32m4_b8_mu(__VA_ARGS__) |
| #define vmsltu_vx_u32m8_b4 | ( | ... | ) | __riscv_vmsltu_vx_u32m8_b4(__VA_ARGS__) |
| #define vmsltu_vx_u32m8_b4_m | ( | ... | ) | __riscv_vmsltu_vx_u32m8_b4_mu(__VA_ARGS__) |
| #define vmsltu_vx_u32mf2_b64 | ( | ... | ) | __riscv_vmsltu_vx_u32mf2_b64(__VA_ARGS__) |
| #define vmsltu_vx_u32mf2_b64_m | ( | ... | ) | __riscv_vmsltu_vx_u32mf2_b64_mu(__VA_ARGS__) |
| #define vmsltu_vx_u64m1_b64 | ( | ... | ) | __riscv_vmsltu_vx_u64m1_b64(__VA_ARGS__) |
| #define vmsltu_vx_u64m1_b64_m | ( | ... | ) | __riscv_vmsltu_vx_u64m1_b64_mu(__VA_ARGS__) |
| #define vmsltu_vx_u64m2_b32 | ( | ... | ) | __riscv_vmsltu_vx_u64m2_b32(__VA_ARGS__) |
| #define vmsltu_vx_u64m2_b32_m | ( | ... | ) | __riscv_vmsltu_vx_u64m2_b32_mu(__VA_ARGS__) |
| #define vmsltu_vx_u64m4_b16 | ( | ... | ) | __riscv_vmsltu_vx_u64m4_b16(__VA_ARGS__) |
| #define vmsltu_vx_u64m4_b16_m | ( | ... | ) | __riscv_vmsltu_vx_u64m4_b16_mu(__VA_ARGS__) |
| #define vmsltu_vx_u64m8_b8 | ( | ... | ) | __riscv_vmsltu_vx_u64m8_b8(__VA_ARGS__) |
| #define vmsltu_vx_u64m8_b8_m | ( | ... | ) | __riscv_vmsltu_vx_u64m8_b8_mu(__VA_ARGS__) |
| #define vmsltu_vx_u8m1_b8 | ( | ... | ) | __riscv_vmsltu_vx_u8m1_b8(__VA_ARGS__) |
| #define vmsltu_vx_u8m1_b8_m | ( | ... | ) | __riscv_vmsltu_vx_u8m1_b8_mu(__VA_ARGS__) |
| #define vmsltu_vx_u8m2_b4 | ( | ... | ) | __riscv_vmsltu_vx_u8m2_b4(__VA_ARGS__) |
| #define vmsltu_vx_u8m2_b4_m | ( | ... | ) | __riscv_vmsltu_vx_u8m2_b4_mu(__VA_ARGS__) |
| #define vmsltu_vx_u8m4_b2 | ( | ... | ) | __riscv_vmsltu_vx_u8m4_b2(__VA_ARGS__) |
| #define vmsltu_vx_u8m4_b2_m | ( | ... | ) | __riscv_vmsltu_vx_u8m4_b2_mu(__VA_ARGS__) |
| #define vmsltu_vx_u8m8_b1 | ( | ... | ) | __riscv_vmsltu_vx_u8m8_b1(__VA_ARGS__) |
| #define vmsltu_vx_u8m8_b1_m | ( | ... | ) | __riscv_vmsltu_vx_u8m8_b1_mu(__VA_ARGS__) |
| #define vmsltu_vx_u8mf2_b16 | ( | ... | ) | __riscv_vmsltu_vx_u8mf2_b16(__VA_ARGS__) |
| #define vmsltu_vx_u8mf2_b16_m | ( | ... | ) | __riscv_vmsltu_vx_u8mf2_b16_mu(__VA_ARGS__) |
| #define vmsltu_vx_u8mf4_b32 | ( | ... | ) | __riscv_vmsltu_vx_u8mf4_b32(__VA_ARGS__) |
| #define vmsltu_vx_u8mf4_b32_m | ( | ... | ) | __riscv_vmsltu_vx_u8mf4_b32_mu(__VA_ARGS__) |
| #define vmsltu_vx_u8mf8_b64 | ( | ... | ) | __riscv_vmsltu_vx_u8mf8_b64(__VA_ARGS__) |
| #define vmsltu_vx_u8mf8_b64_m | ( | ... | ) | __riscv_vmsltu_vx_u8mf8_b64_mu(__VA_ARGS__) |
| #define vmsne_vv_i16m1_b16 | ( | ... | ) | __riscv_vmsne_vv_i16m1_b16(__VA_ARGS__) |
| #define vmsne_vv_i16m1_b16_m | ( | ... | ) | __riscv_vmsne_vv_i16m1_b16_mu(__VA_ARGS__) |
| #define vmsne_vv_i16m2_b8 | ( | ... | ) | __riscv_vmsne_vv_i16m2_b8(__VA_ARGS__) |
| #define vmsne_vv_i16m2_b8_m | ( | ... | ) | __riscv_vmsne_vv_i16m2_b8_mu(__VA_ARGS__) |
| #define vmsne_vv_i16m4_b4 | ( | ... | ) | __riscv_vmsne_vv_i16m4_b4(__VA_ARGS__) |
| #define vmsne_vv_i16m4_b4_m | ( | ... | ) | __riscv_vmsne_vv_i16m4_b4_mu(__VA_ARGS__) |
| #define vmsne_vv_i16m8_b2 | ( | ... | ) | __riscv_vmsne_vv_i16m8_b2(__VA_ARGS__) |
| #define vmsne_vv_i16m8_b2_m | ( | ... | ) | __riscv_vmsne_vv_i16m8_b2_mu(__VA_ARGS__) |
| #define vmsne_vv_i16mf2_b32 | ( | ... | ) | __riscv_vmsne_vv_i16mf2_b32(__VA_ARGS__) |
| #define vmsne_vv_i16mf2_b32_m | ( | ... | ) | __riscv_vmsne_vv_i16mf2_b32_mu(__VA_ARGS__) |
| #define vmsne_vv_i16mf4_b64 | ( | ... | ) | __riscv_vmsne_vv_i16mf4_b64(__VA_ARGS__) |
| #define vmsne_vv_i16mf4_b64_m | ( | ... | ) | __riscv_vmsne_vv_i16mf4_b64_mu(__VA_ARGS__) |
| #define vmsne_vv_i32m1_b32 | ( | ... | ) | __riscv_vmsne_vv_i32m1_b32(__VA_ARGS__) |
| #define vmsne_vv_i32m1_b32_m | ( | ... | ) | __riscv_vmsne_vv_i32m1_b32_mu(__VA_ARGS__) |
| #define vmsne_vv_i32m2_b16 | ( | ... | ) | __riscv_vmsne_vv_i32m2_b16(__VA_ARGS__) |
| #define vmsne_vv_i32m2_b16_m | ( | ... | ) | __riscv_vmsne_vv_i32m2_b16_mu(__VA_ARGS__) |
| #define vmsne_vv_i32m4_b8 | ( | ... | ) | __riscv_vmsne_vv_i32m4_b8(__VA_ARGS__) |
| #define vmsne_vv_i32m4_b8_m | ( | ... | ) | __riscv_vmsne_vv_i32m4_b8_mu(__VA_ARGS__) |
| #define vmsne_vv_i32m8_b4 | ( | ... | ) | __riscv_vmsne_vv_i32m8_b4(__VA_ARGS__) |
| #define vmsne_vv_i32m8_b4_m | ( | ... | ) | __riscv_vmsne_vv_i32m8_b4_mu(__VA_ARGS__) |
| #define vmsne_vv_i32mf2_b64 | ( | ... | ) | __riscv_vmsne_vv_i32mf2_b64(__VA_ARGS__) |
| #define vmsne_vv_i32mf2_b64_m | ( | ... | ) | __riscv_vmsne_vv_i32mf2_b64_mu(__VA_ARGS__) |
| #define vmsne_vv_i64m1_b64 | ( | ... | ) | __riscv_vmsne_vv_i64m1_b64(__VA_ARGS__) |
| #define vmsne_vv_i64m1_b64_m | ( | ... | ) | __riscv_vmsne_vv_i64m1_b64_mu(__VA_ARGS__) |
| #define vmsne_vv_i64m2_b32 | ( | ... | ) | __riscv_vmsne_vv_i64m2_b32(__VA_ARGS__) |
| #define vmsne_vv_i64m2_b32_m | ( | ... | ) | __riscv_vmsne_vv_i64m2_b32_mu(__VA_ARGS__) |
| #define vmsne_vv_i64m4_b16 | ( | ... | ) | __riscv_vmsne_vv_i64m4_b16(__VA_ARGS__) |
| #define vmsne_vv_i64m4_b16_m | ( | ... | ) | __riscv_vmsne_vv_i64m4_b16_mu(__VA_ARGS__) |
| #define vmsne_vv_i64m8_b8 | ( | ... | ) | __riscv_vmsne_vv_i64m8_b8(__VA_ARGS__) |
| #define vmsne_vv_i64m8_b8_m | ( | ... | ) | __riscv_vmsne_vv_i64m8_b8_mu(__VA_ARGS__) |
| #define vmsne_vv_i8m1_b8 | ( | ... | ) | __riscv_vmsne_vv_i8m1_b8(__VA_ARGS__) |
| #define vmsne_vv_i8m1_b8_m | ( | ... | ) | __riscv_vmsne_vv_i8m1_b8_mu(__VA_ARGS__) |
| #define vmsne_vv_i8m2_b4 | ( | ... | ) | __riscv_vmsne_vv_i8m2_b4(__VA_ARGS__) |
| #define vmsne_vv_i8m2_b4_m | ( | ... | ) | __riscv_vmsne_vv_i8m2_b4_mu(__VA_ARGS__) |
| #define vmsne_vv_i8m4_b2 | ( | ... | ) | __riscv_vmsne_vv_i8m4_b2(__VA_ARGS__) |
| #define vmsne_vv_i8m4_b2_m | ( | ... | ) | __riscv_vmsne_vv_i8m4_b2_mu(__VA_ARGS__) |
| #define vmsne_vv_i8m8_b1 | ( | ... | ) | __riscv_vmsne_vv_i8m8_b1(__VA_ARGS__) |
| #define vmsne_vv_i8m8_b1_m | ( | ... | ) | __riscv_vmsne_vv_i8m8_b1_mu(__VA_ARGS__) |
| #define vmsne_vv_i8mf2_b16 | ( | ... | ) | __riscv_vmsne_vv_i8mf2_b16(__VA_ARGS__) |
| #define vmsne_vv_i8mf2_b16_m | ( | ... | ) | __riscv_vmsne_vv_i8mf2_b16_mu(__VA_ARGS__) |
| #define vmsne_vv_i8mf4_b32 | ( | ... | ) | __riscv_vmsne_vv_i8mf4_b32(__VA_ARGS__) |
| #define vmsne_vv_i8mf4_b32_m | ( | ... | ) | __riscv_vmsne_vv_i8mf4_b32_mu(__VA_ARGS__) |
| #define vmsne_vv_i8mf8_b64 | ( | ... | ) | __riscv_vmsne_vv_i8mf8_b64(__VA_ARGS__) |
| #define vmsne_vv_i8mf8_b64_m | ( | ... | ) | __riscv_vmsne_vv_i8mf8_b64_mu(__VA_ARGS__) |
| #define vmsne_vv_u16m1_b16 | ( | ... | ) | __riscv_vmsne_vv_u16m1_b16(__VA_ARGS__) |
| #define vmsne_vv_u16m1_b16_m | ( | ... | ) | __riscv_vmsne_vv_u16m1_b16_mu(__VA_ARGS__) |
| #define vmsne_vv_u16m2_b8 | ( | ... | ) | __riscv_vmsne_vv_u16m2_b8(__VA_ARGS__) |
| #define vmsne_vv_u16m2_b8_m | ( | ... | ) | __riscv_vmsne_vv_u16m2_b8_mu(__VA_ARGS__) |
| #define vmsne_vv_u16m4_b4 | ( | ... | ) | __riscv_vmsne_vv_u16m4_b4(__VA_ARGS__) |
| #define vmsne_vv_u16m4_b4_m | ( | ... | ) | __riscv_vmsne_vv_u16m4_b4_mu(__VA_ARGS__) |
| #define vmsne_vv_u16m8_b2 | ( | ... | ) | __riscv_vmsne_vv_u16m8_b2(__VA_ARGS__) |
| #define vmsne_vv_u16m8_b2_m | ( | ... | ) | __riscv_vmsne_vv_u16m8_b2_mu(__VA_ARGS__) |
| #define vmsne_vv_u16mf2_b32 | ( | ... | ) | __riscv_vmsne_vv_u16mf2_b32(__VA_ARGS__) |
| #define vmsne_vv_u16mf2_b32_m | ( | ... | ) | __riscv_vmsne_vv_u16mf2_b32_mu(__VA_ARGS__) |
| #define vmsne_vv_u16mf4_b64 | ( | ... | ) | __riscv_vmsne_vv_u16mf4_b64(__VA_ARGS__) |
| #define vmsne_vv_u16mf4_b64_m | ( | ... | ) | __riscv_vmsne_vv_u16mf4_b64_mu(__VA_ARGS__) |
| #define vmsne_vv_u32m1_b32 | ( | ... | ) | __riscv_vmsne_vv_u32m1_b32(__VA_ARGS__) |
| #define vmsne_vv_u32m1_b32_m | ( | ... | ) | __riscv_vmsne_vv_u32m1_b32_mu(__VA_ARGS__) |
| #define vmsne_vv_u32m2_b16 | ( | ... | ) | __riscv_vmsne_vv_u32m2_b16(__VA_ARGS__) |
| #define vmsne_vv_u32m2_b16_m | ( | ... | ) | __riscv_vmsne_vv_u32m2_b16_mu(__VA_ARGS__) |
| #define vmsne_vv_u32m4_b8 | ( | ... | ) | __riscv_vmsne_vv_u32m4_b8(__VA_ARGS__) |
| #define vmsne_vv_u32m4_b8_m | ( | ... | ) | __riscv_vmsne_vv_u32m4_b8_mu(__VA_ARGS__) |
| #define vmsne_vv_u32m8_b4 | ( | ... | ) | __riscv_vmsne_vv_u32m8_b4(__VA_ARGS__) |
| #define vmsne_vv_u32m8_b4_m | ( | ... | ) | __riscv_vmsne_vv_u32m8_b4_mu(__VA_ARGS__) |
| #define vmsne_vv_u32mf2_b64 | ( | ... | ) | __riscv_vmsne_vv_u32mf2_b64(__VA_ARGS__) |
| #define vmsne_vv_u32mf2_b64_m | ( | ... | ) | __riscv_vmsne_vv_u32mf2_b64_mu(__VA_ARGS__) |
| #define vmsne_vv_u64m1_b64 | ( | ... | ) | __riscv_vmsne_vv_u64m1_b64(__VA_ARGS__) |
| #define vmsne_vv_u64m1_b64_m | ( | ... | ) | __riscv_vmsne_vv_u64m1_b64_mu(__VA_ARGS__) |
| #define vmsne_vv_u64m2_b32 | ( | ... | ) | __riscv_vmsne_vv_u64m2_b32(__VA_ARGS__) |
| #define vmsne_vv_u64m2_b32_m | ( | ... | ) | __riscv_vmsne_vv_u64m2_b32_mu(__VA_ARGS__) |
| #define vmsne_vv_u64m4_b16 | ( | ... | ) | __riscv_vmsne_vv_u64m4_b16(__VA_ARGS__) |
| #define vmsne_vv_u64m4_b16_m | ( | ... | ) | __riscv_vmsne_vv_u64m4_b16_mu(__VA_ARGS__) |
| #define vmsne_vv_u64m8_b8 | ( | ... | ) | __riscv_vmsne_vv_u64m8_b8(__VA_ARGS__) |
| #define vmsne_vv_u64m8_b8_m | ( | ... | ) | __riscv_vmsne_vv_u64m8_b8_mu(__VA_ARGS__) |
| #define vmsne_vv_u8m1_b8 | ( | ... | ) | __riscv_vmsne_vv_u8m1_b8(__VA_ARGS__) |
| #define vmsne_vv_u8m1_b8_m | ( | ... | ) | __riscv_vmsne_vv_u8m1_b8_mu(__VA_ARGS__) |
| #define vmsne_vv_u8m2_b4 | ( | ... | ) | __riscv_vmsne_vv_u8m2_b4(__VA_ARGS__) |
| #define vmsne_vv_u8m2_b4_m | ( | ... | ) | __riscv_vmsne_vv_u8m2_b4_mu(__VA_ARGS__) |
| #define vmsne_vv_u8m4_b2 | ( | ... | ) | __riscv_vmsne_vv_u8m4_b2(__VA_ARGS__) |
| #define vmsne_vv_u8m4_b2_m | ( | ... | ) | __riscv_vmsne_vv_u8m4_b2_mu(__VA_ARGS__) |
| #define vmsne_vv_u8m8_b1 | ( | ... | ) | __riscv_vmsne_vv_u8m8_b1(__VA_ARGS__) |
| #define vmsne_vv_u8m8_b1_m | ( | ... | ) | __riscv_vmsne_vv_u8m8_b1_mu(__VA_ARGS__) |
| #define vmsne_vv_u8mf2_b16 | ( | ... | ) | __riscv_vmsne_vv_u8mf2_b16(__VA_ARGS__) |
| #define vmsne_vv_u8mf2_b16_m | ( | ... | ) | __riscv_vmsne_vv_u8mf2_b16_mu(__VA_ARGS__) |
| #define vmsne_vv_u8mf4_b32 | ( | ... | ) | __riscv_vmsne_vv_u8mf4_b32(__VA_ARGS__) |
| #define vmsne_vv_u8mf4_b32_m | ( | ... | ) | __riscv_vmsne_vv_u8mf4_b32_mu(__VA_ARGS__) |
| #define vmsne_vv_u8mf8_b64 | ( | ... | ) | __riscv_vmsne_vv_u8mf8_b64(__VA_ARGS__) |
| #define vmsne_vv_u8mf8_b64_m | ( | ... | ) | __riscv_vmsne_vv_u8mf8_b64_mu(__VA_ARGS__) |
| #define vmsne_vx_i16m1_b16 | ( | ... | ) | __riscv_vmsne_vx_i16m1_b16(__VA_ARGS__) |
| #define vmsne_vx_i16m1_b16_m | ( | ... | ) | __riscv_vmsne_vx_i16m1_b16_mu(__VA_ARGS__) |
| #define vmsne_vx_i16m2_b8 | ( | ... | ) | __riscv_vmsne_vx_i16m2_b8(__VA_ARGS__) |
| #define vmsne_vx_i16m2_b8_m | ( | ... | ) | __riscv_vmsne_vx_i16m2_b8_mu(__VA_ARGS__) |
| #define vmsne_vx_i16m4_b4 | ( | ... | ) | __riscv_vmsne_vx_i16m4_b4(__VA_ARGS__) |
| #define vmsne_vx_i16m4_b4_m | ( | ... | ) | __riscv_vmsne_vx_i16m4_b4_mu(__VA_ARGS__) |
| #define vmsne_vx_i16m8_b2 | ( | ... | ) | __riscv_vmsne_vx_i16m8_b2(__VA_ARGS__) |
| #define vmsne_vx_i16m8_b2_m | ( | ... | ) | __riscv_vmsne_vx_i16m8_b2_mu(__VA_ARGS__) |
| #define vmsne_vx_i16mf2_b32 | ( | ... | ) | __riscv_vmsne_vx_i16mf2_b32(__VA_ARGS__) |
| #define vmsne_vx_i16mf2_b32_m | ( | ... | ) | __riscv_vmsne_vx_i16mf2_b32_mu(__VA_ARGS__) |
| #define vmsne_vx_i16mf4_b64 | ( | ... | ) | __riscv_vmsne_vx_i16mf4_b64(__VA_ARGS__) |
| #define vmsne_vx_i16mf4_b64_m | ( | ... | ) | __riscv_vmsne_vx_i16mf4_b64_mu(__VA_ARGS__) |
| #define vmsne_vx_i32m1_b32 | ( | ... | ) | __riscv_vmsne_vx_i32m1_b32(__VA_ARGS__) |
| #define vmsne_vx_i32m1_b32_m | ( | ... | ) | __riscv_vmsne_vx_i32m1_b32_mu(__VA_ARGS__) |
| #define vmsne_vx_i32m2_b16 | ( | ... | ) | __riscv_vmsne_vx_i32m2_b16(__VA_ARGS__) |
| #define vmsne_vx_i32m2_b16_m | ( | ... | ) | __riscv_vmsne_vx_i32m2_b16_mu(__VA_ARGS__) |
| #define vmsne_vx_i32m4_b8 | ( | ... | ) | __riscv_vmsne_vx_i32m4_b8(__VA_ARGS__) |
| #define vmsne_vx_i32m4_b8_m | ( | ... | ) | __riscv_vmsne_vx_i32m4_b8_mu(__VA_ARGS__) |
| #define vmsne_vx_i32m8_b4 | ( | ... | ) | __riscv_vmsne_vx_i32m8_b4(__VA_ARGS__) |
| #define vmsne_vx_i32m8_b4_m | ( | ... | ) | __riscv_vmsne_vx_i32m8_b4_mu(__VA_ARGS__) |
| #define vmsne_vx_i32mf2_b64 | ( | ... | ) | __riscv_vmsne_vx_i32mf2_b64(__VA_ARGS__) |
| #define vmsne_vx_i32mf2_b64_m | ( | ... | ) | __riscv_vmsne_vx_i32mf2_b64_mu(__VA_ARGS__) |
| #define vmsne_vx_i64m1_b64 | ( | ... | ) | __riscv_vmsne_vx_i64m1_b64(__VA_ARGS__) |
| #define vmsne_vx_i64m1_b64_m | ( | ... | ) | __riscv_vmsne_vx_i64m1_b64_mu(__VA_ARGS__) |
| #define vmsne_vx_i64m2_b32 | ( | ... | ) | __riscv_vmsne_vx_i64m2_b32(__VA_ARGS__) |
| #define vmsne_vx_i64m2_b32_m | ( | ... | ) | __riscv_vmsne_vx_i64m2_b32_mu(__VA_ARGS__) |
| #define vmsne_vx_i64m4_b16 | ( | ... | ) | __riscv_vmsne_vx_i64m4_b16(__VA_ARGS__) |
| #define vmsne_vx_i64m4_b16_m | ( | ... | ) | __riscv_vmsne_vx_i64m4_b16_mu(__VA_ARGS__) |
| #define vmsne_vx_i64m8_b8 | ( | ... | ) | __riscv_vmsne_vx_i64m8_b8(__VA_ARGS__) |
| #define vmsne_vx_i64m8_b8_m | ( | ... | ) | __riscv_vmsne_vx_i64m8_b8_mu(__VA_ARGS__) |
| #define vmsne_vx_i8m1_b8 | ( | ... | ) | __riscv_vmsne_vx_i8m1_b8(__VA_ARGS__) |
| #define vmsne_vx_i8m1_b8_m | ( | ... | ) | __riscv_vmsne_vx_i8m1_b8_mu(__VA_ARGS__) |
| #define vmsne_vx_i8m2_b4 | ( | ... | ) | __riscv_vmsne_vx_i8m2_b4(__VA_ARGS__) |
| #define vmsne_vx_i8m2_b4_m | ( | ... | ) | __riscv_vmsne_vx_i8m2_b4_mu(__VA_ARGS__) |
| #define vmsne_vx_i8m4_b2 | ( | ... | ) | __riscv_vmsne_vx_i8m4_b2(__VA_ARGS__) |
| #define vmsne_vx_i8m4_b2_m | ( | ... | ) | __riscv_vmsne_vx_i8m4_b2_mu(__VA_ARGS__) |
| #define vmsne_vx_i8m8_b1 | ( | ... | ) | __riscv_vmsne_vx_i8m8_b1(__VA_ARGS__) |
| #define vmsne_vx_i8m8_b1_m | ( | ... | ) | __riscv_vmsne_vx_i8m8_b1_mu(__VA_ARGS__) |
| #define vmsne_vx_i8mf2_b16 | ( | ... | ) | __riscv_vmsne_vx_i8mf2_b16(__VA_ARGS__) |
| #define vmsne_vx_i8mf2_b16_m | ( | ... | ) | __riscv_vmsne_vx_i8mf2_b16_mu(__VA_ARGS__) |
| #define vmsne_vx_i8mf4_b32 | ( | ... | ) | __riscv_vmsne_vx_i8mf4_b32(__VA_ARGS__) |
| #define vmsne_vx_i8mf4_b32_m | ( | ... | ) | __riscv_vmsne_vx_i8mf4_b32_mu(__VA_ARGS__) |
| #define vmsne_vx_i8mf8_b64 | ( | ... | ) | __riscv_vmsne_vx_i8mf8_b64(__VA_ARGS__) |
| #define vmsne_vx_i8mf8_b64_m | ( | ... | ) | __riscv_vmsne_vx_i8mf8_b64_mu(__VA_ARGS__) |
| #define vmsne_vx_u16m1_b16 | ( | ... | ) | __riscv_vmsne_vx_u16m1_b16(__VA_ARGS__) |
| #define vmsne_vx_u16m1_b16_m | ( | ... | ) | __riscv_vmsne_vx_u16m1_b16_mu(__VA_ARGS__) |
| #define vmsne_vx_u16m2_b8 | ( | ... | ) | __riscv_vmsne_vx_u16m2_b8(__VA_ARGS__) |
| #define vmsne_vx_u16m2_b8_m | ( | ... | ) | __riscv_vmsne_vx_u16m2_b8_mu(__VA_ARGS__) |
| #define vmsne_vx_u16m4_b4 | ( | ... | ) | __riscv_vmsne_vx_u16m4_b4(__VA_ARGS__) |
| #define vmsne_vx_u16m4_b4_m | ( | ... | ) | __riscv_vmsne_vx_u16m4_b4_mu(__VA_ARGS__) |
| #define vmsne_vx_u16m8_b2 | ( | ... | ) | __riscv_vmsne_vx_u16m8_b2(__VA_ARGS__) |
| #define vmsne_vx_u16m8_b2_m | ( | ... | ) | __riscv_vmsne_vx_u16m8_b2_mu(__VA_ARGS__) |
| #define vmsne_vx_u16mf2_b32 | ( | ... | ) | __riscv_vmsne_vx_u16mf2_b32(__VA_ARGS__) |
| #define vmsne_vx_u16mf2_b32_m | ( | ... | ) | __riscv_vmsne_vx_u16mf2_b32_mu(__VA_ARGS__) |
| #define vmsne_vx_u16mf4_b64 | ( | ... | ) | __riscv_vmsne_vx_u16mf4_b64(__VA_ARGS__) |
| #define vmsne_vx_u16mf4_b64_m | ( | ... | ) | __riscv_vmsne_vx_u16mf4_b64_mu(__VA_ARGS__) |
| #define vmsne_vx_u32m1_b32 | ( | ... | ) | __riscv_vmsne_vx_u32m1_b32(__VA_ARGS__) |
| #define vmsne_vx_u32m1_b32_m | ( | ... | ) | __riscv_vmsne_vx_u32m1_b32_mu(__VA_ARGS__) |
| #define vmsne_vx_u32m2_b16 | ( | ... | ) | __riscv_vmsne_vx_u32m2_b16(__VA_ARGS__) |
| #define vmsne_vx_u32m2_b16_m | ( | ... | ) | __riscv_vmsne_vx_u32m2_b16_mu(__VA_ARGS__) |
| #define vmsne_vx_u32m4_b8 | ( | ... | ) | __riscv_vmsne_vx_u32m4_b8(__VA_ARGS__) |
| #define vmsne_vx_u32m4_b8_m | ( | ... | ) | __riscv_vmsne_vx_u32m4_b8_mu(__VA_ARGS__) |
| #define vmsne_vx_u32m8_b4 | ( | ... | ) | __riscv_vmsne_vx_u32m8_b4(__VA_ARGS__) |
| #define vmsne_vx_u32m8_b4_m | ( | ... | ) | __riscv_vmsne_vx_u32m8_b4_mu(__VA_ARGS__) |
| #define vmsne_vx_u32mf2_b64 | ( | ... | ) | __riscv_vmsne_vx_u32mf2_b64(__VA_ARGS__) |
| #define vmsne_vx_u32mf2_b64_m | ( | ... | ) | __riscv_vmsne_vx_u32mf2_b64_mu(__VA_ARGS__) |
| #define vmsne_vx_u64m1_b64 | ( | ... | ) | __riscv_vmsne_vx_u64m1_b64(__VA_ARGS__) |
| #define vmsne_vx_u64m1_b64_m | ( | ... | ) | __riscv_vmsne_vx_u64m1_b64_mu(__VA_ARGS__) |
| #define vmsne_vx_u64m2_b32 | ( | ... | ) | __riscv_vmsne_vx_u64m2_b32(__VA_ARGS__) |
| #define vmsne_vx_u64m2_b32_m | ( | ... | ) | __riscv_vmsne_vx_u64m2_b32_mu(__VA_ARGS__) |
| #define vmsne_vx_u64m4_b16 | ( | ... | ) | __riscv_vmsne_vx_u64m4_b16(__VA_ARGS__) |
| #define vmsne_vx_u64m4_b16_m | ( | ... | ) | __riscv_vmsne_vx_u64m4_b16_mu(__VA_ARGS__) |
| #define vmsne_vx_u64m8_b8 | ( | ... | ) | __riscv_vmsne_vx_u64m8_b8(__VA_ARGS__) |
| #define vmsne_vx_u64m8_b8_m | ( | ... | ) | __riscv_vmsne_vx_u64m8_b8_mu(__VA_ARGS__) |
| #define vmsne_vx_u8m1_b8 | ( | ... | ) | __riscv_vmsne_vx_u8m1_b8(__VA_ARGS__) |
| #define vmsne_vx_u8m1_b8_m | ( | ... | ) | __riscv_vmsne_vx_u8m1_b8_mu(__VA_ARGS__) |
| #define vmsne_vx_u8m2_b4 | ( | ... | ) | __riscv_vmsne_vx_u8m2_b4(__VA_ARGS__) |
| #define vmsne_vx_u8m2_b4_m | ( | ... | ) | __riscv_vmsne_vx_u8m2_b4_mu(__VA_ARGS__) |
| #define vmsne_vx_u8m4_b2 | ( | ... | ) | __riscv_vmsne_vx_u8m4_b2(__VA_ARGS__) |
| #define vmsne_vx_u8m4_b2_m | ( | ... | ) | __riscv_vmsne_vx_u8m4_b2_mu(__VA_ARGS__) |
| #define vmsne_vx_u8m8_b1 | ( | ... | ) | __riscv_vmsne_vx_u8m8_b1(__VA_ARGS__) |
| #define vmsne_vx_u8m8_b1_m | ( | ... | ) | __riscv_vmsne_vx_u8m8_b1_mu(__VA_ARGS__) |
| #define vmsne_vx_u8mf2_b16 | ( | ... | ) | __riscv_vmsne_vx_u8mf2_b16(__VA_ARGS__) |
| #define vmsne_vx_u8mf2_b16_m | ( | ... | ) | __riscv_vmsne_vx_u8mf2_b16_mu(__VA_ARGS__) |
| #define vmsne_vx_u8mf4_b32 | ( | ... | ) | __riscv_vmsne_vx_u8mf4_b32(__VA_ARGS__) |
| #define vmsne_vx_u8mf4_b32_m | ( | ... | ) | __riscv_vmsne_vx_u8mf4_b32_mu(__VA_ARGS__) |
| #define vmsne_vx_u8mf8_b64 | ( | ... | ) | __riscv_vmsne_vx_u8mf8_b64(__VA_ARGS__) |
| #define vmsne_vx_u8mf8_b64_m | ( | ... | ) | __riscv_vmsne_vx_u8mf8_b64_mu(__VA_ARGS__) |
| #define vmsof_m_b1 | ( | ... | ) | __riscv_vmsof_m_b1(__VA_ARGS__) |
| #define vmsof_m_b16 | ( | ... | ) | __riscv_vmsof_m_b16(__VA_ARGS__) |
| #define vmsof_m_b16_m | ( | ... | ) | __riscv_vmsof_m_b16_mu(__VA_ARGS__) |
| #define vmsof_m_b1_m | ( | ... | ) | __riscv_vmsof_m_b1_mu(__VA_ARGS__) |
| #define vmsof_m_b2 | ( | ... | ) | __riscv_vmsof_m_b2(__VA_ARGS__) |
| #define vmsof_m_b2_m | ( | ... | ) | __riscv_vmsof_m_b2_mu(__VA_ARGS__) |
| #define vmsof_m_b32 | ( | ... | ) | __riscv_vmsof_m_b32(__VA_ARGS__) |
| #define vmsof_m_b32_m | ( | ... | ) | __riscv_vmsof_m_b32_mu(__VA_ARGS__) |
| #define vmsof_m_b4 | ( | ... | ) | __riscv_vmsof_m_b4(__VA_ARGS__) |
| #define vmsof_m_b4_m | ( | ... | ) | __riscv_vmsof_m_b4_mu(__VA_ARGS__) |
| #define vmsof_m_b64 | ( | ... | ) | __riscv_vmsof_m_b64(__VA_ARGS__) |
| #define vmsof_m_b64_m | ( | ... | ) | __riscv_vmsof_m_b64_mu(__VA_ARGS__) |
| #define vmsof_m_b8 | ( | ... | ) | __riscv_vmsof_m_b8(__VA_ARGS__) |
| #define vmsof_m_b8_m | ( | ... | ) | __riscv_vmsof_m_b8_mu(__VA_ARGS__) |
| #define vmul_vv_i16m1 | ( | ... | ) | __riscv_vmul_vv_i16m1(__VA_ARGS__) |
| #define vmul_vv_i16m1_m | ( | ... | ) | __riscv_vmul_vv_i16m1_tumu(__VA_ARGS__) |
| #define vmul_vv_i16m2 | ( | ... | ) | __riscv_vmul_vv_i16m2(__VA_ARGS__) |
| #define vmul_vv_i16m2_m | ( | ... | ) | __riscv_vmul_vv_i16m2_tumu(__VA_ARGS__) |
| #define vmul_vv_i16m4 | ( | ... | ) | __riscv_vmul_vv_i16m4(__VA_ARGS__) |
| #define vmul_vv_i16m4_m | ( | ... | ) | __riscv_vmul_vv_i16m4_tumu(__VA_ARGS__) |
| #define vmul_vv_i16m8 | ( | ... | ) | __riscv_vmul_vv_i16m8(__VA_ARGS__) |
| #define vmul_vv_i16m8_m | ( | ... | ) | __riscv_vmul_vv_i16m8_tumu(__VA_ARGS__) |
| #define vmul_vv_i16mf2 | ( | ... | ) | __riscv_vmul_vv_i16mf2(__VA_ARGS__) |
| #define vmul_vv_i16mf2_m | ( | ... | ) | __riscv_vmul_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vmul_vv_i16mf4 | ( | ... | ) | __riscv_vmul_vv_i16mf4(__VA_ARGS__) |
| #define vmul_vv_i16mf4_m | ( | ... | ) | __riscv_vmul_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vmul_vv_i32m1 | ( | ... | ) | __riscv_vmul_vv_i32m1(__VA_ARGS__) |
| #define vmul_vv_i32m1_m | ( | ... | ) | __riscv_vmul_vv_i32m1_tumu(__VA_ARGS__) |
| #define vmul_vv_i32m2 | ( | ... | ) | __riscv_vmul_vv_i32m2(__VA_ARGS__) |
| #define vmul_vv_i32m2_m | ( | ... | ) | __riscv_vmul_vv_i32m2_tumu(__VA_ARGS__) |
| #define vmul_vv_i32m4 | ( | ... | ) | __riscv_vmul_vv_i32m4(__VA_ARGS__) |
| #define vmul_vv_i32m4_m | ( | ... | ) | __riscv_vmul_vv_i32m4_tumu(__VA_ARGS__) |
| #define vmul_vv_i32m8 | ( | ... | ) | __riscv_vmul_vv_i32m8(__VA_ARGS__) |
| #define vmul_vv_i32m8_m | ( | ... | ) | __riscv_vmul_vv_i32m8_tumu(__VA_ARGS__) |
| #define vmul_vv_i32mf2 | ( | ... | ) | __riscv_vmul_vv_i32mf2(__VA_ARGS__) |
| #define vmul_vv_i32mf2_m | ( | ... | ) | __riscv_vmul_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vmul_vv_i64m1 | ( | ... | ) | __riscv_vmul_vv_i64m1(__VA_ARGS__) |
| #define vmul_vv_i64m1_m | ( | ... | ) | __riscv_vmul_vv_i64m1_tumu(__VA_ARGS__) |
| #define vmul_vv_i64m2 | ( | ... | ) | __riscv_vmul_vv_i64m2(__VA_ARGS__) |
| #define vmul_vv_i64m2_m | ( | ... | ) | __riscv_vmul_vv_i64m2_tumu(__VA_ARGS__) |
| #define vmul_vv_i64m4 | ( | ... | ) | __riscv_vmul_vv_i64m4(__VA_ARGS__) |
| #define vmul_vv_i64m4_m | ( | ... | ) | __riscv_vmul_vv_i64m4_tumu(__VA_ARGS__) |
| #define vmul_vv_i64m8 | ( | ... | ) | __riscv_vmul_vv_i64m8(__VA_ARGS__) |
| #define vmul_vv_i64m8_m | ( | ... | ) | __riscv_vmul_vv_i64m8_tumu(__VA_ARGS__) |
| #define vmul_vv_i8m1 | ( | ... | ) | __riscv_vmul_vv_i8m1(__VA_ARGS__) |
| #define vmul_vv_i8m1_m | ( | ... | ) | __riscv_vmul_vv_i8m1_tumu(__VA_ARGS__) |
| #define vmul_vv_i8m2 | ( | ... | ) | __riscv_vmul_vv_i8m2(__VA_ARGS__) |
| #define vmul_vv_i8m2_m | ( | ... | ) | __riscv_vmul_vv_i8m2_tumu(__VA_ARGS__) |
| #define vmul_vv_i8m4 | ( | ... | ) | __riscv_vmul_vv_i8m4(__VA_ARGS__) |
| #define vmul_vv_i8m4_m | ( | ... | ) | __riscv_vmul_vv_i8m4_tumu(__VA_ARGS__) |
| #define vmul_vv_i8m8 | ( | ... | ) | __riscv_vmul_vv_i8m8(__VA_ARGS__) |
| #define vmul_vv_i8m8_m | ( | ... | ) | __riscv_vmul_vv_i8m8_tumu(__VA_ARGS__) |
| #define vmul_vv_i8mf2 | ( | ... | ) | __riscv_vmul_vv_i8mf2(__VA_ARGS__) |
| #define vmul_vv_i8mf2_m | ( | ... | ) | __riscv_vmul_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vmul_vv_i8mf4 | ( | ... | ) | __riscv_vmul_vv_i8mf4(__VA_ARGS__) |
| #define vmul_vv_i8mf4_m | ( | ... | ) | __riscv_vmul_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vmul_vv_i8mf8 | ( | ... | ) | __riscv_vmul_vv_i8mf8(__VA_ARGS__) |
| #define vmul_vv_i8mf8_m | ( | ... | ) | __riscv_vmul_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vmul_vv_u16m1 | ( | ... | ) | __riscv_vmul_vv_u16m1(__VA_ARGS__) |
| #define vmul_vv_u16m1_m | ( | ... | ) | __riscv_vmul_vv_u16m1_tumu(__VA_ARGS__) |
| #define vmul_vv_u16m2 | ( | ... | ) | __riscv_vmul_vv_u16m2(__VA_ARGS__) |
| #define vmul_vv_u16m2_m | ( | ... | ) | __riscv_vmul_vv_u16m2_tumu(__VA_ARGS__) |
| #define vmul_vv_u16m4 | ( | ... | ) | __riscv_vmul_vv_u16m4(__VA_ARGS__) |
| #define vmul_vv_u16m4_m | ( | ... | ) | __riscv_vmul_vv_u16m4_tumu(__VA_ARGS__) |
| #define vmul_vv_u16m8 | ( | ... | ) | __riscv_vmul_vv_u16m8(__VA_ARGS__) |
| #define vmul_vv_u16m8_m | ( | ... | ) | __riscv_vmul_vv_u16m8_tumu(__VA_ARGS__) |
| #define vmul_vv_u16mf2 | ( | ... | ) | __riscv_vmul_vv_u16mf2(__VA_ARGS__) |
| #define vmul_vv_u16mf2_m | ( | ... | ) | __riscv_vmul_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vmul_vv_u16mf4 | ( | ... | ) | __riscv_vmul_vv_u16mf4(__VA_ARGS__) |
| #define vmul_vv_u16mf4_m | ( | ... | ) | __riscv_vmul_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vmul_vv_u32m1 | ( | ... | ) | __riscv_vmul_vv_u32m1(__VA_ARGS__) |
| #define vmul_vv_u32m1_m | ( | ... | ) | __riscv_vmul_vv_u32m1_tumu(__VA_ARGS__) |
| #define vmul_vv_u32m2 | ( | ... | ) | __riscv_vmul_vv_u32m2(__VA_ARGS__) |
| #define vmul_vv_u32m2_m | ( | ... | ) | __riscv_vmul_vv_u32m2_tumu(__VA_ARGS__) |
| #define vmul_vv_u32m4 | ( | ... | ) | __riscv_vmul_vv_u32m4(__VA_ARGS__) |
| #define vmul_vv_u32m4_m | ( | ... | ) | __riscv_vmul_vv_u32m4_tumu(__VA_ARGS__) |
| #define vmul_vv_u32m8 | ( | ... | ) | __riscv_vmul_vv_u32m8(__VA_ARGS__) |
| #define vmul_vv_u32m8_m | ( | ... | ) | __riscv_vmul_vv_u32m8_tumu(__VA_ARGS__) |
| #define vmul_vv_u32mf2 | ( | ... | ) | __riscv_vmul_vv_u32mf2(__VA_ARGS__) |
| #define vmul_vv_u32mf2_m | ( | ... | ) | __riscv_vmul_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vmul_vv_u64m1 | ( | ... | ) | __riscv_vmul_vv_u64m1(__VA_ARGS__) |
| #define vmul_vv_u64m1_m | ( | ... | ) | __riscv_vmul_vv_u64m1_tumu(__VA_ARGS__) |
| #define vmul_vv_u64m2 | ( | ... | ) | __riscv_vmul_vv_u64m2(__VA_ARGS__) |
| #define vmul_vv_u64m2_m | ( | ... | ) | __riscv_vmul_vv_u64m2_tumu(__VA_ARGS__) |
| #define vmul_vv_u64m4 | ( | ... | ) | __riscv_vmul_vv_u64m4(__VA_ARGS__) |
| #define vmul_vv_u64m4_m | ( | ... | ) | __riscv_vmul_vv_u64m4_tumu(__VA_ARGS__) |
| #define vmul_vv_u64m8 | ( | ... | ) | __riscv_vmul_vv_u64m8(__VA_ARGS__) |
| #define vmul_vv_u64m8_m | ( | ... | ) | __riscv_vmul_vv_u64m8_tumu(__VA_ARGS__) |
| #define vmul_vv_u8m1 | ( | ... | ) | __riscv_vmul_vv_u8m1(__VA_ARGS__) |
| #define vmul_vv_u8m1_m | ( | ... | ) | __riscv_vmul_vv_u8m1_tumu(__VA_ARGS__) |
| #define vmul_vv_u8m2 | ( | ... | ) | __riscv_vmul_vv_u8m2(__VA_ARGS__) |
| #define vmul_vv_u8m2_m | ( | ... | ) | __riscv_vmul_vv_u8m2_tumu(__VA_ARGS__) |
| #define vmul_vv_u8m4 | ( | ... | ) | __riscv_vmul_vv_u8m4(__VA_ARGS__) |
| #define vmul_vv_u8m4_m | ( | ... | ) | __riscv_vmul_vv_u8m4_tumu(__VA_ARGS__) |
| #define vmul_vv_u8m8 | ( | ... | ) | __riscv_vmul_vv_u8m8(__VA_ARGS__) |
| #define vmul_vv_u8m8_m | ( | ... | ) | __riscv_vmul_vv_u8m8_tumu(__VA_ARGS__) |
| #define vmul_vv_u8mf2 | ( | ... | ) | __riscv_vmul_vv_u8mf2(__VA_ARGS__) |
| #define vmul_vv_u8mf2_m | ( | ... | ) | __riscv_vmul_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vmul_vv_u8mf4 | ( | ... | ) | __riscv_vmul_vv_u8mf4(__VA_ARGS__) |
| #define vmul_vv_u8mf4_m | ( | ... | ) | __riscv_vmul_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vmul_vv_u8mf8 | ( | ... | ) | __riscv_vmul_vv_u8mf8(__VA_ARGS__) |
| #define vmul_vv_u8mf8_m | ( | ... | ) | __riscv_vmul_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vmul_vx_i16m1 | ( | ... | ) | __riscv_vmul_vx_i16m1(__VA_ARGS__) |
| #define vmul_vx_i16m1_m | ( | ... | ) | __riscv_vmul_vx_i16m1_tumu(__VA_ARGS__) |
| #define vmul_vx_i16m2 | ( | ... | ) | __riscv_vmul_vx_i16m2(__VA_ARGS__) |
| #define vmul_vx_i16m2_m | ( | ... | ) | __riscv_vmul_vx_i16m2_tumu(__VA_ARGS__) |
| #define vmul_vx_i16m4 | ( | ... | ) | __riscv_vmul_vx_i16m4(__VA_ARGS__) |
| #define vmul_vx_i16m4_m | ( | ... | ) | __riscv_vmul_vx_i16m4_tumu(__VA_ARGS__) |
| #define vmul_vx_i16m8 | ( | ... | ) | __riscv_vmul_vx_i16m8(__VA_ARGS__) |
| #define vmul_vx_i16m8_m | ( | ... | ) | __riscv_vmul_vx_i16m8_tumu(__VA_ARGS__) |
| #define vmul_vx_i16mf2 | ( | ... | ) | __riscv_vmul_vx_i16mf2(__VA_ARGS__) |
| #define vmul_vx_i16mf2_m | ( | ... | ) | __riscv_vmul_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vmul_vx_i16mf4 | ( | ... | ) | __riscv_vmul_vx_i16mf4(__VA_ARGS__) |
| #define vmul_vx_i16mf4_m | ( | ... | ) | __riscv_vmul_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vmul_vx_i32m1 | ( | ... | ) | __riscv_vmul_vx_i32m1(__VA_ARGS__) |
| #define vmul_vx_i32m1_m | ( | ... | ) | __riscv_vmul_vx_i32m1_tumu(__VA_ARGS__) |
| #define vmul_vx_i32m2 | ( | ... | ) | __riscv_vmul_vx_i32m2(__VA_ARGS__) |
| #define vmul_vx_i32m2_m | ( | ... | ) | __riscv_vmul_vx_i32m2_tumu(__VA_ARGS__) |
| #define vmul_vx_i32m4 | ( | ... | ) | __riscv_vmul_vx_i32m4(__VA_ARGS__) |
| #define vmul_vx_i32m4_m | ( | ... | ) | __riscv_vmul_vx_i32m4_tumu(__VA_ARGS__) |
| #define vmul_vx_i32m8 | ( | ... | ) | __riscv_vmul_vx_i32m8(__VA_ARGS__) |
| #define vmul_vx_i32m8_m | ( | ... | ) | __riscv_vmul_vx_i32m8_tumu(__VA_ARGS__) |
| #define vmul_vx_i32mf2 | ( | ... | ) | __riscv_vmul_vx_i32mf2(__VA_ARGS__) |
| #define vmul_vx_i32mf2_m | ( | ... | ) | __riscv_vmul_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vmul_vx_i64m1 | ( | ... | ) | __riscv_vmul_vx_i64m1(__VA_ARGS__) |
| #define vmul_vx_i64m1_m | ( | ... | ) | __riscv_vmul_vx_i64m1_tumu(__VA_ARGS__) |
| #define vmul_vx_i64m2 | ( | ... | ) | __riscv_vmul_vx_i64m2(__VA_ARGS__) |
| #define vmul_vx_i64m2_m | ( | ... | ) | __riscv_vmul_vx_i64m2_tumu(__VA_ARGS__) |
| #define vmul_vx_i64m4 | ( | ... | ) | __riscv_vmul_vx_i64m4(__VA_ARGS__) |
| #define vmul_vx_i64m4_m | ( | ... | ) | __riscv_vmul_vx_i64m4_tumu(__VA_ARGS__) |
| #define vmul_vx_i64m8 | ( | ... | ) | __riscv_vmul_vx_i64m8(__VA_ARGS__) |
| #define vmul_vx_i64m8_m | ( | ... | ) | __riscv_vmul_vx_i64m8_tumu(__VA_ARGS__) |
| #define vmul_vx_i8m1 | ( | ... | ) | __riscv_vmul_vx_i8m1(__VA_ARGS__) |
| #define vmul_vx_i8m1_m | ( | ... | ) | __riscv_vmul_vx_i8m1_tumu(__VA_ARGS__) |
| #define vmul_vx_i8m2 | ( | ... | ) | __riscv_vmul_vx_i8m2(__VA_ARGS__) |
| #define vmul_vx_i8m2_m | ( | ... | ) | __riscv_vmul_vx_i8m2_tumu(__VA_ARGS__) |
| #define vmul_vx_i8m4 | ( | ... | ) | __riscv_vmul_vx_i8m4(__VA_ARGS__) |
| #define vmul_vx_i8m4_m | ( | ... | ) | __riscv_vmul_vx_i8m4_tumu(__VA_ARGS__) |
| #define vmul_vx_i8m8 | ( | ... | ) | __riscv_vmul_vx_i8m8(__VA_ARGS__) |
| #define vmul_vx_i8m8_m | ( | ... | ) | __riscv_vmul_vx_i8m8_tumu(__VA_ARGS__) |
| #define vmul_vx_i8mf2 | ( | ... | ) | __riscv_vmul_vx_i8mf2(__VA_ARGS__) |
| #define vmul_vx_i8mf2_m | ( | ... | ) | __riscv_vmul_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vmul_vx_i8mf4 | ( | ... | ) | __riscv_vmul_vx_i8mf4(__VA_ARGS__) |
| #define vmul_vx_i8mf4_m | ( | ... | ) | __riscv_vmul_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vmul_vx_i8mf8 | ( | ... | ) | __riscv_vmul_vx_i8mf8(__VA_ARGS__) |
| #define vmul_vx_i8mf8_m | ( | ... | ) | __riscv_vmul_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vmul_vx_u16m1 | ( | ... | ) | __riscv_vmul_vx_u16m1(__VA_ARGS__) |
| #define vmul_vx_u16m1_m | ( | ... | ) | __riscv_vmul_vx_u16m1_tumu(__VA_ARGS__) |
| #define vmul_vx_u16m2 | ( | ... | ) | __riscv_vmul_vx_u16m2(__VA_ARGS__) |
| #define vmul_vx_u16m2_m | ( | ... | ) | __riscv_vmul_vx_u16m2_tumu(__VA_ARGS__) |
| #define vmul_vx_u16m4 | ( | ... | ) | __riscv_vmul_vx_u16m4(__VA_ARGS__) |
| #define vmul_vx_u16m4_m | ( | ... | ) | __riscv_vmul_vx_u16m4_tumu(__VA_ARGS__) |
| #define vmul_vx_u16m8 | ( | ... | ) | __riscv_vmul_vx_u16m8(__VA_ARGS__) |
| #define vmul_vx_u16m8_m | ( | ... | ) | __riscv_vmul_vx_u16m8_tumu(__VA_ARGS__) |
| #define vmul_vx_u16mf2 | ( | ... | ) | __riscv_vmul_vx_u16mf2(__VA_ARGS__) |
| #define vmul_vx_u16mf2_m | ( | ... | ) | __riscv_vmul_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vmul_vx_u16mf4 | ( | ... | ) | __riscv_vmul_vx_u16mf4(__VA_ARGS__) |
| #define vmul_vx_u16mf4_m | ( | ... | ) | __riscv_vmul_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vmul_vx_u32m1 | ( | ... | ) | __riscv_vmul_vx_u32m1(__VA_ARGS__) |
| #define vmul_vx_u32m1_m | ( | ... | ) | __riscv_vmul_vx_u32m1_tumu(__VA_ARGS__) |
| #define vmul_vx_u32m2 | ( | ... | ) | __riscv_vmul_vx_u32m2(__VA_ARGS__) |
| #define vmul_vx_u32m2_m | ( | ... | ) | __riscv_vmul_vx_u32m2_tumu(__VA_ARGS__) |
| #define vmul_vx_u32m4 | ( | ... | ) | __riscv_vmul_vx_u32m4(__VA_ARGS__) |
| #define vmul_vx_u32m4_m | ( | ... | ) | __riscv_vmul_vx_u32m4_tumu(__VA_ARGS__) |
| #define vmul_vx_u32m8 | ( | ... | ) | __riscv_vmul_vx_u32m8(__VA_ARGS__) |
| #define vmul_vx_u32m8_m | ( | ... | ) | __riscv_vmul_vx_u32m8_tumu(__VA_ARGS__) |
| #define vmul_vx_u32mf2 | ( | ... | ) | __riscv_vmul_vx_u32mf2(__VA_ARGS__) |
| #define vmul_vx_u32mf2_m | ( | ... | ) | __riscv_vmul_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vmul_vx_u64m1 | ( | ... | ) | __riscv_vmul_vx_u64m1(__VA_ARGS__) |
| #define vmul_vx_u64m1_m | ( | ... | ) | __riscv_vmul_vx_u64m1_tumu(__VA_ARGS__) |
| #define vmul_vx_u64m2 | ( | ... | ) | __riscv_vmul_vx_u64m2(__VA_ARGS__) |
| #define vmul_vx_u64m2_m | ( | ... | ) | __riscv_vmul_vx_u64m2_tumu(__VA_ARGS__) |
| #define vmul_vx_u64m4 | ( | ... | ) | __riscv_vmul_vx_u64m4(__VA_ARGS__) |
| #define vmul_vx_u64m4_m | ( | ... | ) | __riscv_vmul_vx_u64m4_tumu(__VA_ARGS__) |
| #define vmul_vx_u64m8 | ( | ... | ) | __riscv_vmul_vx_u64m8(__VA_ARGS__) |
| #define vmul_vx_u64m8_m | ( | ... | ) | __riscv_vmul_vx_u64m8_tumu(__VA_ARGS__) |
| #define vmul_vx_u8m1 | ( | ... | ) | __riscv_vmul_vx_u8m1(__VA_ARGS__) |
| #define vmul_vx_u8m1_m | ( | ... | ) | __riscv_vmul_vx_u8m1_tumu(__VA_ARGS__) |
| #define vmul_vx_u8m2 | ( | ... | ) | __riscv_vmul_vx_u8m2(__VA_ARGS__) |
| #define vmul_vx_u8m2_m | ( | ... | ) | __riscv_vmul_vx_u8m2_tumu(__VA_ARGS__) |
| #define vmul_vx_u8m4 | ( | ... | ) | __riscv_vmul_vx_u8m4(__VA_ARGS__) |
| #define vmul_vx_u8m4_m | ( | ... | ) | __riscv_vmul_vx_u8m4_tumu(__VA_ARGS__) |
| #define vmul_vx_u8m8 | ( | ... | ) | __riscv_vmul_vx_u8m8(__VA_ARGS__) |
| #define vmul_vx_u8m8_m | ( | ... | ) | __riscv_vmul_vx_u8m8_tumu(__VA_ARGS__) |
| #define vmul_vx_u8mf2 | ( | ... | ) | __riscv_vmul_vx_u8mf2(__VA_ARGS__) |
| #define vmul_vx_u8mf2_m | ( | ... | ) | __riscv_vmul_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vmul_vx_u8mf4 | ( | ... | ) | __riscv_vmul_vx_u8mf4(__VA_ARGS__) |
| #define vmul_vx_u8mf4_m | ( | ... | ) | __riscv_vmul_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vmul_vx_u8mf8 | ( | ... | ) | __riscv_vmul_vx_u8mf8(__VA_ARGS__) |
| #define vmul_vx_u8mf8_m | ( | ... | ) | __riscv_vmul_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vmulh_vv_i16m1 | ( | ... | ) | __riscv_vmulh_vv_i16m1(__VA_ARGS__) |
| #define vmulh_vv_i16m1_m | ( | ... | ) | __riscv_vmulh_vv_i16m1_tumu(__VA_ARGS__) |
| #define vmulh_vv_i16m2 | ( | ... | ) | __riscv_vmulh_vv_i16m2(__VA_ARGS__) |
| #define vmulh_vv_i16m2_m | ( | ... | ) | __riscv_vmulh_vv_i16m2_tumu(__VA_ARGS__) |
| #define vmulh_vv_i16m4 | ( | ... | ) | __riscv_vmulh_vv_i16m4(__VA_ARGS__) |
| #define vmulh_vv_i16m4_m | ( | ... | ) | __riscv_vmulh_vv_i16m4_tumu(__VA_ARGS__) |
| #define vmulh_vv_i16m8 | ( | ... | ) | __riscv_vmulh_vv_i16m8(__VA_ARGS__) |
| #define vmulh_vv_i16m8_m | ( | ... | ) | __riscv_vmulh_vv_i16m8_tumu(__VA_ARGS__) |
| #define vmulh_vv_i16mf2 | ( | ... | ) | __riscv_vmulh_vv_i16mf2(__VA_ARGS__) |
| #define vmulh_vv_i16mf2_m | ( | ... | ) | __riscv_vmulh_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vmulh_vv_i16mf4 | ( | ... | ) | __riscv_vmulh_vv_i16mf4(__VA_ARGS__) |
| #define vmulh_vv_i16mf4_m | ( | ... | ) | __riscv_vmulh_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vmulh_vv_i32m1 | ( | ... | ) | __riscv_vmulh_vv_i32m1(__VA_ARGS__) |
| #define vmulh_vv_i32m1_m | ( | ... | ) | __riscv_vmulh_vv_i32m1_tumu(__VA_ARGS__) |
| #define vmulh_vv_i32m2 | ( | ... | ) | __riscv_vmulh_vv_i32m2(__VA_ARGS__) |
| #define vmulh_vv_i32m2_m | ( | ... | ) | __riscv_vmulh_vv_i32m2_tumu(__VA_ARGS__) |
| #define vmulh_vv_i32m4 | ( | ... | ) | __riscv_vmulh_vv_i32m4(__VA_ARGS__) |
| #define vmulh_vv_i32m4_m | ( | ... | ) | __riscv_vmulh_vv_i32m4_tumu(__VA_ARGS__) |
| #define vmulh_vv_i32m8 | ( | ... | ) | __riscv_vmulh_vv_i32m8(__VA_ARGS__) |
| #define vmulh_vv_i32m8_m | ( | ... | ) | __riscv_vmulh_vv_i32m8_tumu(__VA_ARGS__) |
| #define vmulh_vv_i32mf2 | ( | ... | ) | __riscv_vmulh_vv_i32mf2(__VA_ARGS__) |
| #define vmulh_vv_i32mf2_m | ( | ... | ) | __riscv_vmulh_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vmulh_vv_i64m1 | ( | ... | ) | __riscv_vmulh_vv_i64m1(__VA_ARGS__) |
| #define vmulh_vv_i64m1_m | ( | ... | ) | __riscv_vmulh_vv_i64m1_tumu(__VA_ARGS__) |
| #define vmulh_vv_i64m2 | ( | ... | ) | __riscv_vmulh_vv_i64m2(__VA_ARGS__) |
| #define vmulh_vv_i64m2_m | ( | ... | ) | __riscv_vmulh_vv_i64m2_tumu(__VA_ARGS__) |
| #define vmulh_vv_i64m4 | ( | ... | ) | __riscv_vmulh_vv_i64m4(__VA_ARGS__) |
| #define vmulh_vv_i64m4_m | ( | ... | ) | __riscv_vmulh_vv_i64m4_tumu(__VA_ARGS__) |
| #define vmulh_vv_i64m8 | ( | ... | ) | __riscv_vmulh_vv_i64m8(__VA_ARGS__) |
| #define vmulh_vv_i64m8_m | ( | ... | ) | __riscv_vmulh_vv_i64m8_tumu(__VA_ARGS__) |
| #define vmulh_vv_i8m1 | ( | ... | ) | __riscv_vmulh_vv_i8m1(__VA_ARGS__) |
| #define vmulh_vv_i8m1_m | ( | ... | ) | __riscv_vmulh_vv_i8m1_tumu(__VA_ARGS__) |
| #define vmulh_vv_i8m2 | ( | ... | ) | __riscv_vmulh_vv_i8m2(__VA_ARGS__) |
| #define vmulh_vv_i8m2_m | ( | ... | ) | __riscv_vmulh_vv_i8m2_tumu(__VA_ARGS__) |
| #define vmulh_vv_i8m4 | ( | ... | ) | __riscv_vmulh_vv_i8m4(__VA_ARGS__) |
| #define vmulh_vv_i8m4_m | ( | ... | ) | __riscv_vmulh_vv_i8m4_tumu(__VA_ARGS__) |
| #define vmulh_vv_i8m8 | ( | ... | ) | __riscv_vmulh_vv_i8m8(__VA_ARGS__) |
| #define vmulh_vv_i8m8_m | ( | ... | ) | __riscv_vmulh_vv_i8m8_tumu(__VA_ARGS__) |
| #define vmulh_vv_i8mf2 | ( | ... | ) | __riscv_vmulh_vv_i8mf2(__VA_ARGS__) |
| #define vmulh_vv_i8mf2_m | ( | ... | ) | __riscv_vmulh_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vmulh_vv_i8mf4 | ( | ... | ) | __riscv_vmulh_vv_i8mf4(__VA_ARGS__) |
| #define vmulh_vv_i8mf4_m | ( | ... | ) | __riscv_vmulh_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vmulh_vv_i8mf8 | ( | ... | ) | __riscv_vmulh_vv_i8mf8(__VA_ARGS__) |
| #define vmulh_vv_i8mf8_m | ( | ... | ) | __riscv_vmulh_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vmulh_vx_i16m1 | ( | ... | ) | __riscv_vmulh_vx_i16m1(__VA_ARGS__) |
| #define vmulh_vx_i16m1_m | ( | ... | ) | __riscv_vmulh_vx_i16m1_tumu(__VA_ARGS__) |
| #define vmulh_vx_i16m2 | ( | ... | ) | __riscv_vmulh_vx_i16m2(__VA_ARGS__) |
| #define vmulh_vx_i16m2_m | ( | ... | ) | __riscv_vmulh_vx_i16m2_tumu(__VA_ARGS__) |
| #define vmulh_vx_i16m4 | ( | ... | ) | __riscv_vmulh_vx_i16m4(__VA_ARGS__) |
| #define vmulh_vx_i16m4_m | ( | ... | ) | __riscv_vmulh_vx_i16m4_tumu(__VA_ARGS__) |
| #define vmulh_vx_i16m8 | ( | ... | ) | __riscv_vmulh_vx_i16m8(__VA_ARGS__) |
| #define vmulh_vx_i16m8_m | ( | ... | ) | __riscv_vmulh_vx_i16m8_tumu(__VA_ARGS__) |
| #define vmulh_vx_i16mf2 | ( | ... | ) | __riscv_vmulh_vx_i16mf2(__VA_ARGS__) |
| #define vmulh_vx_i16mf2_m | ( | ... | ) | __riscv_vmulh_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vmulh_vx_i16mf4 | ( | ... | ) | __riscv_vmulh_vx_i16mf4(__VA_ARGS__) |
| #define vmulh_vx_i16mf4_m | ( | ... | ) | __riscv_vmulh_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vmulh_vx_i32m1 | ( | ... | ) | __riscv_vmulh_vx_i32m1(__VA_ARGS__) |
| #define vmulh_vx_i32m1_m | ( | ... | ) | __riscv_vmulh_vx_i32m1_tumu(__VA_ARGS__) |
| #define vmulh_vx_i32m2 | ( | ... | ) | __riscv_vmulh_vx_i32m2(__VA_ARGS__) |
| #define vmulh_vx_i32m2_m | ( | ... | ) | __riscv_vmulh_vx_i32m2_tumu(__VA_ARGS__) |
| #define vmulh_vx_i32m4 | ( | ... | ) | __riscv_vmulh_vx_i32m4(__VA_ARGS__) |
| #define vmulh_vx_i32m4_m | ( | ... | ) | __riscv_vmulh_vx_i32m4_tumu(__VA_ARGS__) |
| #define vmulh_vx_i32m8 | ( | ... | ) | __riscv_vmulh_vx_i32m8(__VA_ARGS__) |
| #define vmulh_vx_i32m8_m | ( | ... | ) | __riscv_vmulh_vx_i32m8_tumu(__VA_ARGS__) |
| #define vmulh_vx_i32mf2 | ( | ... | ) | __riscv_vmulh_vx_i32mf2(__VA_ARGS__) |
| #define vmulh_vx_i32mf2_m | ( | ... | ) | __riscv_vmulh_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vmulh_vx_i64m1 | ( | ... | ) | __riscv_vmulh_vx_i64m1(__VA_ARGS__) |
| #define vmulh_vx_i64m1_m | ( | ... | ) | __riscv_vmulh_vx_i64m1_tumu(__VA_ARGS__) |
| #define vmulh_vx_i64m2 | ( | ... | ) | __riscv_vmulh_vx_i64m2(__VA_ARGS__) |
| #define vmulh_vx_i64m2_m | ( | ... | ) | __riscv_vmulh_vx_i64m2_tumu(__VA_ARGS__) |
| #define vmulh_vx_i64m4 | ( | ... | ) | __riscv_vmulh_vx_i64m4(__VA_ARGS__) |
| #define vmulh_vx_i64m4_m | ( | ... | ) | __riscv_vmulh_vx_i64m4_tumu(__VA_ARGS__) |
| #define vmulh_vx_i64m8 | ( | ... | ) | __riscv_vmulh_vx_i64m8(__VA_ARGS__) |
| #define vmulh_vx_i64m8_m | ( | ... | ) | __riscv_vmulh_vx_i64m8_tumu(__VA_ARGS__) |
| #define vmulh_vx_i8m1 | ( | ... | ) | __riscv_vmulh_vx_i8m1(__VA_ARGS__) |
| #define vmulh_vx_i8m1_m | ( | ... | ) | __riscv_vmulh_vx_i8m1_tumu(__VA_ARGS__) |
| #define vmulh_vx_i8m2 | ( | ... | ) | __riscv_vmulh_vx_i8m2(__VA_ARGS__) |
| #define vmulh_vx_i8m2_m | ( | ... | ) | __riscv_vmulh_vx_i8m2_tumu(__VA_ARGS__) |
| #define vmulh_vx_i8m4 | ( | ... | ) | __riscv_vmulh_vx_i8m4(__VA_ARGS__) |
| #define vmulh_vx_i8m4_m | ( | ... | ) | __riscv_vmulh_vx_i8m4_tumu(__VA_ARGS__) |
| #define vmulh_vx_i8m8 | ( | ... | ) | __riscv_vmulh_vx_i8m8(__VA_ARGS__) |
| #define vmulh_vx_i8m8_m | ( | ... | ) | __riscv_vmulh_vx_i8m8_tumu(__VA_ARGS__) |
| #define vmulh_vx_i8mf2 | ( | ... | ) | __riscv_vmulh_vx_i8mf2(__VA_ARGS__) |
| #define vmulh_vx_i8mf2_m | ( | ... | ) | __riscv_vmulh_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vmulh_vx_i8mf4 | ( | ... | ) | __riscv_vmulh_vx_i8mf4(__VA_ARGS__) |
| #define vmulh_vx_i8mf4_m | ( | ... | ) | __riscv_vmulh_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vmulh_vx_i8mf8 | ( | ... | ) | __riscv_vmulh_vx_i8mf8(__VA_ARGS__) |
| #define vmulh_vx_i8mf8_m | ( | ... | ) | __riscv_vmulh_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i16m1 | ( | ... | ) | __riscv_vmulhsu_vv_i16m1(__VA_ARGS__) |
| #define vmulhsu_vv_i16m1_m | ( | ... | ) | __riscv_vmulhsu_vv_i16m1_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i16m2 | ( | ... | ) | __riscv_vmulhsu_vv_i16m2(__VA_ARGS__) |
| #define vmulhsu_vv_i16m2_m | ( | ... | ) | __riscv_vmulhsu_vv_i16m2_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i16m4 | ( | ... | ) | __riscv_vmulhsu_vv_i16m4(__VA_ARGS__) |
| #define vmulhsu_vv_i16m4_m | ( | ... | ) | __riscv_vmulhsu_vv_i16m4_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i16m8 | ( | ... | ) | __riscv_vmulhsu_vv_i16m8(__VA_ARGS__) |
| #define vmulhsu_vv_i16m8_m | ( | ... | ) | __riscv_vmulhsu_vv_i16m8_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i16mf2 | ( | ... | ) | __riscv_vmulhsu_vv_i16mf2(__VA_ARGS__) |
| #define vmulhsu_vv_i16mf2_m | ( | ... | ) | __riscv_vmulhsu_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i16mf4 | ( | ... | ) | __riscv_vmulhsu_vv_i16mf4(__VA_ARGS__) |
| #define vmulhsu_vv_i16mf4_m | ( | ... | ) | __riscv_vmulhsu_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i32m1 | ( | ... | ) | __riscv_vmulhsu_vv_i32m1(__VA_ARGS__) |
| #define vmulhsu_vv_i32m1_m | ( | ... | ) | __riscv_vmulhsu_vv_i32m1_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i32m2 | ( | ... | ) | __riscv_vmulhsu_vv_i32m2(__VA_ARGS__) |
| #define vmulhsu_vv_i32m2_m | ( | ... | ) | __riscv_vmulhsu_vv_i32m2_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i32m4 | ( | ... | ) | __riscv_vmulhsu_vv_i32m4(__VA_ARGS__) |
| #define vmulhsu_vv_i32m4_m | ( | ... | ) | __riscv_vmulhsu_vv_i32m4_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i32m8 | ( | ... | ) | __riscv_vmulhsu_vv_i32m8(__VA_ARGS__) |
| #define vmulhsu_vv_i32m8_m | ( | ... | ) | __riscv_vmulhsu_vv_i32m8_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i32mf2 | ( | ... | ) | __riscv_vmulhsu_vv_i32mf2(__VA_ARGS__) |
| #define vmulhsu_vv_i32mf2_m | ( | ... | ) | __riscv_vmulhsu_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i64m1 | ( | ... | ) | __riscv_vmulhsu_vv_i64m1(__VA_ARGS__) |
| #define vmulhsu_vv_i64m1_m | ( | ... | ) | __riscv_vmulhsu_vv_i64m1_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i64m2 | ( | ... | ) | __riscv_vmulhsu_vv_i64m2(__VA_ARGS__) |
| #define vmulhsu_vv_i64m2_m | ( | ... | ) | __riscv_vmulhsu_vv_i64m2_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i64m4 | ( | ... | ) | __riscv_vmulhsu_vv_i64m4(__VA_ARGS__) |
| #define vmulhsu_vv_i64m4_m | ( | ... | ) | __riscv_vmulhsu_vv_i64m4_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i64m8 | ( | ... | ) | __riscv_vmulhsu_vv_i64m8(__VA_ARGS__) |
| #define vmulhsu_vv_i64m8_m | ( | ... | ) | __riscv_vmulhsu_vv_i64m8_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i8m1 | ( | ... | ) | __riscv_vmulhsu_vv_i8m1(__VA_ARGS__) |
| #define vmulhsu_vv_i8m1_m | ( | ... | ) | __riscv_vmulhsu_vv_i8m1_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i8m2 | ( | ... | ) | __riscv_vmulhsu_vv_i8m2(__VA_ARGS__) |
| #define vmulhsu_vv_i8m2_m | ( | ... | ) | __riscv_vmulhsu_vv_i8m2_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i8m4 | ( | ... | ) | __riscv_vmulhsu_vv_i8m4(__VA_ARGS__) |
| #define vmulhsu_vv_i8m4_m | ( | ... | ) | __riscv_vmulhsu_vv_i8m4_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i8m8 | ( | ... | ) | __riscv_vmulhsu_vv_i8m8(__VA_ARGS__) |
| #define vmulhsu_vv_i8m8_m | ( | ... | ) | __riscv_vmulhsu_vv_i8m8_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i8mf2 | ( | ... | ) | __riscv_vmulhsu_vv_i8mf2(__VA_ARGS__) |
| #define vmulhsu_vv_i8mf2_m | ( | ... | ) | __riscv_vmulhsu_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i8mf4 | ( | ... | ) | __riscv_vmulhsu_vv_i8mf4(__VA_ARGS__) |
| #define vmulhsu_vv_i8mf4_m | ( | ... | ) | __riscv_vmulhsu_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vmulhsu_vv_i8mf8 | ( | ... | ) | __riscv_vmulhsu_vv_i8mf8(__VA_ARGS__) |
| #define vmulhsu_vv_i8mf8_m | ( | ... | ) | __riscv_vmulhsu_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i16m1 | ( | ... | ) | __riscv_vmulhsu_vx_i16m1(__VA_ARGS__) |
| #define vmulhsu_vx_i16m1_m | ( | ... | ) | __riscv_vmulhsu_vx_i16m1_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i16m2 | ( | ... | ) | __riscv_vmulhsu_vx_i16m2(__VA_ARGS__) |
| #define vmulhsu_vx_i16m2_m | ( | ... | ) | __riscv_vmulhsu_vx_i16m2_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i16m4 | ( | ... | ) | __riscv_vmulhsu_vx_i16m4(__VA_ARGS__) |
| #define vmulhsu_vx_i16m4_m | ( | ... | ) | __riscv_vmulhsu_vx_i16m4_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i16m8 | ( | ... | ) | __riscv_vmulhsu_vx_i16m8(__VA_ARGS__) |
| #define vmulhsu_vx_i16m8_m | ( | ... | ) | __riscv_vmulhsu_vx_i16m8_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i16mf2 | ( | ... | ) | __riscv_vmulhsu_vx_i16mf2(__VA_ARGS__) |
| #define vmulhsu_vx_i16mf2_m | ( | ... | ) | __riscv_vmulhsu_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i16mf4 | ( | ... | ) | __riscv_vmulhsu_vx_i16mf4(__VA_ARGS__) |
| #define vmulhsu_vx_i16mf4_m | ( | ... | ) | __riscv_vmulhsu_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i32m1 | ( | ... | ) | __riscv_vmulhsu_vx_i32m1(__VA_ARGS__) |
| #define vmulhsu_vx_i32m1_m | ( | ... | ) | __riscv_vmulhsu_vx_i32m1_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i32m2 | ( | ... | ) | __riscv_vmulhsu_vx_i32m2(__VA_ARGS__) |
| #define vmulhsu_vx_i32m2_m | ( | ... | ) | __riscv_vmulhsu_vx_i32m2_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i32m4 | ( | ... | ) | __riscv_vmulhsu_vx_i32m4(__VA_ARGS__) |
| #define vmulhsu_vx_i32m4_m | ( | ... | ) | __riscv_vmulhsu_vx_i32m4_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i32m8 | ( | ... | ) | __riscv_vmulhsu_vx_i32m8(__VA_ARGS__) |
| #define vmulhsu_vx_i32m8_m | ( | ... | ) | __riscv_vmulhsu_vx_i32m8_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i32mf2 | ( | ... | ) | __riscv_vmulhsu_vx_i32mf2(__VA_ARGS__) |
| #define vmulhsu_vx_i32mf2_m | ( | ... | ) | __riscv_vmulhsu_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i64m1 | ( | ... | ) | __riscv_vmulhsu_vx_i64m1(__VA_ARGS__) |
| #define vmulhsu_vx_i64m1_m | ( | ... | ) | __riscv_vmulhsu_vx_i64m1_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i64m2 | ( | ... | ) | __riscv_vmulhsu_vx_i64m2(__VA_ARGS__) |
| #define vmulhsu_vx_i64m2_m | ( | ... | ) | __riscv_vmulhsu_vx_i64m2_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i64m4 | ( | ... | ) | __riscv_vmulhsu_vx_i64m4(__VA_ARGS__) |
| #define vmulhsu_vx_i64m4_m | ( | ... | ) | __riscv_vmulhsu_vx_i64m4_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i64m8 | ( | ... | ) | __riscv_vmulhsu_vx_i64m8(__VA_ARGS__) |
| #define vmulhsu_vx_i64m8_m | ( | ... | ) | __riscv_vmulhsu_vx_i64m8_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i8m1 | ( | ... | ) | __riscv_vmulhsu_vx_i8m1(__VA_ARGS__) |
| #define vmulhsu_vx_i8m1_m | ( | ... | ) | __riscv_vmulhsu_vx_i8m1_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i8m2 | ( | ... | ) | __riscv_vmulhsu_vx_i8m2(__VA_ARGS__) |
| #define vmulhsu_vx_i8m2_m | ( | ... | ) | __riscv_vmulhsu_vx_i8m2_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i8m4 | ( | ... | ) | __riscv_vmulhsu_vx_i8m4(__VA_ARGS__) |
| #define vmulhsu_vx_i8m4_m | ( | ... | ) | __riscv_vmulhsu_vx_i8m4_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i8m8 | ( | ... | ) | __riscv_vmulhsu_vx_i8m8(__VA_ARGS__) |
| #define vmulhsu_vx_i8m8_m | ( | ... | ) | __riscv_vmulhsu_vx_i8m8_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i8mf2 | ( | ... | ) | __riscv_vmulhsu_vx_i8mf2(__VA_ARGS__) |
| #define vmulhsu_vx_i8mf2_m | ( | ... | ) | __riscv_vmulhsu_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i8mf4 | ( | ... | ) | __riscv_vmulhsu_vx_i8mf4(__VA_ARGS__) |
| #define vmulhsu_vx_i8mf4_m | ( | ... | ) | __riscv_vmulhsu_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vmulhsu_vx_i8mf8 | ( | ... | ) | __riscv_vmulhsu_vx_i8mf8(__VA_ARGS__) |
| #define vmulhsu_vx_i8mf8_m | ( | ... | ) | __riscv_vmulhsu_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u16m1 | ( | ... | ) | __riscv_vmulhu_vv_u16m1(__VA_ARGS__) |
| #define vmulhu_vv_u16m1_m | ( | ... | ) | __riscv_vmulhu_vv_u16m1_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u16m2 | ( | ... | ) | __riscv_vmulhu_vv_u16m2(__VA_ARGS__) |
| #define vmulhu_vv_u16m2_m | ( | ... | ) | __riscv_vmulhu_vv_u16m2_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u16m4 | ( | ... | ) | __riscv_vmulhu_vv_u16m4(__VA_ARGS__) |
| #define vmulhu_vv_u16m4_m | ( | ... | ) | __riscv_vmulhu_vv_u16m4_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u16m8 | ( | ... | ) | __riscv_vmulhu_vv_u16m8(__VA_ARGS__) |
| #define vmulhu_vv_u16m8_m | ( | ... | ) | __riscv_vmulhu_vv_u16m8_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u16mf2 | ( | ... | ) | __riscv_vmulhu_vv_u16mf2(__VA_ARGS__) |
| #define vmulhu_vv_u16mf2_m | ( | ... | ) | __riscv_vmulhu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u16mf4 | ( | ... | ) | __riscv_vmulhu_vv_u16mf4(__VA_ARGS__) |
| #define vmulhu_vv_u16mf4_m | ( | ... | ) | __riscv_vmulhu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u32m1 | ( | ... | ) | __riscv_vmulhu_vv_u32m1(__VA_ARGS__) |
| #define vmulhu_vv_u32m1_m | ( | ... | ) | __riscv_vmulhu_vv_u32m1_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u32m2 | ( | ... | ) | __riscv_vmulhu_vv_u32m2(__VA_ARGS__) |
| #define vmulhu_vv_u32m2_m | ( | ... | ) | __riscv_vmulhu_vv_u32m2_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u32m4 | ( | ... | ) | __riscv_vmulhu_vv_u32m4(__VA_ARGS__) |
| #define vmulhu_vv_u32m4_m | ( | ... | ) | __riscv_vmulhu_vv_u32m4_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u32m8 | ( | ... | ) | __riscv_vmulhu_vv_u32m8(__VA_ARGS__) |
| #define vmulhu_vv_u32m8_m | ( | ... | ) | __riscv_vmulhu_vv_u32m8_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u32mf2 | ( | ... | ) | __riscv_vmulhu_vv_u32mf2(__VA_ARGS__) |
| #define vmulhu_vv_u32mf2_m | ( | ... | ) | __riscv_vmulhu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u64m1 | ( | ... | ) | __riscv_vmulhu_vv_u64m1(__VA_ARGS__) |
| #define vmulhu_vv_u64m1_m | ( | ... | ) | __riscv_vmulhu_vv_u64m1_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u64m2 | ( | ... | ) | __riscv_vmulhu_vv_u64m2(__VA_ARGS__) |
| #define vmulhu_vv_u64m2_m | ( | ... | ) | __riscv_vmulhu_vv_u64m2_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u64m4 | ( | ... | ) | __riscv_vmulhu_vv_u64m4(__VA_ARGS__) |
| #define vmulhu_vv_u64m4_m | ( | ... | ) | __riscv_vmulhu_vv_u64m4_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u64m8 | ( | ... | ) | __riscv_vmulhu_vv_u64m8(__VA_ARGS__) |
| #define vmulhu_vv_u64m8_m | ( | ... | ) | __riscv_vmulhu_vv_u64m8_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u8m1 | ( | ... | ) | __riscv_vmulhu_vv_u8m1(__VA_ARGS__) |
| #define vmulhu_vv_u8m1_m | ( | ... | ) | __riscv_vmulhu_vv_u8m1_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u8m2 | ( | ... | ) | __riscv_vmulhu_vv_u8m2(__VA_ARGS__) |
| #define vmulhu_vv_u8m2_m | ( | ... | ) | __riscv_vmulhu_vv_u8m2_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u8m4 | ( | ... | ) | __riscv_vmulhu_vv_u8m4(__VA_ARGS__) |
| #define vmulhu_vv_u8m4_m | ( | ... | ) | __riscv_vmulhu_vv_u8m4_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u8m8 | ( | ... | ) | __riscv_vmulhu_vv_u8m8(__VA_ARGS__) |
| #define vmulhu_vv_u8m8_m | ( | ... | ) | __riscv_vmulhu_vv_u8m8_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u8mf2 | ( | ... | ) | __riscv_vmulhu_vv_u8mf2(__VA_ARGS__) |
| #define vmulhu_vv_u8mf2_m | ( | ... | ) | __riscv_vmulhu_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u8mf4 | ( | ... | ) | __riscv_vmulhu_vv_u8mf4(__VA_ARGS__) |
| #define vmulhu_vv_u8mf4_m | ( | ... | ) | __riscv_vmulhu_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vmulhu_vv_u8mf8 | ( | ... | ) | __riscv_vmulhu_vv_u8mf8(__VA_ARGS__) |
| #define vmulhu_vv_u8mf8_m | ( | ... | ) | __riscv_vmulhu_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u16m1 | ( | ... | ) | __riscv_vmulhu_vx_u16m1(__VA_ARGS__) |
| #define vmulhu_vx_u16m1_m | ( | ... | ) | __riscv_vmulhu_vx_u16m1_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u16m2 | ( | ... | ) | __riscv_vmulhu_vx_u16m2(__VA_ARGS__) |
| #define vmulhu_vx_u16m2_m | ( | ... | ) | __riscv_vmulhu_vx_u16m2_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u16m4 | ( | ... | ) | __riscv_vmulhu_vx_u16m4(__VA_ARGS__) |
| #define vmulhu_vx_u16m4_m | ( | ... | ) | __riscv_vmulhu_vx_u16m4_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u16m8 | ( | ... | ) | __riscv_vmulhu_vx_u16m8(__VA_ARGS__) |
| #define vmulhu_vx_u16m8_m | ( | ... | ) | __riscv_vmulhu_vx_u16m8_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u16mf2 | ( | ... | ) | __riscv_vmulhu_vx_u16mf2(__VA_ARGS__) |
| #define vmulhu_vx_u16mf2_m | ( | ... | ) | __riscv_vmulhu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u16mf4 | ( | ... | ) | __riscv_vmulhu_vx_u16mf4(__VA_ARGS__) |
| #define vmulhu_vx_u16mf4_m | ( | ... | ) | __riscv_vmulhu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u32m1 | ( | ... | ) | __riscv_vmulhu_vx_u32m1(__VA_ARGS__) |
| #define vmulhu_vx_u32m1_m | ( | ... | ) | __riscv_vmulhu_vx_u32m1_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u32m2 | ( | ... | ) | __riscv_vmulhu_vx_u32m2(__VA_ARGS__) |
| #define vmulhu_vx_u32m2_m | ( | ... | ) | __riscv_vmulhu_vx_u32m2_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u32m4 | ( | ... | ) | __riscv_vmulhu_vx_u32m4(__VA_ARGS__) |
| #define vmulhu_vx_u32m4_m | ( | ... | ) | __riscv_vmulhu_vx_u32m4_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u32m8 | ( | ... | ) | __riscv_vmulhu_vx_u32m8(__VA_ARGS__) |
| #define vmulhu_vx_u32m8_m | ( | ... | ) | __riscv_vmulhu_vx_u32m8_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u32mf2 | ( | ... | ) | __riscv_vmulhu_vx_u32mf2(__VA_ARGS__) |
| #define vmulhu_vx_u32mf2_m | ( | ... | ) | __riscv_vmulhu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u64m1 | ( | ... | ) | __riscv_vmulhu_vx_u64m1(__VA_ARGS__) |
| #define vmulhu_vx_u64m1_m | ( | ... | ) | __riscv_vmulhu_vx_u64m1_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u64m2 | ( | ... | ) | __riscv_vmulhu_vx_u64m2(__VA_ARGS__) |
| #define vmulhu_vx_u64m2_m | ( | ... | ) | __riscv_vmulhu_vx_u64m2_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u64m4 | ( | ... | ) | __riscv_vmulhu_vx_u64m4(__VA_ARGS__) |
| #define vmulhu_vx_u64m4_m | ( | ... | ) | __riscv_vmulhu_vx_u64m4_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u64m8 | ( | ... | ) | __riscv_vmulhu_vx_u64m8(__VA_ARGS__) |
| #define vmulhu_vx_u64m8_m | ( | ... | ) | __riscv_vmulhu_vx_u64m8_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u8m1 | ( | ... | ) | __riscv_vmulhu_vx_u8m1(__VA_ARGS__) |
| #define vmulhu_vx_u8m1_m | ( | ... | ) | __riscv_vmulhu_vx_u8m1_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u8m2 | ( | ... | ) | __riscv_vmulhu_vx_u8m2(__VA_ARGS__) |
| #define vmulhu_vx_u8m2_m | ( | ... | ) | __riscv_vmulhu_vx_u8m2_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u8m4 | ( | ... | ) | __riscv_vmulhu_vx_u8m4(__VA_ARGS__) |
| #define vmulhu_vx_u8m4_m | ( | ... | ) | __riscv_vmulhu_vx_u8m4_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u8m8 | ( | ... | ) | __riscv_vmulhu_vx_u8m8(__VA_ARGS__) |
| #define vmulhu_vx_u8m8_m | ( | ... | ) | __riscv_vmulhu_vx_u8m8_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u8mf2 | ( | ... | ) | __riscv_vmulhu_vx_u8mf2(__VA_ARGS__) |
| #define vmulhu_vx_u8mf2_m | ( | ... | ) | __riscv_vmulhu_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u8mf4 | ( | ... | ) | __riscv_vmulhu_vx_u8mf4(__VA_ARGS__) |
| #define vmulhu_vx_u8mf4_m | ( | ... | ) | __riscv_vmulhu_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vmulhu_vx_u8mf8 | ( | ... | ) | __riscv_vmulhu_vx_u8mf8(__VA_ARGS__) |
| #define vmulhu_vx_u8mf8_m | ( | ... | ) | __riscv_vmulhu_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vmv_s_x_i16m1 | ( | ... | ) | __riscv_vmv_s_x_i16m1_tu(__VA_ARGS__) |
| #define vmv_s_x_i16m2 | ( | ... | ) | __riscv_vmv_s_x_i16m2_tu(__VA_ARGS__) |
| #define vmv_s_x_i16m4 | ( | ... | ) | __riscv_vmv_s_x_i16m4_tu(__VA_ARGS__) |
| #define vmv_s_x_i16m8 | ( | ... | ) | __riscv_vmv_s_x_i16m8_tu(__VA_ARGS__) |
| #define vmv_s_x_i16mf2 | ( | ... | ) | __riscv_vmv_s_x_i16mf2_tu(__VA_ARGS__) |
| #define vmv_s_x_i16mf4 | ( | ... | ) | __riscv_vmv_s_x_i16mf4_tu(__VA_ARGS__) |
| #define vmv_s_x_i32m1 | ( | ... | ) | __riscv_vmv_s_x_i32m1_tu(__VA_ARGS__) |
| #define vmv_s_x_i32m2 | ( | ... | ) | __riscv_vmv_s_x_i32m2_tu(__VA_ARGS__) |
| #define vmv_s_x_i32m4 | ( | ... | ) | __riscv_vmv_s_x_i32m4_tu(__VA_ARGS__) |
| #define vmv_s_x_i32m8 | ( | ... | ) | __riscv_vmv_s_x_i32m8_tu(__VA_ARGS__) |
| #define vmv_s_x_i32mf2 | ( | ... | ) | __riscv_vmv_s_x_i32mf2_tu(__VA_ARGS__) |
| #define vmv_s_x_i64m1 | ( | ... | ) | __riscv_vmv_s_x_i64m1_tu(__VA_ARGS__) |
| #define vmv_s_x_i64m2 | ( | ... | ) | __riscv_vmv_s_x_i64m2_tu(__VA_ARGS__) |
| #define vmv_s_x_i64m4 | ( | ... | ) | __riscv_vmv_s_x_i64m4_tu(__VA_ARGS__) |
| #define vmv_s_x_i64m8 | ( | ... | ) | __riscv_vmv_s_x_i64m8_tu(__VA_ARGS__) |
| #define vmv_s_x_i8m1 | ( | ... | ) | __riscv_vmv_s_x_i8m1_tu(__VA_ARGS__) |
| #define vmv_s_x_i8m2 | ( | ... | ) | __riscv_vmv_s_x_i8m2_tu(__VA_ARGS__) |
| #define vmv_s_x_i8m4 | ( | ... | ) | __riscv_vmv_s_x_i8m4_tu(__VA_ARGS__) |
| #define vmv_s_x_i8m8 | ( | ... | ) | __riscv_vmv_s_x_i8m8_tu(__VA_ARGS__) |
| #define vmv_s_x_i8mf2 | ( | ... | ) | __riscv_vmv_s_x_i8mf2_tu(__VA_ARGS__) |
| #define vmv_s_x_i8mf4 | ( | ... | ) | __riscv_vmv_s_x_i8mf4_tu(__VA_ARGS__) |
| #define vmv_s_x_i8mf8 | ( | ... | ) | __riscv_vmv_s_x_i8mf8_tu(__VA_ARGS__) |
| #define vmv_s_x_u16m1 | ( | ... | ) | __riscv_vmv_s_x_u16m1_tu(__VA_ARGS__) |
| #define vmv_s_x_u16m2 | ( | ... | ) | __riscv_vmv_s_x_u16m2_tu(__VA_ARGS__) |
| #define vmv_s_x_u16m4 | ( | ... | ) | __riscv_vmv_s_x_u16m4_tu(__VA_ARGS__) |
| #define vmv_s_x_u16m8 | ( | ... | ) | __riscv_vmv_s_x_u16m8_tu(__VA_ARGS__) |
| #define vmv_s_x_u16mf2 | ( | ... | ) | __riscv_vmv_s_x_u16mf2_tu(__VA_ARGS__) |
| #define vmv_s_x_u16mf4 | ( | ... | ) | __riscv_vmv_s_x_u16mf4_tu(__VA_ARGS__) |
| #define vmv_s_x_u32m1 | ( | ... | ) | __riscv_vmv_s_x_u32m1_tu(__VA_ARGS__) |
| #define vmv_s_x_u32m2 | ( | ... | ) | __riscv_vmv_s_x_u32m2_tu(__VA_ARGS__) |
| #define vmv_s_x_u32m4 | ( | ... | ) | __riscv_vmv_s_x_u32m4_tu(__VA_ARGS__) |
| #define vmv_s_x_u32m8 | ( | ... | ) | __riscv_vmv_s_x_u32m8_tu(__VA_ARGS__) |
| #define vmv_s_x_u32mf2 | ( | ... | ) | __riscv_vmv_s_x_u32mf2_tu(__VA_ARGS__) |
| #define vmv_s_x_u64m1 | ( | ... | ) | __riscv_vmv_s_x_u64m1_tu(__VA_ARGS__) |
| #define vmv_s_x_u64m2 | ( | ... | ) | __riscv_vmv_s_x_u64m2_tu(__VA_ARGS__) |
| #define vmv_s_x_u64m4 | ( | ... | ) | __riscv_vmv_s_x_u64m4_tu(__VA_ARGS__) |
| #define vmv_s_x_u64m8 | ( | ... | ) | __riscv_vmv_s_x_u64m8_tu(__VA_ARGS__) |
| #define vmv_s_x_u8m1 | ( | ... | ) | __riscv_vmv_s_x_u8m1_tu(__VA_ARGS__) |
| #define vmv_s_x_u8m2 | ( | ... | ) | __riscv_vmv_s_x_u8m2_tu(__VA_ARGS__) |
| #define vmv_s_x_u8m4 | ( | ... | ) | __riscv_vmv_s_x_u8m4_tu(__VA_ARGS__) |
| #define vmv_s_x_u8m8 | ( | ... | ) | __riscv_vmv_s_x_u8m8_tu(__VA_ARGS__) |
| #define vmv_s_x_u8mf2 | ( | ... | ) | __riscv_vmv_s_x_u8mf2_tu(__VA_ARGS__) |
| #define vmv_s_x_u8mf4 | ( | ... | ) | __riscv_vmv_s_x_u8mf4_tu(__VA_ARGS__) |
| #define vmv_s_x_u8mf8 | ( | ... | ) | __riscv_vmv_s_x_u8mf8_tu(__VA_ARGS__) |
| #define vmv_v_v_f16m1 | ( | ... | ) | __riscv_vmv_v_v_f16m1(__VA_ARGS__) |
| #define vmv_v_v_f16m2 | ( | ... | ) | __riscv_vmv_v_v_f16m2(__VA_ARGS__) |
| #define vmv_v_v_f16m4 | ( | ... | ) | __riscv_vmv_v_v_f16m4(__VA_ARGS__) |
| #define vmv_v_v_f16m8 | ( | ... | ) | __riscv_vmv_v_v_f16m8(__VA_ARGS__) |
| #define vmv_v_v_f16mf2 | ( | ... | ) | __riscv_vmv_v_v_f16mf2(__VA_ARGS__) |
| #define vmv_v_v_f16mf4 | ( | ... | ) | __riscv_vmv_v_v_f16mf4(__VA_ARGS__) |
| #define vmv_v_v_f32m1 | ( | ... | ) | __riscv_vmv_v_v_f32m1(__VA_ARGS__) |
| #define vmv_v_v_f32m2 | ( | ... | ) | __riscv_vmv_v_v_f32m2(__VA_ARGS__) |
| #define vmv_v_v_f32m4 | ( | ... | ) | __riscv_vmv_v_v_f32m4(__VA_ARGS__) |
| #define vmv_v_v_f32m8 | ( | ... | ) | __riscv_vmv_v_v_f32m8(__VA_ARGS__) |
| #define vmv_v_v_f32mf2 | ( | ... | ) | __riscv_vmv_v_v_f32mf2(__VA_ARGS__) |
| #define vmv_v_v_f64m1 | ( | ... | ) | __riscv_vmv_v_v_f64m1(__VA_ARGS__) |
| #define vmv_v_v_f64m2 | ( | ... | ) | __riscv_vmv_v_v_f64m2(__VA_ARGS__) |
| #define vmv_v_v_f64m4 | ( | ... | ) | __riscv_vmv_v_v_f64m4(__VA_ARGS__) |
| #define vmv_v_v_f64m8 | ( | ... | ) | __riscv_vmv_v_v_f64m8(__VA_ARGS__) |
| #define vmv_v_v_i16m1 | ( | ... | ) | __riscv_vmv_v_v_i16m1(__VA_ARGS__) |
| #define vmv_v_v_i16m2 | ( | ... | ) | __riscv_vmv_v_v_i16m2(__VA_ARGS__) |
| #define vmv_v_v_i16m4 | ( | ... | ) | __riscv_vmv_v_v_i16m4(__VA_ARGS__) |
| #define vmv_v_v_i16m8 | ( | ... | ) | __riscv_vmv_v_v_i16m8(__VA_ARGS__) |
| #define vmv_v_v_i16mf2 | ( | ... | ) | __riscv_vmv_v_v_i16mf2(__VA_ARGS__) |
| #define vmv_v_v_i16mf4 | ( | ... | ) | __riscv_vmv_v_v_i16mf4(__VA_ARGS__) |
| #define vmv_v_v_i32m1 | ( | ... | ) | __riscv_vmv_v_v_i32m1(__VA_ARGS__) |
| #define vmv_v_v_i32m2 | ( | ... | ) | __riscv_vmv_v_v_i32m2(__VA_ARGS__) |
| #define vmv_v_v_i32m4 | ( | ... | ) | __riscv_vmv_v_v_i32m4(__VA_ARGS__) |
| #define vmv_v_v_i32m8 | ( | ... | ) | __riscv_vmv_v_v_i32m8(__VA_ARGS__) |
| #define vmv_v_v_i32mf2 | ( | ... | ) | __riscv_vmv_v_v_i32mf2(__VA_ARGS__) |
| #define vmv_v_v_i64m1 | ( | ... | ) | __riscv_vmv_v_v_i64m1(__VA_ARGS__) |
| #define vmv_v_v_i64m2 | ( | ... | ) | __riscv_vmv_v_v_i64m2(__VA_ARGS__) |
| #define vmv_v_v_i64m4 | ( | ... | ) | __riscv_vmv_v_v_i64m4(__VA_ARGS__) |
| #define vmv_v_v_i64m8 | ( | ... | ) | __riscv_vmv_v_v_i64m8(__VA_ARGS__) |
| #define vmv_v_v_i8m1 | ( | ... | ) | __riscv_vmv_v_v_i8m1(__VA_ARGS__) |
| #define vmv_v_v_i8m2 | ( | ... | ) | __riscv_vmv_v_v_i8m2(__VA_ARGS__) |
| #define vmv_v_v_i8m4 | ( | ... | ) | __riscv_vmv_v_v_i8m4(__VA_ARGS__) |
| #define vmv_v_v_i8m8 | ( | ... | ) | __riscv_vmv_v_v_i8m8(__VA_ARGS__) |
| #define vmv_v_v_i8mf2 | ( | ... | ) | __riscv_vmv_v_v_i8mf2(__VA_ARGS__) |
| #define vmv_v_v_i8mf4 | ( | ... | ) | __riscv_vmv_v_v_i8mf4(__VA_ARGS__) |
| #define vmv_v_v_i8mf8 | ( | ... | ) | __riscv_vmv_v_v_i8mf8(__VA_ARGS__) |
| #define vmv_v_v_u16m1 | ( | ... | ) | __riscv_vmv_v_v_u16m1(__VA_ARGS__) |
| #define vmv_v_v_u16m2 | ( | ... | ) | __riscv_vmv_v_v_u16m2(__VA_ARGS__) |
| #define vmv_v_v_u16m4 | ( | ... | ) | __riscv_vmv_v_v_u16m4(__VA_ARGS__) |
| #define vmv_v_v_u16m8 | ( | ... | ) | __riscv_vmv_v_v_u16m8(__VA_ARGS__) |
| #define vmv_v_v_u16mf2 | ( | ... | ) | __riscv_vmv_v_v_u16mf2(__VA_ARGS__) |
| #define vmv_v_v_u16mf4 | ( | ... | ) | __riscv_vmv_v_v_u16mf4(__VA_ARGS__) |
| #define vmv_v_v_u32m1 | ( | ... | ) | __riscv_vmv_v_v_u32m1(__VA_ARGS__) |
| #define vmv_v_v_u32m2 | ( | ... | ) | __riscv_vmv_v_v_u32m2(__VA_ARGS__) |
| #define vmv_v_v_u32m4 | ( | ... | ) | __riscv_vmv_v_v_u32m4(__VA_ARGS__) |
| #define vmv_v_v_u32m8 | ( | ... | ) | __riscv_vmv_v_v_u32m8(__VA_ARGS__) |
| #define vmv_v_v_u32mf2 | ( | ... | ) | __riscv_vmv_v_v_u32mf2(__VA_ARGS__) |
| #define vmv_v_v_u64m1 | ( | ... | ) | __riscv_vmv_v_v_u64m1(__VA_ARGS__) |
| #define vmv_v_v_u64m2 | ( | ... | ) | __riscv_vmv_v_v_u64m2(__VA_ARGS__) |
| #define vmv_v_v_u64m4 | ( | ... | ) | __riscv_vmv_v_v_u64m4(__VA_ARGS__) |
| #define vmv_v_v_u64m8 | ( | ... | ) | __riscv_vmv_v_v_u64m8(__VA_ARGS__) |
| #define vmv_v_v_u8m1 | ( | ... | ) | __riscv_vmv_v_v_u8m1(__VA_ARGS__) |
| #define vmv_v_v_u8m2 | ( | ... | ) | __riscv_vmv_v_v_u8m2(__VA_ARGS__) |
| #define vmv_v_v_u8m4 | ( | ... | ) | __riscv_vmv_v_v_u8m4(__VA_ARGS__) |
| #define vmv_v_v_u8m8 | ( | ... | ) | __riscv_vmv_v_v_u8m8(__VA_ARGS__) |
| #define vmv_v_v_u8mf2 | ( | ... | ) | __riscv_vmv_v_v_u8mf2(__VA_ARGS__) |
| #define vmv_v_v_u8mf4 | ( | ... | ) | __riscv_vmv_v_v_u8mf4(__VA_ARGS__) |
| #define vmv_v_v_u8mf8 | ( | ... | ) | __riscv_vmv_v_v_u8mf8(__VA_ARGS__) |
| #define vmv_v_x_i16m1 | ( | ... | ) | __riscv_vmv_v_x_i16m1(__VA_ARGS__) |
| #define vmv_v_x_i16m2 | ( | ... | ) | __riscv_vmv_v_x_i16m2(__VA_ARGS__) |
| #define vmv_v_x_i16m4 | ( | ... | ) | __riscv_vmv_v_x_i16m4(__VA_ARGS__) |
| #define vmv_v_x_i16m8 | ( | ... | ) | __riscv_vmv_v_x_i16m8(__VA_ARGS__) |
| #define vmv_v_x_i16mf2 | ( | ... | ) | __riscv_vmv_v_x_i16mf2(__VA_ARGS__) |
| #define vmv_v_x_i16mf4 | ( | ... | ) | __riscv_vmv_v_x_i16mf4(__VA_ARGS__) |
| #define vmv_v_x_i32m1 | ( | ... | ) | __riscv_vmv_v_x_i32m1(__VA_ARGS__) |
| #define vmv_v_x_i32m2 | ( | ... | ) | __riscv_vmv_v_x_i32m2(__VA_ARGS__) |
| #define vmv_v_x_i32m4 | ( | ... | ) | __riscv_vmv_v_x_i32m4(__VA_ARGS__) |
| #define vmv_v_x_i32m8 | ( | ... | ) | __riscv_vmv_v_x_i32m8(__VA_ARGS__) |
| #define vmv_v_x_i32mf2 | ( | ... | ) | __riscv_vmv_v_x_i32mf2(__VA_ARGS__) |
| #define vmv_v_x_i64m1 | ( | ... | ) | __riscv_vmv_v_x_i64m1(__VA_ARGS__) |
| #define vmv_v_x_i64m2 | ( | ... | ) | __riscv_vmv_v_x_i64m2(__VA_ARGS__) |
| #define vmv_v_x_i64m4 | ( | ... | ) | __riscv_vmv_v_x_i64m4(__VA_ARGS__) |
| #define vmv_v_x_i64m8 | ( | ... | ) | __riscv_vmv_v_x_i64m8(__VA_ARGS__) |
| #define vmv_v_x_i8m1 | ( | ... | ) | __riscv_vmv_v_x_i8m1(__VA_ARGS__) |
| #define vmv_v_x_i8m2 | ( | ... | ) | __riscv_vmv_v_x_i8m2(__VA_ARGS__) |
| #define vmv_v_x_i8m4 | ( | ... | ) | __riscv_vmv_v_x_i8m4(__VA_ARGS__) |
| #define vmv_v_x_i8m8 | ( | ... | ) | __riscv_vmv_v_x_i8m8(__VA_ARGS__) |
| #define vmv_v_x_i8mf2 | ( | ... | ) | __riscv_vmv_v_x_i8mf2(__VA_ARGS__) |
| #define vmv_v_x_i8mf4 | ( | ... | ) | __riscv_vmv_v_x_i8mf4(__VA_ARGS__) |
| #define vmv_v_x_i8mf8 | ( | ... | ) | __riscv_vmv_v_x_i8mf8(__VA_ARGS__) |
| #define vmv_v_x_u16m1 | ( | ... | ) | __riscv_vmv_v_x_u16m1(__VA_ARGS__) |
| #define vmv_v_x_u16m2 | ( | ... | ) | __riscv_vmv_v_x_u16m2(__VA_ARGS__) |
| #define vmv_v_x_u16m4 | ( | ... | ) | __riscv_vmv_v_x_u16m4(__VA_ARGS__) |
| #define vmv_v_x_u16m8 | ( | ... | ) | __riscv_vmv_v_x_u16m8(__VA_ARGS__) |
| #define vmv_v_x_u16mf2 | ( | ... | ) | __riscv_vmv_v_x_u16mf2(__VA_ARGS__) |
| #define vmv_v_x_u16mf4 | ( | ... | ) | __riscv_vmv_v_x_u16mf4(__VA_ARGS__) |
| #define vmv_v_x_u32m1 | ( | ... | ) | __riscv_vmv_v_x_u32m1(__VA_ARGS__) |
| #define vmv_v_x_u32m2 | ( | ... | ) | __riscv_vmv_v_x_u32m2(__VA_ARGS__) |
| #define vmv_v_x_u32m4 | ( | ... | ) | __riscv_vmv_v_x_u32m4(__VA_ARGS__) |
| #define vmv_v_x_u32m8 | ( | ... | ) | __riscv_vmv_v_x_u32m8(__VA_ARGS__) |
| #define vmv_v_x_u32mf2 | ( | ... | ) | __riscv_vmv_v_x_u32mf2(__VA_ARGS__) |
| #define vmv_v_x_u64m1 | ( | ... | ) | __riscv_vmv_v_x_u64m1(__VA_ARGS__) |
| #define vmv_v_x_u64m2 | ( | ... | ) | __riscv_vmv_v_x_u64m2(__VA_ARGS__) |
| #define vmv_v_x_u64m4 | ( | ... | ) | __riscv_vmv_v_x_u64m4(__VA_ARGS__) |
| #define vmv_v_x_u64m8 | ( | ... | ) | __riscv_vmv_v_x_u64m8(__VA_ARGS__) |
| #define vmv_v_x_u8m1 | ( | ... | ) | __riscv_vmv_v_x_u8m1(__VA_ARGS__) |
| #define vmv_v_x_u8m2 | ( | ... | ) | __riscv_vmv_v_x_u8m2(__VA_ARGS__) |
| #define vmv_v_x_u8m4 | ( | ... | ) | __riscv_vmv_v_x_u8m4(__VA_ARGS__) |
| #define vmv_v_x_u8m8 | ( | ... | ) | __riscv_vmv_v_x_u8m8(__VA_ARGS__) |
| #define vmv_v_x_u8mf2 | ( | ... | ) | __riscv_vmv_v_x_u8mf2(__VA_ARGS__) |
| #define vmv_v_x_u8mf4 | ( | ... | ) | __riscv_vmv_v_x_u8mf4(__VA_ARGS__) |
| #define vmv_v_x_u8mf8 | ( | ... | ) | __riscv_vmv_v_x_u8mf8(__VA_ARGS__) |
| #define vmv_x_s_i16m1_i16 | ( | ... | ) | __riscv_vmv_x_s_i16m1_i16(__VA_ARGS__) |
| #define vmv_x_s_i16m2_i16 | ( | ... | ) | __riscv_vmv_x_s_i16m2_i16(__VA_ARGS__) |
| #define vmv_x_s_i16m4_i16 | ( | ... | ) | __riscv_vmv_x_s_i16m4_i16(__VA_ARGS__) |
| #define vmv_x_s_i16m8_i16 | ( | ... | ) | __riscv_vmv_x_s_i16m8_i16(__VA_ARGS__) |
| #define vmv_x_s_i16mf2_i16 | ( | ... | ) | __riscv_vmv_x_s_i16mf2_i16(__VA_ARGS__) |
| #define vmv_x_s_i16mf4_i16 | ( | ... | ) | __riscv_vmv_x_s_i16mf4_i16(__VA_ARGS__) |
| #define vmv_x_s_i32m1_i32 | ( | ... | ) | __riscv_vmv_x_s_i32m1_i32(__VA_ARGS__) |
| #define vmv_x_s_i32m2_i32 | ( | ... | ) | __riscv_vmv_x_s_i32m2_i32(__VA_ARGS__) |
| #define vmv_x_s_i32m4_i32 | ( | ... | ) | __riscv_vmv_x_s_i32m4_i32(__VA_ARGS__) |
| #define vmv_x_s_i32m8_i32 | ( | ... | ) | __riscv_vmv_x_s_i32m8_i32(__VA_ARGS__) |
| #define vmv_x_s_i32mf2_i32 | ( | ... | ) | __riscv_vmv_x_s_i32mf2_i32(__VA_ARGS__) |
| #define vmv_x_s_i64m1_i64 | ( | ... | ) | __riscv_vmv_x_s_i64m1_i64(__VA_ARGS__) |
| #define vmv_x_s_i64m2_i64 | ( | ... | ) | __riscv_vmv_x_s_i64m2_i64(__VA_ARGS__) |
| #define vmv_x_s_i64m4_i64 | ( | ... | ) | __riscv_vmv_x_s_i64m4_i64(__VA_ARGS__) |
| #define vmv_x_s_i64m8_i64 | ( | ... | ) | __riscv_vmv_x_s_i64m8_i64(__VA_ARGS__) |
| #define vmv_x_s_i8m1_i8 | ( | ... | ) | __riscv_vmv_x_s_i8m1_i8(__VA_ARGS__) |
| #define vmv_x_s_i8m2_i8 | ( | ... | ) | __riscv_vmv_x_s_i8m2_i8(__VA_ARGS__) |
| #define vmv_x_s_i8m4_i8 | ( | ... | ) | __riscv_vmv_x_s_i8m4_i8(__VA_ARGS__) |
| #define vmv_x_s_i8m8_i8 | ( | ... | ) | __riscv_vmv_x_s_i8m8_i8(__VA_ARGS__) |
| #define vmv_x_s_i8mf2_i8 | ( | ... | ) | __riscv_vmv_x_s_i8mf2_i8(__VA_ARGS__) |
| #define vmv_x_s_i8mf4_i8 | ( | ... | ) | __riscv_vmv_x_s_i8mf4_i8(__VA_ARGS__) |
| #define vmv_x_s_i8mf8_i8 | ( | ... | ) | __riscv_vmv_x_s_i8mf8_i8(__VA_ARGS__) |
| #define vmv_x_s_u16m1_u16 | ( | ... | ) | __riscv_vmv_x_s_u16m1_u16(__VA_ARGS__) |
| #define vmv_x_s_u16m2_u16 | ( | ... | ) | __riscv_vmv_x_s_u16m2_u16(__VA_ARGS__) |
| #define vmv_x_s_u16m4_u16 | ( | ... | ) | __riscv_vmv_x_s_u16m4_u16(__VA_ARGS__) |
| #define vmv_x_s_u16m8_u16 | ( | ... | ) | __riscv_vmv_x_s_u16m8_u16(__VA_ARGS__) |
| #define vmv_x_s_u16mf2_u16 | ( | ... | ) | __riscv_vmv_x_s_u16mf2_u16(__VA_ARGS__) |
| #define vmv_x_s_u16mf4_u16 | ( | ... | ) | __riscv_vmv_x_s_u16mf4_u16(__VA_ARGS__) |
| #define vmv_x_s_u32m1_u32 | ( | ... | ) | __riscv_vmv_x_s_u32m1_u32(__VA_ARGS__) |
| #define vmv_x_s_u32m2_u32 | ( | ... | ) | __riscv_vmv_x_s_u32m2_u32(__VA_ARGS__) |
| #define vmv_x_s_u32m4_u32 | ( | ... | ) | __riscv_vmv_x_s_u32m4_u32(__VA_ARGS__) |
| #define vmv_x_s_u32m8_u32 | ( | ... | ) | __riscv_vmv_x_s_u32m8_u32(__VA_ARGS__) |
| #define vmv_x_s_u32mf2_u32 | ( | ... | ) | __riscv_vmv_x_s_u32mf2_u32(__VA_ARGS__) |
| #define vmv_x_s_u64m1_u64 | ( | ... | ) | __riscv_vmv_x_s_u64m1_u64(__VA_ARGS__) |
| #define vmv_x_s_u64m2_u64 | ( | ... | ) | __riscv_vmv_x_s_u64m2_u64(__VA_ARGS__) |
| #define vmv_x_s_u64m4_u64 | ( | ... | ) | __riscv_vmv_x_s_u64m4_u64(__VA_ARGS__) |
| #define vmv_x_s_u64m8_u64 | ( | ... | ) | __riscv_vmv_x_s_u64m8_u64(__VA_ARGS__) |
| #define vmv_x_s_u8m1_u8 | ( | ... | ) | __riscv_vmv_x_s_u8m1_u8(__VA_ARGS__) |
| #define vmv_x_s_u8m2_u8 | ( | ... | ) | __riscv_vmv_x_s_u8m2_u8(__VA_ARGS__) |
| #define vmv_x_s_u8m4_u8 | ( | ... | ) | __riscv_vmv_x_s_u8m4_u8(__VA_ARGS__) |
| #define vmv_x_s_u8m8_u8 | ( | ... | ) | __riscv_vmv_x_s_u8m8_u8(__VA_ARGS__) |
| #define vmv_x_s_u8mf2_u8 | ( | ... | ) | __riscv_vmv_x_s_u8mf2_u8(__VA_ARGS__) |
| #define vmv_x_s_u8mf4_u8 | ( | ... | ) | __riscv_vmv_x_s_u8mf4_u8(__VA_ARGS__) |
| #define vmv_x_s_u8mf8_u8 | ( | ... | ) | __riscv_vmv_x_s_u8mf8_u8(__VA_ARGS__) |
| #define vmxnor_mm_b1 | ( | ... | ) | __riscv_vmxnor_mm_b1(__VA_ARGS__) |
| #define vmxnor_mm_b16 | ( | ... | ) | __riscv_vmxnor_mm_b16(__VA_ARGS__) |
| #define vmxnor_mm_b2 | ( | ... | ) | __riscv_vmxnor_mm_b2(__VA_ARGS__) |
| #define vmxnor_mm_b32 | ( | ... | ) | __riscv_vmxnor_mm_b32(__VA_ARGS__) |
| #define vmxnor_mm_b4 | ( | ... | ) | __riscv_vmxnor_mm_b4(__VA_ARGS__) |
| #define vmxnor_mm_b64 | ( | ... | ) | __riscv_vmxnor_mm_b64(__VA_ARGS__) |
| #define vmxnor_mm_b8 | ( | ... | ) | __riscv_vmxnor_mm_b8(__VA_ARGS__) |
| #define vmxor_mm_b1 | ( | ... | ) | __riscv_vmxor_mm_b1(__VA_ARGS__) |
| #define vmxor_mm_b16 | ( | ... | ) | __riscv_vmxor_mm_b16(__VA_ARGS__) |
| #define vmxor_mm_b2 | ( | ... | ) | __riscv_vmxor_mm_b2(__VA_ARGS__) |
| #define vmxor_mm_b32 | ( | ... | ) | __riscv_vmxor_mm_b32(__VA_ARGS__) |
| #define vmxor_mm_b4 | ( | ... | ) | __riscv_vmxor_mm_b4(__VA_ARGS__) |
| #define vmxor_mm_b64 | ( | ... | ) | __riscv_vmxor_mm_b64(__VA_ARGS__) |
| #define vmxor_mm_b8 | ( | ... | ) | __riscv_vmxor_mm_b8(__VA_ARGS__) |
| #define vnclip_wv_i16m1 | ( | ... | ) | __riscv_vnclip_wv_i16m1(__VA_ARGS__) |
| #define vnclip_wv_i16m1_m | ( | ... | ) | __riscv_vnclip_wv_i16m1_tumu(__VA_ARGS__) |
| #define vnclip_wv_i16m2 | ( | ... | ) | __riscv_vnclip_wv_i16m2(__VA_ARGS__) |
| #define vnclip_wv_i16m2_m | ( | ... | ) | __riscv_vnclip_wv_i16m2_tumu(__VA_ARGS__) |
| #define vnclip_wv_i16m4 | ( | ... | ) | __riscv_vnclip_wv_i16m4(__VA_ARGS__) |
| #define vnclip_wv_i16m4_m | ( | ... | ) | __riscv_vnclip_wv_i16m4_tumu(__VA_ARGS__) |
| #define vnclip_wv_i16mf2 | ( | ... | ) | __riscv_vnclip_wv_i16mf2(__VA_ARGS__) |
| #define vnclip_wv_i16mf2_m | ( | ... | ) | __riscv_vnclip_wv_i16mf2_tumu(__VA_ARGS__) |
| #define vnclip_wv_i16mf4 | ( | ... | ) | __riscv_vnclip_wv_i16mf4(__VA_ARGS__) |
| #define vnclip_wv_i16mf4_m | ( | ... | ) | __riscv_vnclip_wv_i16mf4_tumu(__VA_ARGS__) |
| #define vnclip_wv_i32m1 | ( | ... | ) | __riscv_vnclip_wv_i32m1(__VA_ARGS__) |
| #define vnclip_wv_i32m1_m | ( | ... | ) | __riscv_vnclip_wv_i32m1_tumu(__VA_ARGS__) |
| #define vnclip_wv_i32m2 | ( | ... | ) | __riscv_vnclip_wv_i32m2(__VA_ARGS__) |
| #define vnclip_wv_i32m2_m | ( | ... | ) | __riscv_vnclip_wv_i32m2_tumu(__VA_ARGS__) |
| #define vnclip_wv_i32m4 | ( | ... | ) | __riscv_vnclip_wv_i32m4(__VA_ARGS__) |
| #define vnclip_wv_i32m4_m | ( | ... | ) | __riscv_vnclip_wv_i32m4_tumu(__VA_ARGS__) |
| #define vnclip_wv_i32mf2 | ( | ... | ) | __riscv_vnclip_wv_i32mf2(__VA_ARGS__) |
| #define vnclip_wv_i32mf2_m | ( | ... | ) | __riscv_vnclip_wv_i32mf2_tumu(__VA_ARGS__) |
| #define vnclip_wv_i8m1 | ( | ... | ) | __riscv_vnclip_wv_i8m1(__VA_ARGS__) |
| #define vnclip_wv_i8m1_m | ( | ... | ) | __riscv_vnclip_wv_i8m1_tumu(__VA_ARGS__) |
| #define vnclip_wv_i8m2 | ( | ... | ) | __riscv_vnclip_wv_i8m2(__VA_ARGS__) |
| #define vnclip_wv_i8m2_m | ( | ... | ) | __riscv_vnclip_wv_i8m2_tumu(__VA_ARGS__) |
| #define vnclip_wv_i8m4 | ( | ... | ) | __riscv_vnclip_wv_i8m4(__VA_ARGS__) |
| #define vnclip_wv_i8m4_m | ( | ... | ) | __riscv_vnclip_wv_i8m4_tumu(__VA_ARGS__) |
| #define vnclip_wv_i8mf2 | ( | ... | ) | __riscv_vnclip_wv_i8mf2(__VA_ARGS__) |
| #define vnclip_wv_i8mf2_m | ( | ... | ) | __riscv_vnclip_wv_i8mf2_tumu(__VA_ARGS__) |
| #define vnclip_wv_i8mf4 | ( | ... | ) | __riscv_vnclip_wv_i8mf4(__VA_ARGS__) |
| #define vnclip_wv_i8mf4_m | ( | ... | ) | __riscv_vnclip_wv_i8mf4_tumu(__VA_ARGS__) |
| #define vnclip_wv_i8mf8 | ( | ... | ) | __riscv_vnclip_wv_i8mf8(__VA_ARGS__) |
| #define vnclip_wv_i8mf8_m | ( | ... | ) | __riscv_vnclip_wv_i8mf8_tumu(__VA_ARGS__) |
| #define vnclip_wx_i16m1 | ( | ... | ) | __riscv_vnclip_wx_i16m1(__VA_ARGS__) |
| #define vnclip_wx_i16m1_m | ( | ... | ) | __riscv_vnclip_wx_i16m1_tumu(__VA_ARGS__) |
| #define vnclip_wx_i16m2 | ( | ... | ) | __riscv_vnclip_wx_i16m2(__VA_ARGS__) |
| #define vnclip_wx_i16m2_m | ( | ... | ) | __riscv_vnclip_wx_i16m2_tumu(__VA_ARGS__) |
| #define vnclip_wx_i16m4 | ( | ... | ) | __riscv_vnclip_wx_i16m4(__VA_ARGS__) |
| #define vnclip_wx_i16m4_m | ( | ... | ) | __riscv_vnclip_wx_i16m4_tumu(__VA_ARGS__) |
| #define vnclip_wx_i16mf2 | ( | ... | ) | __riscv_vnclip_wx_i16mf2(__VA_ARGS__) |
| #define vnclip_wx_i16mf2_m | ( | ... | ) | __riscv_vnclip_wx_i16mf2_tumu(__VA_ARGS__) |
| #define vnclip_wx_i16mf4 | ( | ... | ) | __riscv_vnclip_wx_i16mf4(__VA_ARGS__) |
| #define vnclip_wx_i16mf4_m | ( | ... | ) | __riscv_vnclip_wx_i16mf4_tumu(__VA_ARGS__) |
| #define vnclip_wx_i32m1 | ( | ... | ) | __riscv_vnclip_wx_i32m1(__VA_ARGS__) |
| #define vnclip_wx_i32m1_m | ( | ... | ) | __riscv_vnclip_wx_i32m1_tumu(__VA_ARGS__) |
| #define vnclip_wx_i32m2 | ( | ... | ) | __riscv_vnclip_wx_i32m2(__VA_ARGS__) |
| #define vnclip_wx_i32m2_m | ( | ... | ) | __riscv_vnclip_wx_i32m2_tumu(__VA_ARGS__) |
| #define vnclip_wx_i32m4 | ( | ... | ) | __riscv_vnclip_wx_i32m4(__VA_ARGS__) |
| #define vnclip_wx_i32m4_m | ( | ... | ) | __riscv_vnclip_wx_i32m4_tumu(__VA_ARGS__) |
| #define vnclip_wx_i32mf2 | ( | ... | ) | __riscv_vnclip_wx_i32mf2(__VA_ARGS__) |
| #define vnclip_wx_i32mf2_m | ( | ... | ) | __riscv_vnclip_wx_i32mf2_tumu(__VA_ARGS__) |
| #define vnclip_wx_i8m1 | ( | ... | ) | __riscv_vnclip_wx_i8m1(__VA_ARGS__) |
| #define vnclip_wx_i8m1_m | ( | ... | ) | __riscv_vnclip_wx_i8m1_tumu(__VA_ARGS__) |
| #define vnclip_wx_i8m2 | ( | ... | ) | __riscv_vnclip_wx_i8m2(__VA_ARGS__) |
| #define vnclip_wx_i8m2_m | ( | ... | ) | __riscv_vnclip_wx_i8m2_tumu(__VA_ARGS__) |
| #define vnclip_wx_i8m4 | ( | ... | ) | __riscv_vnclip_wx_i8m4(__VA_ARGS__) |
| #define vnclip_wx_i8m4_m | ( | ... | ) | __riscv_vnclip_wx_i8m4_tumu(__VA_ARGS__) |
| #define vnclip_wx_i8mf2 | ( | ... | ) | __riscv_vnclip_wx_i8mf2(__VA_ARGS__) |
| #define vnclip_wx_i8mf2_m | ( | ... | ) | __riscv_vnclip_wx_i8mf2_tumu(__VA_ARGS__) |
| #define vnclip_wx_i8mf4 | ( | ... | ) | __riscv_vnclip_wx_i8mf4(__VA_ARGS__) |
| #define vnclip_wx_i8mf4_m | ( | ... | ) | __riscv_vnclip_wx_i8mf4_tumu(__VA_ARGS__) |
| #define vnclip_wx_i8mf8 | ( | ... | ) | __riscv_vnclip_wx_i8mf8(__VA_ARGS__) |
| #define vnclip_wx_i8mf8_m | ( | ... | ) | __riscv_vnclip_wx_i8mf8_tumu(__VA_ARGS__) |
| #define vnclipu_wv_u16m1 | ( | ... | ) | __riscv_vnclipu_wv_u16m1(__VA_ARGS__) |
| #define vnclipu_wv_u16m1_m | ( | ... | ) | __riscv_vnclipu_wv_u16m1_tumu(__VA_ARGS__) |
| #define vnclipu_wv_u16m2 | ( | ... | ) | __riscv_vnclipu_wv_u16m2(__VA_ARGS__) |
| #define vnclipu_wv_u16m2_m | ( | ... | ) | __riscv_vnclipu_wv_u16m2_tumu(__VA_ARGS__) |
| #define vnclipu_wv_u16m4 | ( | ... | ) | __riscv_vnclipu_wv_u16m4(__VA_ARGS__) |
| #define vnclipu_wv_u16m4_m | ( | ... | ) | __riscv_vnclipu_wv_u16m4_tumu(__VA_ARGS__) |
| #define vnclipu_wv_u16mf2 | ( | ... | ) | __riscv_vnclipu_wv_u16mf2(__VA_ARGS__) |
| #define vnclipu_wv_u16mf2_m | ( | ... | ) | __riscv_vnclipu_wv_u16mf2_tumu(__VA_ARGS__) |
| #define vnclipu_wv_u16mf4 | ( | ... | ) | __riscv_vnclipu_wv_u16mf4(__VA_ARGS__) |
| #define vnclipu_wv_u16mf4_m | ( | ... | ) | __riscv_vnclipu_wv_u16mf4_tumu(__VA_ARGS__) |
| #define vnclipu_wv_u32m1 | ( | ... | ) | __riscv_vnclipu_wv_u32m1(__VA_ARGS__) |
| #define vnclipu_wv_u32m1_m | ( | ... | ) | __riscv_vnclipu_wv_u32m1_tumu(__VA_ARGS__) |
| #define vnclipu_wv_u32m2 | ( | ... | ) | __riscv_vnclipu_wv_u32m2(__VA_ARGS__) |
| #define vnclipu_wv_u32m2_m | ( | ... | ) | __riscv_vnclipu_wv_u32m2_tumu(__VA_ARGS__) |
| #define vnclipu_wv_u32m4 | ( | ... | ) | __riscv_vnclipu_wv_u32m4(__VA_ARGS__) |
| #define vnclipu_wv_u32m4_m | ( | ... | ) | __riscv_vnclipu_wv_u32m4_tumu(__VA_ARGS__) |
| #define vnclipu_wv_u32mf2 | ( | ... | ) | __riscv_vnclipu_wv_u32mf2(__VA_ARGS__) |
| #define vnclipu_wv_u32mf2_m | ( | ... | ) | __riscv_vnclipu_wv_u32mf2_tumu(__VA_ARGS__) |
| #define vnclipu_wv_u8m1 | ( | ... | ) | __riscv_vnclipu_wv_u8m1(__VA_ARGS__) |
| #define vnclipu_wv_u8m1_m | ( | ... | ) | __riscv_vnclipu_wv_u8m1_tumu(__VA_ARGS__) |
| #define vnclipu_wv_u8m2 | ( | ... | ) | __riscv_vnclipu_wv_u8m2(__VA_ARGS__) |
| #define vnclipu_wv_u8m2_m | ( | ... | ) | __riscv_vnclipu_wv_u8m2_tumu(__VA_ARGS__) |
| #define vnclipu_wv_u8m4 | ( | ... | ) | __riscv_vnclipu_wv_u8m4(__VA_ARGS__) |
| #define vnclipu_wv_u8m4_m | ( | ... | ) | __riscv_vnclipu_wv_u8m4_tumu(__VA_ARGS__) |
| #define vnclipu_wv_u8mf2 | ( | ... | ) | __riscv_vnclipu_wv_u8mf2(__VA_ARGS__) |
| #define vnclipu_wv_u8mf2_m | ( | ... | ) | __riscv_vnclipu_wv_u8mf2_tumu(__VA_ARGS__) |
| #define vnclipu_wv_u8mf4 | ( | ... | ) | __riscv_vnclipu_wv_u8mf4(__VA_ARGS__) |
| #define vnclipu_wv_u8mf4_m | ( | ... | ) | __riscv_vnclipu_wv_u8mf4_tumu(__VA_ARGS__) |
| #define vnclipu_wv_u8mf8 | ( | ... | ) | __riscv_vnclipu_wv_u8mf8(__VA_ARGS__) |
| #define vnclipu_wv_u8mf8_m | ( | ... | ) | __riscv_vnclipu_wv_u8mf8_tumu(__VA_ARGS__) |
| #define vnclipu_wx_u16m1 | ( | ... | ) | __riscv_vnclipu_wx_u16m1(__VA_ARGS__) |
| #define vnclipu_wx_u16m1_m | ( | ... | ) | __riscv_vnclipu_wx_u16m1_tumu(__VA_ARGS__) |
| #define vnclipu_wx_u16m2 | ( | ... | ) | __riscv_vnclipu_wx_u16m2(__VA_ARGS__) |
| #define vnclipu_wx_u16m2_m | ( | ... | ) | __riscv_vnclipu_wx_u16m2_tumu(__VA_ARGS__) |
| #define vnclipu_wx_u16m4 | ( | ... | ) | __riscv_vnclipu_wx_u16m4(__VA_ARGS__) |
| #define vnclipu_wx_u16m4_m | ( | ... | ) | __riscv_vnclipu_wx_u16m4_tumu(__VA_ARGS__) |
| #define vnclipu_wx_u16mf2 | ( | ... | ) | __riscv_vnclipu_wx_u16mf2(__VA_ARGS__) |
| #define vnclipu_wx_u16mf2_m | ( | ... | ) | __riscv_vnclipu_wx_u16mf2_tumu(__VA_ARGS__) |
| #define vnclipu_wx_u16mf4 | ( | ... | ) | __riscv_vnclipu_wx_u16mf4(__VA_ARGS__) |
| #define vnclipu_wx_u16mf4_m | ( | ... | ) | __riscv_vnclipu_wx_u16mf4_tumu(__VA_ARGS__) |
| #define vnclipu_wx_u32m1 | ( | ... | ) | __riscv_vnclipu_wx_u32m1(__VA_ARGS__) |
| #define vnclipu_wx_u32m1_m | ( | ... | ) | __riscv_vnclipu_wx_u32m1_tumu(__VA_ARGS__) |
| #define vnclipu_wx_u32m2 | ( | ... | ) | __riscv_vnclipu_wx_u32m2(__VA_ARGS__) |
| #define vnclipu_wx_u32m2_m | ( | ... | ) | __riscv_vnclipu_wx_u32m2_tumu(__VA_ARGS__) |
| #define vnclipu_wx_u32m4 | ( | ... | ) | __riscv_vnclipu_wx_u32m4(__VA_ARGS__) |
| #define vnclipu_wx_u32m4_m | ( | ... | ) | __riscv_vnclipu_wx_u32m4_tumu(__VA_ARGS__) |
| #define vnclipu_wx_u32mf2 | ( | ... | ) | __riscv_vnclipu_wx_u32mf2(__VA_ARGS__) |
| #define vnclipu_wx_u32mf2_m | ( | ... | ) | __riscv_vnclipu_wx_u32mf2_tumu(__VA_ARGS__) |
| #define vnclipu_wx_u8m1 | ( | ... | ) | __riscv_vnclipu_wx_u8m1(__VA_ARGS__) |
| #define vnclipu_wx_u8m1_m | ( | ... | ) | __riscv_vnclipu_wx_u8m1_tumu(__VA_ARGS__) |
| #define vnclipu_wx_u8m2 | ( | ... | ) | __riscv_vnclipu_wx_u8m2(__VA_ARGS__) |
| #define vnclipu_wx_u8m2_m | ( | ... | ) | __riscv_vnclipu_wx_u8m2_tumu(__VA_ARGS__) |
| #define vnclipu_wx_u8m4 | ( | ... | ) | __riscv_vnclipu_wx_u8m4(__VA_ARGS__) |
| #define vnclipu_wx_u8m4_m | ( | ... | ) | __riscv_vnclipu_wx_u8m4_tumu(__VA_ARGS__) |
| #define vnclipu_wx_u8mf2 | ( | ... | ) | __riscv_vnclipu_wx_u8mf2(__VA_ARGS__) |
| #define vnclipu_wx_u8mf2_m | ( | ... | ) | __riscv_vnclipu_wx_u8mf2_tumu(__VA_ARGS__) |
| #define vnclipu_wx_u8mf4 | ( | ... | ) | __riscv_vnclipu_wx_u8mf4(__VA_ARGS__) |
| #define vnclipu_wx_u8mf4_m | ( | ... | ) | __riscv_vnclipu_wx_u8mf4_tumu(__VA_ARGS__) |
| #define vnclipu_wx_u8mf8 | ( | ... | ) | __riscv_vnclipu_wx_u8mf8(__VA_ARGS__) |
| #define vnclipu_wx_u8mf8_m | ( | ... | ) | __riscv_vnclipu_wx_u8mf8_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_i16m1 | ( | ... | ) | __riscv_vncvt_x_x_w_i16m1(__VA_ARGS__) |
| #define vncvt_x_x_w_i16m1_m | ( | ... | ) | __riscv_vncvt_x_x_w_i16m1_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_i16m2 | ( | ... | ) | __riscv_vncvt_x_x_w_i16m2(__VA_ARGS__) |
| #define vncvt_x_x_w_i16m2_m | ( | ... | ) | __riscv_vncvt_x_x_w_i16m2_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_i16m4 | ( | ... | ) | __riscv_vncvt_x_x_w_i16m4(__VA_ARGS__) |
| #define vncvt_x_x_w_i16m4_m | ( | ... | ) | __riscv_vncvt_x_x_w_i16m4_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_i16mf2 | ( | ... | ) | __riscv_vncvt_x_x_w_i16mf2(__VA_ARGS__) |
| #define vncvt_x_x_w_i16mf2_m | ( | ... | ) | __riscv_vncvt_x_x_w_i16mf2_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_i16mf4 | ( | ... | ) | __riscv_vncvt_x_x_w_i16mf4(__VA_ARGS__) |
| #define vncvt_x_x_w_i16mf4_m | ( | ... | ) | __riscv_vncvt_x_x_w_i16mf4_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_i32m1 | ( | ... | ) | __riscv_vncvt_x_x_w_i32m1(__VA_ARGS__) |
| #define vncvt_x_x_w_i32m1_m | ( | ... | ) | __riscv_vncvt_x_x_w_i32m1_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_i32m2 | ( | ... | ) | __riscv_vncvt_x_x_w_i32m2(__VA_ARGS__) |
| #define vncvt_x_x_w_i32m2_m | ( | ... | ) | __riscv_vncvt_x_x_w_i32m2_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_i32m4 | ( | ... | ) | __riscv_vncvt_x_x_w_i32m4(__VA_ARGS__) |
| #define vncvt_x_x_w_i32m4_m | ( | ... | ) | __riscv_vncvt_x_x_w_i32m4_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_i32mf2 | ( | ... | ) | __riscv_vncvt_x_x_w_i32mf2(__VA_ARGS__) |
| #define vncvt_x_x_w_i32mf2_m | ( | ... | ) | __riscv_vncvt_x_x_w_i32mf2_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_i8m1 | ( | ... | ) | __riscv_vncvt_x_x_w_i8m1(__VA_ARGS__) |
| #define vncvt_x_x_w_i8m1_m | ( | ... | ) | __riscv_vncvt_x_x_w_i8m1_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_i8m2 | ( | ... | ) | __riscv_vncvt_x_x_w_i8m2(__VA_ARGS__) |
| #define vncvt_x_x_w_i8m2_m | ( | ... | ) | __riscv_vncvt_x_x_w_i8m2_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_i8m4 | ( | ... | ) | __riscv_vncvt_x_x_w_i8m4(__VA_ARGS__) |
| #define vncvt_x_x_w_i8m4_m | ( | ... | ) | __riscv_vncvt_x_x_w_i8m4_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_i8mf2 | ( | ... | ) | __riscv_vncvt_x_x_w_i8mf2(__VA_ARGS__) |
| #define vncvt_x_x_w_i8mf2_m | ( | ... | ) | __riscv_vncvt_x_x_w_i8mf2_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_i8mf4 | ( | ... | ) | __riscv_vncvt_x_x_w_i8mf4(__VA_ARGS__) |
| #define vncvt_x_x_w_i8mf4_m | ( | ... | ) | __riscv_vncvt_x_x_w_i8mf4_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_i8mf8 | ( | ... | ) | __riscv_vncvt_x_x_w_i8mf8(__VA_ARGS__) |
| #define vncvt_x_x_w_i8mf8_m | ( | ... | ) | __riscv_vncvt_x_x_w_i8mf8_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_u16m1 | ( | ... | ) | __riscv_vncvt_x_x_w_u16m1(__VA_ARGS__) |
| #define vncvt_x_x_w_u16m1_m | ( | ... | ) | __riscv_vncvt_x_x_w_u16m1_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_u16m2 | ( | ... | ) | __riscv_vncvt_x_x_w_u16m2(__VA_ARGS__) |
| #define vncvt_x_x_w_u16m2_m | ( | ... | ) | __riscv_vncvt_x_x_w_u16m2_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_u16m4 | ( | ... | ) | __riscv_vncvt_x_x_w_u16m4(__VA_ARGS__) |
| #define vncvt_x_x_w_u16m4_m | ( | ... | ) | __riscv_vncvt_x_x_w_u16m4_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_u16mf2 | ( | ... | ) | __riscv_vncvt_x_x_w_u16mf2(__VA_ARGS__) |
| #define vncvt_x_x_w_u16mf2_m | ( | ... | ) | __riscv_vncvt_x_x_w_u16mf2_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_u16mf4 | ( | ... | ) | __riscv_vncvt_x_x_w_u16mf4(__VA_ARGS__) |
| #define vncvt_x_x_w_u16mf4_m | ( | ... | ) | __riscv_vncvt_x_x_w_u16mf4_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_u32m1 | ( | ... | ) | __riscv_vncvt_x_x_w_u32m1(__VA_ARGS__) |
| #define vncvt_x_x_w_u32m1_m | ( | ... | ) | __riscv_vncvt_x_x_w_u32m1_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_u32m2 | ( | ... | ) | __riscv_vncvt_x_x_w_u32m2(__VA_ARGS__) |
| #define vncvt_x_x_w_u32m2_m | ( | ... | ) | __riscv_vncvt_x_x_w_u32m2_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_u32m4 | ( | ... | ) | __riscv_vncvt_x_x_w_u32m4(__VA_ARGS__) |
| #define vncvt_x_x_w_u32m4_m | ( | ... | ) | __riscv_vncvt_x_x_w_u32m4_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_u32mf2 | ( | ... | ) | __riscv_vncvt_x_x_w_u32mf2(__VA_ARGS__) |
| #define vncvt_x_x_w_u32mf2_m | ( | ... | ) | __riscv_vncvt_x_x_w_u32mf2_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_u8m1 | ( | ... | ) | __riscv_vncvt_x_x_w_u8m1(__VA_ARGS__) |
| #define vncvt_x_x_w_u8m1_m | ( | ... | ) | __riscv_vncvt_x_x_w_u8m1_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_u8m2 | ( | ... | ) | __riscv_vncvt_x_x_w_u8m2(__VA_ARGS__) |
| #define vncvt_x_x_w_u8m2_m | ( | ... | ) | __riscv_vncvt_x_x_w_u8m2_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_u8m4 | ( | ... | ) | __riscv_vncvt_x_x_w_u8m4(__VA_ARGS__) |
| #define vncvt_x_x_w_u8m4_m | ( | ... | ) | __riscv_vncvt_x_x_w_u8m4_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_u8mf2 | ( | ... | ) | __riscv_vncvt_x_x_w_u8mf2(__VA_ARGS__) |
| #define vncvt_x_x_w_u8mf2_m | ( | ... | ) | __riscv_vncvt_x_x_w_u8mf2_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_u8mf4 | ( | ... | ) | __riscv_vncvt_x_x_w_u8mf4(__VA_ARGS__) |
| #define vncvt_x_x_w_u8mf4_m | ( | ... | ) | __riscv_vncvt_x_x_w_u8mf4_tumu(__VA_ARGS__) |
| #define vncvt_x_x_w_u8mf8 | ( | ... | ) | __riscv_vncvt_x_x_w_u8mf8(__VA_ARGS__) |
| #define vncvt_x_x_w_u8mf8_m | ( | ... | ) | __riscv_vncvt_x_x_w_u8mf8_tumu(__VA_ARGS__) |
| #define vneg_v_i16m1 | ( | ... | ) | __riscv_vneg_v_i16m1(__VA_ARGS__) |
| #define vneg_v_i16m1_m | ( | ... | ) | __riscv_vneg_v_i16m1_tumu(__VA_ARGS__) |
| #define vneg_v_i16m2 | ( | ... | ) | __riscv_vneg_v_i16m2(__VA_ARGS__) |
| #define vneg_v_i16m2_m | ( | ... | ) | __riscv_vneg_v_i16m2_tumu(__VA_ARGS__) |
| #define vneg_v_i16m4 | ( | ... | ) | __riscv_vneg_v_i16m4(__VA_ARGS__) |
| #define vneg_v_i16m4_m | ( | ... | ) | __riscv_vneg_v_i16m4_tumu(__VA_ARGS__) |
| #define vneg_v_i16m8 | ( | ... | ) | __riscv_vneg_v_i16m8(__VA_ARGS__) |
| #define vneg_v_i16m8_m | ( | ... | ) | __riscv_vneg_v_i16m8_tumu(__VA_ARGS__) |
| #define vneg_v_i16mf2 | ( | ... | ) | __riscv_vneg_v_i16mf2(__VA_ARGS__) |
| #define vneg_v_i16mf2_m | ( | ... | ) | __riscv_vneg_v_i16mf2_tumu(__VA_ARGS__) |
| #define vneg_v_i16mf4 | ( | ... | ) | __riscv_vneg_v_i16mf4(__VA_ARGS__) |
| #define vneg_v_i16mf4_m | ( | ... | ) | __riscv_vneg_v_i16mf4_tumu(__VA_ARGS__) |
| #define vneg_v_i32m1 | ( | ... | ) | __riscv_vneg_v_i32m1(__VA_ARGS__) |
| #define vneg_v_i32m1_m | ( | ... | ) | __riscv_vneg_v_i32m1_tumu(__VA_ARGS__) |
| #define vneg_v_i32m2 | ( | ... | ) | __riscv_vneg_v_i32m2(__VA_ARGS__) |
| #define vneg_v_i32m2_m | ( | ... | ) | __riscv_vneg_v_i32m2_tumu(__VA_ARGS__) |
| #define vneg_v_i32m4 | ( | ... | ) | __riscv_vneg_v_i32m4(__VA_ARGS__) |
| #define vneg_v_i32m4_m | ( | ... | ) | __riscv_vneg_v_i32m4_tumu(__VA_ARGS__) |
| #define vneg_v_i32m8 | ( | ... | ) | __riscv_vneg_v_i32m8(__VA_ARGS__) |
| #define vneg_v_i32m8_m | ( | ... | ) | __riscv_vneg_v_i32m8_tumu(__VA_ARGS__) |
| #define vneg_v_i32mf2 | ( | ... | ) | __riscv_vneg_v_i32mf2(__VA_ARGS__) |
| #define vneg_v_i32mf2_m | ( | ... | ) | __riscv_vneg_v_i32mf2_tumu(__VA_ARGS__) |
| #define vneg_v_i64m1 | ( | ... | ) | __riscv_vneg_v_i64m1(__VA_ARGS__) |
| #define vneg_v_i64m1_m | ( | ... | ) | __riscv_vneg_v_i64m1_tumu(__VA_ARGS__) |
| #define vneg_v_i64m2 | ( | ... | ) | __riscv_vneg_v_i64m2(__VA_ARGS__) |
| #define vneg_v_i64m2_m | ( | ... | ) | __riscv_vneg_v_i64m2_tumu(__VA_ARGS__) |
| #define vneg_v_i64m4 | ( | ... | ) | __riscv_vneg_v_i64m4(__VA_ARGS__) |
| #define vneg_v_i64m4_m | ( | ... | ) | __riscv_vneg_v_i64m4_tumu(__VA_ARGS__) |
| #define vneg_v_i64m8 | ( | ... | ) | __riscv_vneg_v_i64m8(__VA_ARGS__) |
| #define vneg_v_i64m8_m | ( | ... | ) | __riscv_vneg_v_i64m8_tumu(__VA_ARGS__) |
| #define vneg_v_i8m1 | ( | ... | ) | __riscv_vneg_v_i8m1(__VA_ARGS__) |
| #define vneg_v_i8m1_m | ( | ... | ) | __riscv_vneg_v_i8m1_tumu(__VA_ARGS__) |
| #define vneg_v_i8m2 | ( | ... | ) | __riscv_vneg_v_i8m2(__VA_ARGS__) |
| #define vneg_v_i8m2_m | ( | ... | ) | __riscv_vneg_v_i8m2_tumu(__VA_ARGS__) |
| #define vneg_v_i8m4 | ( | ... | ) | __riscv_vneg_v_i8m4(__VA_ARGS__) |
| #define vneg_v_i8m4_m | ( | ... | ) | __riscv_vneg_v_i8m4_tumu(__VA_ARGS__) |
| #define vneg_v_i8m8 | ( | ... | ) | __riscv_vneg_v_i8m8(__VA_ARGS__) |
| #define vneg_v_i8m8_m | ( | ... | ) | __riscv_vneg_v_i8m8_tumu(__VA_ARGS__) |
| #define vneg_v_i8mf2 | ( | ... | ) | __riscv_vneg_v_i8mf2(__VA_ARGS__) |
| #define vneg_v_i8mf2_m | ( | ... | ) | __riscv_vneg_v_i8mf2_tumu(__VA_ARGS__) |
| #define vneg_v_i8mf4 | ( | ... | ) | __riscv_vneg_v_i8mf4(__VA_ARGS__) |
| #define vneg_v_i8mf4_m | ( | ... | ) | __riscv_vneg_v_i8mf4_tumu(__VA_ARGS__) |
| #define vneg_v_i8mf8 | ( | ... | ) | __riscv_vneg_v_i8mf8(__VA_ARGS__) |
| #define vneg_v_i8mf8_m | ( | ... | ) | __riscv_vneg_v_i8mf8_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i16m1 | ( | ... | ) | __riscv_vnmsac_vv_i16m1_tu(__VA_ARGS__) |
| #define vnmsac_vv_i16m1_m | ( | ... | ) | __riscv_vnmsac_vv_i16m1_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i16m2 | ( | ... | ) | __riscv_vnmsac_vv_i16m2_tu(__VA_ARGS__) |
| #define vnmsac_vv_i16m2_m | ( | ... | ) | __riscv_vnmsac_vv_i16m2_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i16m4 | ( | ... | ) | __riscv_vnmsac_vv_i16m4_tu(__VA_ARGS__) |
| #define vnmsac_vv_i16m4_m | ( | ... | ) | __riscv_vnmsac_vv_i16m4_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i16m8 | ( | ... | ) | __riscv_vnmsac_vv_i16m8_tu(__VA_ARGS__) |
| #define vnmsac_vv_i16m8_m | ( | ... | ) | __riscv_vnmsac_vv_i16m8_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i16mf2 | ( | ... | ) | __riscv_vnmsac_vv_i16mf2_tu(__VA_ARGS__) |
| #define vnmsac_vv_i16mf2_m | ( | ... | ) | __riscv_vnmsac_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i16mf4 | ( | ... | ) | __riscv_vnmsac_vv_i16mf4_tu(__VA_ARGS__) |
| #define vnmsac_vv_i16mf4_m | ( | ... | ) | __riscv_vnmsac_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i32m1 | ( | ... | ) | __riscv_vnmsac_vv_i32m1_tu(__VA_ARGS__) |
| #define vnmsac_vv_i32m1_m | ( | ... | ) | __riscv_vnmsac_vv_i32m1_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i32m2 | ( | ... | ) | __riscv_vnmsac_vv_i32m2_tu(__VA_ARGS__) |
| #define vnmsac_vv_i32m2_m | ( | ... | ) | __riscv_vnmsac_vv_i32m2_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i32m4 | ( | ... | ) | __riscv_vnmsac_vv_i32m4_tu(__VA_ARGS__) |
| #define vnmsac_vv_i32m4_m | ( | ... | ) | __riscv_vnmsac_vv_i32m4_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i32m8 | ( | ... | ) | __riscv_vnmsac_vv_i32m8_tu(__VA_ARGS__) |
| #define vnmsac_vv_i32m8_m | ( | ... | ) | __riscv_vnmsac_vv_i32m8_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i32mf2 | ( | ... | ) | __riscv_vnmsac_vv_i32mf2_tu(__VA_ARGS__) |
| #define vnmsac_vv_i32mf2_m | ( | ... | ) | __riscv_vnmsac_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i64m1 | ( | ... | ) | __riscv_vnmsac_vv_i64m1_tu(__VA_ARGS__) |
| #define vnmsac_vv_i64m1_m | ( | ... | ) | __riscv_vnmsac_vv_i64m1_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i64m2 | ( | ... | ) | __riscv_vnmsac_vv_i64m2_tu(__VA_ARGS__) |
| #define vnmsac_vv_i64m2_m | ( | ... | ) | __riscv_vnmsac_vv_i64m2_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i64m4 | ( | ... | ) | __riscv_vnmsac_vv_i64m4_tu(__VA_ARGS__) |
| #define vnmsac_vv_i64m4_m | ( | ... | ) | __riscv_vnmsac_vv_i64m4_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i64m8 | ( | ... | ) | __riscv_vnmsac_vv_i64m8_tu(__VA_ARGS__) |
| #define vnmsac_vv_i64m8_m | ( | ... | ) | __riscv_vnmsac_vv_i64m8_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i8m1 | ( | ... | ) | __riscv_vnmsac_vv_i8m1_tu(__VA_ARGS__) |
| #define vnmsac_vv_i8m1_m | ( | ... | ) | __riscv_vnmsac_vv_i8m1_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i8m2 | ( | ... | ) | __riscv_vnmsac_vv_i8m2_tu(__VA_ARGS__) |
| #define vnmsac_vv_i8m2_m | ( | ... | ) | __riscv_vnmsac_vv_i8m2_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i8m4 | ( | ... | ) | __riscv_vnmsac_vv_i8m4_tu(__VA_ARGS__) |
| #define vnmsac_vv_i8m4_m | ( | ... | ) | __riscv_vnmsac_vv_i8m4_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i8m8 | ( | ... | ) | __riscv_vnmsac_vv_i8m8_tu(__VA_ARGS__) |
| #define vnmsac_vv_i8m8_m | ( | ... | ) | __riscv_vnmsac_vv_i8m8_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i8mf2 | ( | ... | ) | __riscv_vnmsac_vv_i8mf2_tu(__VA_ARGS__) |
| #define vnmsac_vv_i8mf2_m | ( | ... | ) | __riscv_vnmsac_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i8mf4 | ( | ... | ) | __riscv_vnmsac_vv_i8mf4_tu(__VA_ARGS__) |
| #define vnmsac_vv_i8mf4_m | ( | ... | ) | __riscv_vnmsac_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vnmsac_vv_i8mf8 | ( | ... | ) | __riscv_vnmsac_vv_i8mf8_tu(__VA_ARGS__) |
| #define vnmsac_vv_i8mf8_m | ( | ... | ) | __riscv_vnmsac_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u16m1 | ( | ... | ) | __riscv_vnmsac_vv_u16m1_tu(__VA_ARGS__) |
| #define vnmsac_vv_u16m1_m | ( | ... | ) | __riscv_vnmsac_vv_u16m1_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u16m2 | ( | ... | ) | __riscv_vnmsac_vv_u16m2_tu(__VA_ARGS__) |
| #define vnmsac_vv_u16m2_m | ( | ... | ) | __riscv_vnmsac_vv_u16m2_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u16m4 | ( | ... | ) | __riscv_vnmsac_vv_u16m4_tu(__VA_ARGS__) |
| #define vnmsac_vv_u16m4_m | ( | ... | ) | __riscv_vnmsac_vv_u16m4_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u16m8 | ( | ... | ) | __riscv_vnmsac_vv_u16m8_tu(__VA_ARGS__) |
| #define vnmsac_vv_u16m8_m | ( | ... | ) | __riscv_vnmsac_vv_u16m8_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u16mf2 | ( | ... | ) | __riscv_vnmsac_vv_u16mf2_tu(__VA_ARGS__) |
| #define vnmsac_vv_u16mf2_m | ( | ... | ) | __riscv_vnmsac_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u16mf4 | ( | ... | ) | __riscv_vnmsac_vv_u16mf4_tu(__VA_ARGS__) |
| #define vnmsac_vv_u16mf4_m | ( | ... | ) | __riscv_vnmsac_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u32m1 | ( | ... | ) | __riscv_vnmsac_vv_u32m1_tu(__VA_ARGS__) |
| #define vnmsac_vv_u32m1_m | ( | ... | ) | __riscv_vnmsac_vv_u32m1_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u32m2 | ( | ... | ) | __riscv_vnmsac_vv_u32m2_tu(__VA_ARGS__) |
| #define vnmsac_vv_u32m2_m | ( | ... | ) | __riscv_vnmsac_vv_u32m2_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u32m4 | ( | ... | ) | __riscv_vnmsac_vv_u32m4_tu(__VA_ARGS__) |
| #define vnmsac_vv_u32m4_m | ( | ... | ) | __riscv_vnmsac_vv_u32m4_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u32m8 | ( | ... | ) | __riscv_vnmsac_vv_u32m8_tu(__VA_ARGS__) |
| #define vnmsac_vv_u32m8_m | ( | ... | ) | __riscv_vnmsac_vv_u32m8_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u32mf2 | ( | ... | ) | __riscv_vnmsac_vv_u32mf2_tu(__VA_ARGS__) |
| #define vnmsac_vv_u32mf2_m | ( | ... | ) | __riscv_vnmsac_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u64m1 | ( | ... | ) | __riscv_vnmsac_vv_u64m1_tu(__VA_ARGS__) |
| #define vnmsac_vv_u64m1_m | ( | ... | ) | __riscv_vnmsac_vv_u64m1_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u64m2 | ( | ... | ) | __riscv_vnmsac_vv_u64m2_tu(__VA_ARGS__) |
| #define vnmsac_vv_u64m2_m | ( | ... | ) | __riscv_vnmsac_vv_u64m2_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u64m4 | ( | ... | ) | __riscv_vnmsac_vv_u64m4_tu(__VA_ARGS__) |
| #define vnmsac_vv_u64m4_m | ( | ... | ) | __riscv_vnmsac_vv_u64m4_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u64m8 | ( | ... | ) | __riscv_vnmsac_vv_u64m8_tu(__VA_ARGS__) |
| #define vnmsac_vv_u64m8_m | ( | ... | ) | __riscv_vnmsac_vv_u64m8_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u8m1 | ( | ... | ) | __riscv_vnmsac_vv_u8m1_tu(__VA_ARGS__) |
| #define vnmsac_vv_u8m1_m | ( | ... | ) | __riscv_vnmsac_vv_u8m1_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u8m2 | ( | ... | ) | __riscv_vnmsac_vv_u8m2_tu(__VA_ARGS__) |
| #define vnmsac_vv_u8m2_m | ( | ... | ) | __riscv_vnmsac_vv_u8m2_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u8m4 | ( | ... | ) | __riscv_vnmsac_vv_u8m4_tu(__VA_ARGS__) |
| #define vnmsac_vv_u8m4_m | ( | ... | ) | __riscv_vnmsac_vv_u8m4_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u8m8 | ( | ... | ) | __riscv_vnmsac_vv_u8m8_tu(__VA_ARGS__) |
| #define vnmsac_vv_u8m8_m | ( | ... | ) | __riscv_vnmsac_vv_u8m8_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u8mf2 | ( | ... | ) | __riscv_vnmsac_vv_u8mf2_tu(__VA_ARGS__) |
| #define vnmsac_vv_u8mf2_m | ( | ... | ) | __riscv_vnmsac_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u8mf4 | ( | ... | ) | __riscv_vnmsac_vv_u8mf4_tu(__VA_ARGS__) |
| #define vnmsac_vv_u8mf4_m | ( | ... | ) | __riscv_vnmsac_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vnmsac_vv_u8mf8 | ( | ... | ) | __riscv_vnmsac_vv_u8mf8_tu(__VA_ARGS__) |
| #define vnmsac_vv_u8mf8_m | ( | ... | ) | __riscv_vnmsac_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i16m1 | ( | ... | ) | __riscv_vnmsac_vx_i16m1_tu(__VA_ARGS__) |
| #define vnmsac_vx_i16m1_m | ( | ... | ) | __riscv_vnmsac_vx_i16m1_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i16m2 | ( | ... | ) | __riscv_vnmsac_vx_i16m2_tu(__VA_ARGS__) |
| #define vnmsac_vx_i16m2_m | ( | ... | ) | __riscv_vnmsac_vx_i16m2_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i16m4 | ( | ... | ) | __riscv_vnmsac_vx_i16m4_tu(__VA_ARGS__) |
| #define vnmsac_vx_i16m4_m | ( | ... | ) | __riscv_vnmsac_vx_i16m4_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i16m8 | ( | ... | ) | __riscv_vnmsac_vx_i16m8_tu(__VA_ARGS__) |
| #define vnmsac_vx_i16m8_m | ( | ... | ) | __riscv_vnmsac_vx_i16m8_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i16mf2 | ( | ... | ) | __riscv_vnmsac_vx_i16mf2_tu(__VA_ARGS__) |
| #define vnmsac_vx_i16mf2_m | ( | ... | ) | __riscv_vnmsac_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i16mf4 | ( | ... | ) | __riscv_vnmsac_vx_i16mf4_tu(__VA_ARGS__) |
| #define vnmsac_vx_i16mf4_m | ( | ... | ) | __riscv_vnmsac_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i32m1 | ( | ... | ) | __riscv_vnmsac_vx_i32m1_tu(__VA_ARGS__) |
| #define vnmsac_vx_i32m1_m | ( | ... | ) | __riscv_vnmsac_vx_i32m1_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i32m2 | ( | ... | ) | __riscv_vnmsac_vx_i32m2_tu(__VA_ARGS__) |
| #define vnmsac_vx_i32m2_m | ( | ... | ) | __riscv_vnmsac_vx_i32m2_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i32m4 | ( | ... | ) | __riscv_vnmsac_vx_i32m4_tu(__VA_ARGS__) |
| #define vnmsac_vx_i32m4_m | ( | ... | ) | __riscv_vnmsac_vx_i32m4_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i32m8 | ( | ... | ) | __riscv_vnmsac_vx_i32m8_tu(__VA_ARGS__) |
| #define vnmsac_vx_i32m8_m | ( | ... | ) | __riscv_vnmsac_vx_i32m8_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i32mf2 | ( | ... | ) | __riscv_vnmsac_vx_i32mf2_tu(__VA_ARGS__) |
| #define vnmsac_vx_i32mf2_m | ( | ... | ) | __riscv_vnmsac_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i64m1 | ( | ... | ) | __riscv_vnmsac_vx_i64m1_tu(__VA_ARGS__) |
| #define vnmsac_vx_i64m1_m | ( | ... | ) | __riscv_vnmsac_vx_i64m1_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i64m2 | ( | ... | ) | __riscv_vnmsac_vx_i64m2_tu(__VA_ARGS__) |
| #define vnmsac_vx_i64m2_m | ( | ... | ) | __riscv_vnmsac_vx_i64m2_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i64m4 | ( | ... | ) | __riscv_vnmsac_vx_i64m4_tu(__VA_ARGS__) |
| #define vnmsac_vx_i64m4_m | ( | ... | ) | __riscv_vnmsac_vx_i64m4_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i64m8 | ( | ... | ) | __riscv_vnmsac_vx_i64m8_tu(__VA_ARGS__) |
| #define vnmsac_vx_i64m8_m | ( | ... | ) | __riscv_vnmsac_vx_i64m8_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i8m1 | ( | ... | ) | __riscv_vnmsac_vx_i8m1_tu(__VA_ARGS__) |
| #define vnmsac_vx_i8m1_m | ( | ... | ) | __riscv_vnmsac_vx_i8m1_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i8m2 | ( | ... | ) | __riscv_vnmsac_vx_i8m2_tu(__VA_ARGS__) |
| #define vnmsac_vx_i8m2_m | ( | ... | ) | __riscv_vnmsac_vx_i8m2_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i8m4 | ( | ... | ) | __riscv_vnmsac_vx_i8m4_tu(__VA_ARGS__) |
| #define vnmsac_vx_i8m4_m | ( | ... | ) | __riscv_vnmsac_vx_i8m4_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i8m8 | ( | ... | ) | __riscv_vnmsac_vx_i8m8_tu(__VA_ARGS__) |
| #define vnmsac_vx_i8m8_m | ( | ... | ) | __riscv_vnmsac_vx_i8m8_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i8mf2 | ( | ... | ) | __riscv_vnmsac_vx_i8mf2_tu(__VA_ARGS__) |
| #define vnmsac_vx_i8mf2_m | ( | ... | ) | __riscv_vnmsac_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i8mf4 | ( | ... | ) | __riscv_vnmsac_vx_i8mf4_tu(__VA_ARGS__) |
| #define vnmsac_vx_i8mf4_m | ( | ... | ) | __riscv_vnmsac_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vnmsac_vx_i8mf8 | ( | ... | ) | __riscv_vnmsac_vx_i8mf8_tu(__VA_ARGS__) |
| #define vnmsac_vx_i8mf8_m | ( | ... | ) | __riscv_vnmsac_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u16m1 | ( | ... | ) | __riscv_vnmsac_vx_u16m1_tu(__VA_ARGS__) |
| #define vnmsac_vx_u16m1_m | ( | ... | ) | __riscv_vnmsac_vx_u16m1_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u16m2 | ( | ... | ) | __riscv_vnmsac_vx_u16m2_tu(__VA_ARGS__) |
| #define vnmsac_vx_u16m2_m | ( | ... | ) | __riscv_vnmsac_vx_u16m2_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u16m4 | ( | ... | ) | __riscv_vnmsac_vx_u16m4_tu(__VA_ARGS__) |
| #define vnmsac_vx_u16m4_m | ( | ... | ) | __riscv_vnmsac_vx_u16m4_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u16m8 | ( | ... | ) | __riscv_vnmsac_vx_u16m8_tu(__VA_ARGS__) |
| #define vnmsac_vx_u16m8_m | ( | ... | ) | __riscv_vnmsac_vx_u16m8_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u16mf2 | ( | ... | ) | __riscv_vnmsac_vx_u16mf2_tu(__VA_ARGS__) |
| #define vnmsac_vx_u16mf2_m | ( | ... | ) | __riscv_vnmsac_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u16mf4 | ( | ... | ) | __riscv_vnmsac_vx_u16mf4_tu(__VA_ARGS__) |
| #define vnmsac_vx_u16mf4_m | ( | ... | ) | __riscv_vnmsac_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u32m1 | ( | ... | ) | __riscv_vnmsac_vx_u32m1_tu(__VA_ARGS__) |
| #define vnmsac_vx_u32m1_m | ( | ... | ) | __riscv_vnmsac_vx_u32m1_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u32m2 | ( | ... | ) | __riscv_vnmsac_vx_u32m2_tu(__VA_ARGS__) |
| #define vnmsac_vx_u32m2_m | ( | ... | ) | __riscv_vnmsac_vx_u32m2_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u32m4 | ( | ... | ) | __riscv_vnmsac_vx_u32m4_tu(__VA_ARGS__) |
| #define vnmsac_vx_u32m4_m | ( | ... | ) | __riscv_vnmsac_vx_u32m4_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u32m8 | ( | ... | ) | __riscv_vnmsac_vx_u32m8_tu(__VA_ARGS__) |
| #define vnmsac_vx_u32m8_m | ( | ... | ) | __riscv_vnmsac_vx_u32m8_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u32mf2 | ( | ... | ) | __riscv_vnmsac_vx_u32mf2_tu(__VA_ARGS__) |
| #define vnmsac_vx_u32mf2_m | ( | ... | ) | __riscv_vnmsac_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u64m1 | ( | ... | ) | __riscv_vnmsac_vx_u64m1_tu(__VA_ARGS__) |
| #define vnmsac_vx_u64m1_m | ( | ... | ) | __riscv_vnmsac_vx_u64m1_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u64m2 | ( | ... | ) | __riscv_vnmsac_vx_u64m2_tu(__VA_ARGS__) |
| #define vnmsac_vx_u64m2_m | ( | ... | ) | __riscv_vnmsac_vx_u64m2_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u64m4 | ( | ... | ) | __riscv_vnmsac_vx_u64m4_tu(__VA_ARGS__) |
| #define vnmsac_vx_u64m4_m | ( | ... | ) | __riscv_vnmsac_vx_u64m4_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u64m8 | ( | ... | ) | __riscv_vnmsac_vx_u64m8_tu(__VA_ARGS__) |
| #define vnmsac_vx_u64m8_m | ( | ... | ) | __riscv_vnmsac_vx_u64m8_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u8m1 | ( | ... | ) | __riscv_vnmsac_vx_u8m1_tu(__VA_ARGS__) |
| #define vnmsac_vx_u8m1_m | ( | ... | ) | __riscv_vnmsac_vx_u8m1_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u8m2 | ( | ... | ) | __riscv_vnmsac_vx_u8m2_tu(__VA_ARGS__) |
| #define vnmsac_vx_u8m2_m | ( | ... | ) | __riscv_vnmsac_vx_u8m2_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u8m4 | ( | ... | ) | __riscv_vnmsac_vx_u8m4_tu(__VA_ARGS__) |
| #define vnmsac_vx_u8m4_m | ( | ... | ) | __riscv_vnmsac_vx_u8m4_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u8m8 | ( | ... | ) | __riscv_vnmsac_vx_u8m8_tu(__VA_ARGS__) |
| #define vnmsac_vx_u8m8_m | ( | ... | ) | __riscv_vnmsac_vx_u8m8_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u8mf2 | ( | ... | ) | __riscv_vnmsac_vx_u8mf2_tu(__VA_ARGS__) |
| #define vnmsac_vx_u8mf2_m | ( | ... | ) | __riscv_vnmsac_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u8mf4 | ( | ... | ) | __riscv_vnmsac_vx_u8mf4_tu(__VA_ARGS__) |
| #define vnmsac_vx_u8mf4_m | ( | ... | ) | __riscv_vnmsac_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vnmsac_vx_u8mf8 | ( | ... | ) | __riscv_vnmsac_vx_u8mf8_tu(__VA_ARGS__) |
| #define vnmsac_vx_u8mf8_m | ( | ... | ) | __riscv_vnmsac_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i16m1 | ( | ... | ) | __riscv_vnmsub_vv_i16m1_tu(__VA_ARGS__) |
| #define vnmsub_vv_i16m1_m | ( | ... | ) | __riscv_vnmsub_vv_i16m1_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i16m2 | ( | ... | ) | __riscv_vnmsub_vv_i16m2_tu(__VA_ARGS__) |
| #define vnmsub_vv_i16m2_m | ( | ... | ) | __riscv_vnmsub_vv_i16m2_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i16m4 | ( | ... | ) | __riscv_vnmsub_vv_i16m4_tu(__VA_ARGS__) |
| #define vnmsub_vv_i16m4_m | ( | ... | ) | __riscv_vnmsub_vv_i16m4_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i16m8 | ( | ... | ) | __riscv_vnmsub_vv_i16m8_tu(__VA_ARGS__) |
| #define vnmsub_vv_i16m8_m | ( | ... | ) | __riscv_vnmsub_vv_i16m8_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i16mf2 | ( | ... | ) | __riscv_vnmsub_vv_i16mf2_tu(__VA_ARGS__) |
| #define vnmsub_vv_i16mf2_m | ( | ... | ) | __riscv_vnmsub_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i16mf4 | ( | ... | ) | __riscv_vnmsub_vv_i16mf4_tu(__VA_ARGS__) |
| #define vnmsub_vv_i16mf4_m | ( | ... | ) | __riscv_vnmsub_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i32m1 | ( | ... | ) | __riscv_vnmsub_vv_i32m1_tu(__VA_ARGS__) |
| #define vnmsub_vv_i32m1_m | ( | ... | ) | __riscv_vnmsub_vv_i32m1_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i32m2 | ( | ... | ) | __riscv_vnmsub_vv_i32m2_tu(__VA_ARGS__) |
| #define vnmsub_vv_i32m2_m | ( | ... | ) | __riscv_vnmsub_vv_i32m2_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i32m4 | ( | ... | ) | __riscv_vnmsub_vv_i32m4_tu(__VA_ARGS__) |
| #define vnmsub_vv_i32m4_m | ( | ... | ) | __riscv_vnmsub_vv_i32m4_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i32m8 | ( | ... | ) | __riscv_vnmsub_vv_i32m8_tu(__VA_ARGS__) |
| #define vnmsub_vv_i32m8_m | ( | ... | ) | __riscv_vnmsub_vv_i32m8_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i32mf2 | ( | ... | ) | __riscv_vnmsub_vv_i32mf2_tu(__VA_ARGS__) |
| #define vnmsub_vv_i32mf2_m | ( | ... | ) | __riscv_vnmsub_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i64m1 | ( | ... | ) | __riscv_vnmsub_vv_i64m1_tu(__VA_ARGS__) |
| #define vnmsub_vv_i64m1_m | ( | ... | ) | __riscv_vnmsub_vv_i64m1_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i64m2 | ( | ... | ) | __riscv_vnmsub_vv_i64m2_tu(__VA_ARGS__) |
| #define vnmsub_vv_i64m2_m | ( | ... | ) | __riscv_vnmsub_vv_i64m2_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i64m4 | ( | ... | ) | __riscv_vnmsub_vv_i64m4_tu(__VA_ARGS__) |
| #define vnmsub_vv_i64m4_m | ( | ... | ) | __riscv_vnmsub_vv_i64m4_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i64m8 | ( | ... | ) | __riscv_vnmsub_vv_i64m8_tu(__VA_ARGS__) |
| #define vnmsub_vv_i64m8_m | ( | ... | ) | __riscv_vnmsub_vv_i64m8_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i8m1 | ( | ... | ) | __riscv_vnmsub_vv_i8m1_tu(__VA_ARGS__) |
| #define vnmsub_vv_i8m1_m | ( | ... | ) | __riscv_vnmsub_vv_i8m1_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i8m2 | ( | ... | ) | __riscv_vnmsub_vv_i8m2_tu(__VA_ARGS__) |
| #define vnmsub_vv_i8m2_m | ( | ... | ) | __riscv_vnmsub_vv_i8m2_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i8m4 | ( | ... | ) | __riscv_vnmsub_vv_i8m4_tu(__VA_ARGS__) |
| #define vnmsub_vv_i8m4_m | ( | ... | ) | __riscv_vnmsub_vv_i8m4_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i8m8 | ( | ... | ) | __riscv_vnmsub_vv_i8m8_tu(__VA_ARGS__) |
| #define vnmsub_vv_i8m8_m | ( | ... | ) | __riscv_vnmsub_vv_i8m8_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i8mf2 | ( | ... | ) | __riscv_vnmsub_vv_i8mf2_tu(__VA_ARGS__) |
| #define vnmsub_vv_i8mf2_m | ( | ... | ) | __riscv_vnmsub_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i8mf4 | ( | ... | ) | __riscv_vnmsub_vv_i8mf4_tu(__VA_ARGS__) |
| #define vnmsub_vv_i8mf4_m | ( | ... | ) | __riscv_vnmsub_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vnmsub_vv_i8mf8 | ( | ... | ) | __riscv_vnmsub_vv_i8mf8_tu(__VA_ARGS__) |
| #define vnmsub_vv_i8mf8_m | ( | ... | ) | __riscv_vnmsub_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u16m1 | ( | ... | ) | __riscv_vnmsub_vv_u16m1_tu(__VA_ARGS__) |
| #define vnmsub_vv_u16m1_m | ( | ... | ) | __riscv_vnmsub_vv_u16m1_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u16m2 | ( | ... | ) | __riscv_vnmsub_vv_u16m2_tu(__VA_ARGS__) |
| #define vnmsub_vv_u16m2_m | ( | ... | ) | __riscv_vnmsub_vv_u16m2_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u16m4 | ( | ... | ) | __riscv_vnmsub_vv_u16m4_tu(__VA_ARGS__) |
| #define vnmsub_vv_u16m4_m | ( | ... | ) | __riscv_vnmsub_vv_u16m4_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u16m8 | ( | ... | ) | __riscv_vnmsub_vv_u16m8_tu(__VA_ARGS__) |
| #define vnmsub_vv_u16m8_m | ( | ... | ) | __riscv_vnmsub_vv_u16m8_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u16mf2 | ( | ... | ) | __riscv_vnmsub_vv_u16mf2_tu(__VA_ARGS__) |
| #define vnmsub_vv_u16mf2_m | ( | ... | ) | __riscv_vnmsub_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u16mf4 | ( | ... | ) | __riscv_vnmsub_vv_u16mf4_tu(__VA_ARGS__) |
| #define vnmsub_vv_u16mf4_m | ( | ... | ) | __riscv_vnmsub_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u32m1 | ( | ... | ) | __riscv_vnmsub_vv_u32m1_tu(__VA_ARGS__) |
| #define vnmsub_vv_u32m1_m | ( | ... | ) | __riscv_vnmsub_vv_u32m1_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u32m2 | ( | ... | ) | __riscv_vnmsub_vv_u32m2_tu(__VA_ARGS__) |
| #define vnmsub_vv_u32m2_m | ( | ... | ) | __riscv_vnmsub_vv_u32m2_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u32m4 | ( | ... | ) | __riscv_vnmsub_vv_u32m4_tu(__VA_ARGS__) |
| #define vnmsub_vv_u32m4_m | ( | ... | ) | __riscv_vnmsub_vv_u32m4_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u32m8 | ( | ... | ) | __riscv_vnmsub_vv_u32m8_tu(__VA_ARGS__) |
| #define vnmsub_vv_u32m8_m | ( | ... | ) | __riscv_vnmsub_vv_u32m8_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u32mf2 | ( | ... | ) | __riscv_vnmsub_vv_u32mf2_tu(__VA_ARGS__) |
| #define vnmsub_vv_u32mf2_m | ( | ... | ) | __riscv_vnmsub_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u64m1 | ( | ... | ) | __riscv_vnmsub_vv_u64m1_tu(__VA_ARGS__) |
| #define vnmsub_vv_u64m1_m | ( | ... | ) | __riscv_vnmsub_vv_u64m1_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u64m2 | ( | ... | ) | __riscv_vnmsub_vv_u64m2_tu(__VA_ARGS__) |
| #define vnmsub_vv_u64m2_m | ( | ... | ) | __riscv_vnmsub_vv_u64m2_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u64m4 | ( | ... | ) | __riscv_vnmsub_vv_u64m4_tu(__VA_ARGS__) |
| #define vnmsub_vv_u64m4_m | ( | ... | ) | __riscv_vnmsub_vv_u64m4_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u64m8 | ( | ... | ) | __riscv_vnmsub_vv_u64m8_tu(__VA_ARGS__) |
| #define vnmsub_vv_u64m8_m | ( | ... | ) | __riscv_vnmsub_vv_u64m8_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u8m1 | ( | ... | ) | __riscv_vnmsub_vv_u8m1_tu(__VA_ARGS__) |
| #define vnmsub_vv_u8m1_m | ( | ... | ) | __riscv_vnmsub_vv_u8m1_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u8m2 | ( | ... | ) | __riscv_vnmsub_vv_u8m2_tu(__VA_ARGS__) |
| #define vnmsub_vv_u8m2_m | ( | ... | ) | __riscv_vnmsub_vv_u8m2_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u8m4 | ( | ... | ) | __riscv_vnmsub_vv_u8m4_tu(__VA_ARGS__) |
| #define vnmsub_vv_u8m4_m | ( | ... | ) | __riscv_vnmsub_vv_u8m4_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u8m8 | ( | ... | ) | __riscv_vnmsub_vv_u8m8_tu(__VA_ARGS__) |
| #define vnmsub_vv_u8m8_m | ( | ... | ) | __riscv_vnmsub_vv_u8m8_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u8mf2 | ( | ... | ) | __riscv_vnmsub_vv_u8mf2_tu(__VA_ARGS__) |
| #define vnmsub_vv_u8mf2_m | ( | ... | ) | __riscv_vnmsub_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u8mf4 | ( | ... | ) | __riscv_vnmsub_vv_u8mf4_tu(__VA_ARGS__) |
| #define vnmsub_vv_u8mf4_m | ( | ... | ) | __riscv_vnmsub_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vnmsub_vv_u8mf8 | ( | ... | ) | __riscv_vnmsub_vv_u8mf8_tu(__VA_ARGS__) |
| #define vnmsub_vv_u8mf8_m | ( | ... | ) | __riscv_vnmsub_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i16m1 | ( | ... | ) | __riscv_vnmsub_vx_i16m1_tu(__VA_ARGS__) |
| #define vnmsub_vx_i16m1_m | ( | ... | ) | __riscv_vnmsub_vx_i16m1_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i16m2 | ( | ... | ) | __riscv_vnmsub_vx_i16m2_tu(__VA_ARGS__) |
| #define vnmsub_vx_i16m2_m | ( | ... | ) | __riscv_vnmsub_vx_i16m2_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i16m4 | ( | ... | ) | __riscv_vnmsub_vx_i16m4_tu(__VA_ARGS__) |
| #define vnmsub_vx_i16m4_m | ( | ... | ) | __riscv_vnmsub_vx_i16m4_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i16m8 | ( | ... | ) | __riscv_vnmsub_vx_i16m8_tu(__VA_ARGS__) |
| #define vnmsub_vx_i16m8_m | ( | ... | ) | __riscv_vnmsub_vx_i16m8_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i16mf2 | ( | ... | ) | __riscv_vnmsub_vx_i16mf2_tu(__VA_ARGS__) |
| #define vnmsub_vx_i16mf2_m | ( | ... | ) | __riscv_vnmsub_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i16mf4 | ( | ... | ) | __riscv_vnmsub_vx_i16mf4_tu(__VA_ARGS__) |
| #define vnmsub_vx_i16mf4_m | ( | ... | ) | __riscv_vnmsub_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i32m1 | ( | ... | ) | __riscv_vnmsub_vx_i32m1_tu(__VA_ARGS__) |
| #define vnmsub_vx_i32m1_m | ( | ... | ) | __riscv_vnmsub_vx_i32m1_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i32m2 | ( | ... | ) | __riscv_vnmsub_vx_i32m2_tu(__VA_ARGS__) |
| #define vnmsub_vx_i32m2_m | ( | ... | ) | __riscv_vnmsub_vx_i32m2_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i32m4 | ( | ... | ) | __riscv_vnmsub_vx_i32m4_tu(__VA_ARGS__) |
| #define vnmsub_vx_i32m4_m | ( | ... | ) | __riscv_vnmsub_vx_i32m4_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i32m8 | ( | ... | ) | __riscv_vnmsub_vx_i32m8_tu(__VA_ARGS__) |
| #define vnmsub_vx_i32m8_m | ( | ... | ) | __riscv_vnmsub_vx_i32m8_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i32mf2 | ( | ... | ) | __riscv_vnmsub_vx_i32mf2_tu(__VA_ARGS__) |
| #define vnmsub_vx_i32mf2_m | ( | ... | ) | __riscv_vnmsub_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i64m1 | ( | ... | ) | __riscv_vnmsub_vx_i64m1_tu(__VA_ARGS__) |
| #define vnmsub_vx_i64m1_m | ( | ... | ) | __riscv_vnmsub_vx_i64m1_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i64m2 | ( | ... | ) | __riscv_vnmsub_vx_i64m2_tu(__VA_ARGS__) |
| #define vnmsub_vx_i64m2_m | ( | ... | ) | __riscv_vnmsub_vx_i64m2_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i64m4 | ( | ... | ) | __riscv_vnmsub_vx_i64m4_tu(__VA_ARGS__) |
| #define vnmsub_vx_i64m4_m | ( | ... | ) | __riscv_vnmsub_vx_i64m4_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i64m8 | ( | ... | ) | __riscv_vnmsub_vx_i64m8_tu(__VA_ARGS__) |
| #define vnmsub_vx_i64m8_m | ( | ... | ) | __riscv_vnmsub_vx_i64m8_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i8m1 | ( | ... | ) | __riscv_vnmsub_vx_i8m1_tu(__VA_ARGS__) |
| #define vnmsub_vx_i8m1_m | ( | ... | ) | __riscv_vnmsub_vx_i8m1_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i8m2 | ( | ... | ) | __riscv_vnmsub_vx_i8m2_tu(__VA_ARGS__) |
| #define vnmsub_vx_i8m2_m | ( | ... | ) | __riscv_vnmsub_vx_i8m2_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i8m4 | ( | ... | ) | __riscv_vnmsub_vx_i8m4_tu(__VA_ARGS__) |
| #define vnmsub_vx_i8m4_m | ( | ... | ) | __riscv_vnmsub_vx_i8m4_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i8m8 | ( | ... | ) | __riscv_vnmsub_vx_i8m8_tu(__VA_ARGS__) |
| #define vnmsub_vx_i8m8_m | ( | ... | ) | __riscv_vnmsub_vx_i8m8_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i8mf2 | ( | ... | ) | __riscv_vnmsub_vx_i8mf2_tu(__VA_ARGS__) |
| #define vnmsub_vx_i8mf2_m | ( | ... | ) | __riscv_vnmsub_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i8mf4 | ( | ... | ) | __riscv_vnmsub_vx_i8mf4_tu(__VA_ARGS__) |
| #define vnmsub_vx_i8mf4_m | ( | ... | ) | __riscv_vnmsub_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vnmsub_vx_i8mf8 | ( | ... | ) | __riscv_vnmsub_vx_i8mf8_tu(__VA_ARGS__) |
| #define vnmsub_vx_i8mf8_m | ( | ... | ) | __riscv_vnmsub_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u16m1 | ( | ... | ) | __riscv_vnmsub_vx_u16m1_tu(__VA_ARGS__) |
| #define vnmsub_vx_u16m1_m | ( | ... | ) | __riscv_vnmsub_vx_u16m1_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u16m2 | ( | ... | ) | __riscv_vnmsub_vx_u16m2_tu(__VA_ARGS__) |
| #define vnmsub_vx_u16m2_m | ( | ... | ) | __riscv_vnmsub_vx_u16m2_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u16m4 | ( | ... | ) | __riscv_vnmsub_vx_u16m4_tu(__VA_ARGS__) |
| #define vnmsub_vx_u16m4_m | ( | ... | ) | __riscv_vnmsub_vx_u16m4_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u16m8 | ( | ... | ) | __riscv_vnmsub_vx_u16m8_tu(__VA_ARGS__) |
| #define vnmsub_vx_u16m8_m | ( | ... | ) | __riscv_vnmsub_vx_u16m8_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u16mf2 | ( | ... | ) | __riscv_vnmsub_vx_u16mf2_tu(__VA_ARGS__) |
| #define vnmsub_vx_u16mf2_m | ( | ... | ) | __riscv_vnmsub_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u16mf4 | ( | ... | ) | __riscv_vnmsub_vx_u16mf4_tu(__VA_ARGS__) |
| #define vnmsub_vx_u16mf4_m | ( | ... | ) | __riscv_vnmsub_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u32m1 | ( | ... | ) | __riscv_vnmsub_vx_u32m1_tu(__VA_ARGS__) |
| #define vnmsub_vx_u32m1_m | ( | ... | ) | __riscv_vnmsub_vx_u32m1_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u32m2 | ( | ... | ) | __riscv_vnmsub_vx_u32m2_tu(__VA_ARGS__) |
| #define vnmsub_vx_u32m2_m | ( | ... | ) | __riscv_vnmsub_vx_u32m2_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u32m4 | ( | ... | ) | __riscv_vnmsub_vx_u32m4_tu(__VA_ARGS__) |
| #define vnmsub_vx_u32m4_m | ( | ... | ) | __riscv_vnmsub_vx_u32m4_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u32m8 | ( | ... | ) | __riscv_vnmsub_vx_u32m8_tu(__VA_ARGS__) |
| #define vnmsub_vx_u32m8_m | ( | ... | ) | __riscv_vnmsub_vx_u32m8_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u32mf2 | ( | ... | ) | __riscv_vnmsub_vx_u32mf2_tu(__VA_ARGS__) |
| #define vnmsub_vx_u32mf2_m | ( | ... | ) | __riscv_vnmsub_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u64m1 | ( | ... | ) | __riscv_vnmsub_vx_u64m1_tu(__VA_ARGS__) |
| #define vnmsub_vx_u64m1_m | ( | ... | ) | __riscv_vnmsub_vx_u64m1_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u64m2 | ( | ... | ) | __riscv_vnmsub_vx_u64m2_tu(__VA_ARGS__) |
| #define vnmsub_vx_u64m2_m | ( | ... | ) | __riscv_vnmsub_vx_u64m2_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u64m4 | ( | ... | ) | __riscv_vnmsub_vx_u64m4_tu(__VA_ARGS__) |
| #define vnmsub_vx_u64m4_m | ( | ... | ) | __riscv_vnmsub_vx_u64m4_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u64m8 | ( | ... | ) | __riscv_vnmsub_vx_u64m8_tu(__VA_ARGS__) |
| #define vnmsub_vx_u64m8_m | ( | ... | ) | __riscv_vnmsub_vx_u64m8_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u8m1 | ( | ... | ) | __riscv_vnmsub_vx_u8m1_tu(__VA_ARGS__) |
| #define vnmsub_vx_u8m1_m | ( | ... | ) | __riscv_vnmsub_vx_u8m1_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u8m2 | ( | ... | ) | __riscv_vnmsub_vx_u8m2_tu(__VA_ARGS__) |
| #define vnmsub_vx_u8m2_m | ( | ... | ) | __riscv_vnmsub_vx_u8m2_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u8m4 | ( | ... | ) | __riscv_vnmsub_vx_u8m4_tu(__VA_ARGS__) |
| #define vnmsub_vx_u8m4_m | ( | ... | ) | __riscv_vnmsub_vx_u8m4_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u8m8 | ( | ... | ) | __riscv_vnmsub_vx_u8m8_tu(__VA_ARGS__) |
| #define vnmsub_vx_u8m8_m | ( | ... | ) | __riscv_vnmsub_vx_u8m8_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u8mf2 | ( | ... | ) | __riscv_vnmsub_vx_u8mf2_tu(__VA_ARGS__) |
| #define vnmsub_vx_u8mf2_m | ( | ... | ) | __riscv_vnmsub_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u8mf4 | ( | ... | ) | __riscv_vnmsub_vx_u8mf4_tu(__VA_ARGS__) |
| #define vnmsub_vx_u8mf4_m | ( | ... | ) | __riscv_vnmsub_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vnmsub_vx_u8mf8 | ( | ... | ) | __riscv_vnmsub_vx_u8mf8_tu(__VA_ARGS__) |
| #define vnmsub_vx_u8mf8_m | ( | ... | ) | __riscv_vnmsub_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vnot_v_i16m1 | ( | ... | ) | __riscv_vnot_v_i16m1(__VA_ARGS__) |
| #define vnot_v_i16m1_m | ( | ... | ) | __riscv_vnot_v_i16m1_tumu(__VA_ARGS__) |
| #define vnot_v_i16m2 | ( | ... | ) | __riscv_vnot_v_i16m2(__VA_ARGS__) |
| #define vnot_v_i16m2_m | ( | ... | ) | __riscv_vnot_v_i16m2_tumu(__VA_ARGS__) |
| #define vnot_v_i16m4 | ( | ... | ) | __riscv_vnot_v_i16m4(__VA_ARGS__) |
| #define vnot_v_i16m4_m | ( | ... | ) | __riscv_vnot_v_i16m4_tumu(__VA_ARGS__) |
| #define vnot_v_i16m8 | ( | ... | ) | __riscv_vnot_v_i16m8(__VA_ARGS__) |
| #define vnot_v_i16m8_m | ( | ... | ) | __riscv_vnot_v_i16m8_tumu(__VA_ARGS__) |
| #define vnot_v_i16mf2 | ( | ... | ) | __riscv_vnot_v_i16mf2(__VA_ARGS__) |
| #define vnot_v_i16mf2_m | ( | ... | ) | __riscv_vnot_v_i16mf2_tumu(__VA_ARGS__) |
| #define vnot_v_i16mf4 | ( | ... | ) | __riscv_vnot_v_i16mf4(__VA_ARGS__) |
| #define vnot_v_i16mf4_m | ( | ... | ) | __riscv_vnot_v_i16mf4_tumu(__VA_ARGS__) |
| #define vnot_v_i32m1 | ( | ... | ) | __riscv_vnot_v_i32m1(__VA_ARGS__) |
| #define vnot_v_i32m1_m | ( | ... | ) | __riscv_vnot_v_i32m1_tumu(__VA_ARGS__) |
| #define vnot_v_i32m2 | ( | ... | ) | __riscv_vnot_v_i32m2(__VA_ARGS__) |
| #define vnot_v_i32m2_m | ( | ... | ) | __riscv_vnot_v_i32m2_tumu(__VA_ARGS__) |
| #define vnot_v_i32m4 | ( | ... | ) | __riscv_vnot_v_i32m4(__VA_ARGS__) |
| #define vnot_v_i32m4_m | ( | ... | ) | __riscv_vnot_v_i32m4_tumu(__VA_ARGS__) |
| #define vnot_v_i32m8 | ( | ... | ) | __riscv_vnot_v_i32m8(__VA_ARGS__) |
| #define vnot_v_i32m8_m | ( | ... | ) | __riscv_vnot_v_i32m8_tumu(__VA_ARGS__) |
| #define vnot_v_i32mf2 | ( | ... | ) | __riscv_vnot_v_i32mf2(__VA_ARGS__) |
| #define vnot_v_i32mf2_m | ( | ... | ) | __riscv_vnot_v_i32mf2_tumu(__VA_ARGS__) |
| #define vnot_v_i64m1 | ( | ... | ) | __riscv_vnot_v_i64m1(__VA_ARGS__) |
| #define vnot_v_i64m1_m | ( | ... | ) | __riscv_vnot_v_i64m1_tumu(__VA_ARGS__) |
| #define vnot_v_i64m2 | ( | ... | ) | __riscv_vnot_v_i64m2(__VA_ARGS__) |
| #define vnot_v_i64m2_m | ( | ... | ) | __riscv_vnot_v_i64m2_tumu(__VA_ARGS__) |
| #define vnot_v_i64m4 | ( | ... | ) | __riscv_vnot_v_i64m4(__VA_ARGS__) |
| #define vnot_v_i64m4_m | ( | ... | ) | __riscv_vnot_v_i64m4_tumu(__VA_ARGS__) |
| #define vnot_v_i64m8 | ( | ... | ) | __riscv_vnot_v_i64m8(__VA_ARGS__) |
| #define vnot_v_i64m8_m | ( | ... | ) | __riscv_vnot_v_i64m8_tumu(__VA_ARGS__) |
| #define vnot_v_i8m1 | ( | ... | ) | __riscv_vnot_v_i8m1(__VA_ARGS__) |
| #define vnot_v_i8m1_m | ( | ... | ) | __riscv_vnot_v_i8m1_tumu(__VA_ARGS__) |
| #define vnot_v_i8m2 | ( | ... | ) | __riscv_vnot_v_i8m2(__VA_ARGS__) |
| #define vnot_v_i8m2_m | ( | ... | ) | __riscv_vnot_v_i8m2_tumu(__VA_ARGS__) |
| #define vnot_v_i8m4 | ( | ... | ) | __riscv_vnot_v_i8m4(__VA_ARGS__) |
| #define vnot_v_i8m4_m | ( | ... | ) | __riscv_vnot_v_i8m4_tumu(__VA_ARGS__) |
| #define vnot_v_i8m8 | ( | ... | ) | __riscv_vnot_v_i8m8(__VA_ARGS__) |
| #define vnot_v_i8m8_m | ( | ... | ) | __riscv_vnot_v_i8m8_tumu(__VA_ARGS__) |
| #define vnot_v_i8mf2 | ( | ... | ) | __riscv_vnot_v_i8mf2(__VA_ARGS__) |
| #define vnot_v_i8mf2_m | ( | ... | ) | __riscv_vnot_v_i8mf2_tumu(__VA_ARGS__) |
| #define vnot_v_i8mf4 | ( | ... | ) | __riscv_vnot_v_i8mf4(__VA_ARGS__) |
| #define vnot_v_i8mf4_m | ( | ... | ) | __riscv_vnot_v_i8mf4_tumu(__VA_ARGS__) |
| #define vnot_v_i8mf8 | ( | ... | ) | __riscv_vnot_v_i8mf8(__VA_ARGS__) |
| #define vnot_v_i8mf8_m | ( | ... | ) | __riscv_vnot_v_i8mf8_tumu(__VA_ARGS__) |
| #define vnot_v_u16m1 | ( | ... | ) | __riscv_vnot_v_u16m1(__VA_ARGS__) |
| #define vnot_v_u16m1_m | ( | ... | ) | __riscv_vnot_v_u16m1_tumu(__VA_ARGS__) |
| #define vnot_v_u16m2 | ( | ... | ) | __riscv_vnot_v_u16m2(__VA_ARGS__) |
| #define vnot_v_u16m2_m | ( | ... | ) | __riscv_vnot_v_u16m2_tumu(__VA_ARGS__) |
| #define vnot_v_u16m4 | ( | ... | ) | __riscv_vnot_v_u16m4(__VA_ARGS__) |
| #define vnot_v_u16m4_m | ( | ... | ) | __riscv_vnot_v_u16m4_tumu(__VA_ARGS__) |
| #define vnot_v_u16m8 | ( | ... | ) | __riscv_vnot_v_u16m8(__VA_ARGS__) |
| #define vnot_v_u16m8_m | ( | ... | ) | __riscv_vnot_v_u16m8_tumu(__VA_ARGS__) |
| #define vnot_v_u16mf2 | ( | ... | ) | __riscv_vnot_v_u16mf2(__VA_ARGS__) |
| #define vnot_v_u16mf2_m | ( | ... | ) | __riscv_vnot_v_u16mf2_tumu(__VA_ARGS__) |
| #define vnot_v_u16mf4 | ( | ... | ) | __riscv_vnot_v_u16mf4(__VA_ARGS__) |
| #define vnot_v_u16mf4_m | ( | ... | ) | __riscv_vnot_v_u16mf4_tumu(__VA_ARGS__) |
| #define vnot_v_u32m1 | ( | ... | ) | __riscv_vnot_v_u32m1(__VA_ARGS__) |
| #define vnot_v_u32m1_m | ( | ... | ) | __riscv_vnot_v_u32m1_tumu(__VA_ARGS__) |
| #define vnot_v_u32m2 | ( | ... | ) | __riscv_vnot_v_u32m2(__VA_ARGS__) |
| #define vnot_v_u32m2_m | ( | ... | ) | __riscv_vnot_v_u32m2_tumu(__VA_ARGS__) |
| #define vnot_v_u32m4 | ( | ... | ) | __riscv_vnot_v_u32m4(__VA_ARGS__) |
| #define vnot_v_u32m4_m | ( | ... | ) | __riscv_vnot_v_u32m4_tumu(__VA_ARGS__) |
| #define vnot_v_u32m8 | ( | ... | ) | __riscv_vnot_v_u32m8(__VA_ARGS__) |
| #define vnot_v_u32m8_m | ( | ... | ) | __riscv_vnot_v_u32m8_tumu(__VA_ARGS__) |
| #define vnot_v_u32mf2 | ( | ... | ) | __riscv_vnot_v_u32mf2(__VA_ARGS__) |
| #define vnot_v_u32mf2_m | ( | ... | ) | __riscv_vnot_v_u32mf2_tumu(__VA_ARGS__) |
| #define vnot_v_u64m1 | ( | ... | ) | __riscv_vnot_v_u64m1(__VA_ARGS__) |
| #define vnot_v_u64m1_m | ( | ... | ) | __riscv_vnot_v_u64m1_tumu(__VA_ARGS__) |
| #define vnot_v_u64m2 | ( | ... | ) | __riscv_vnot_v_u64m2(__VA_ARGS__) |
| #define vnot_v_u64m2_m | ( | ... | ) | __riscv_vnot_v_u64m2_tumu(__VA_ARGS__) |
| #define vnot_v_u64m4 | ( | ... | ) | __riscv_vnot_v_u64m4(__VA_ARGS__) |
| #define vnot_v_u64m4_m | ( | ... | ) | __riscv_vnot_v_u64m4_tumu(__VA_ARGS__) |
| #define vnot_v_u64m8 | ( | ... | ) | __riscv_vnot_v_u64m8(__VA_ARGS__) |
| #define vnot_v_u64m8_m | ( | ... | ) | __riscv_vnot_v_u64m8_tumu(__VA_ARGS__) |
| #define vnot_v_u8m1 | ( | ... | ) | __riscv_vnot_v_u8m1(__VA_ARGS__) |
| #define vnot_v_u8m1_m | ( | ... | ) | __riscv_vnot_v_u8m1_tumu(__VA_ARGS__) |
| #define vnot_v_u8m2 | ( | ... | ) | __riscv_vnot_v_u8m2(__VA_ARGS__) |
| #define vnot_v_u8m2_m | ( | ... | ) | __riscv_vnot_v_u8m2_tumu(__VA_ARGS__) |
| #define vnot_v_u8m4 | ( | ... | ) | __riscv_vnot_v_u8m4(__VA_ARGS__) |
| #define vnot_v_u8m4_m | ( | ... | ) | __riscv_vnot_v_u8m4_tumu(__VA_ARGS__) |
| #define vnot_v_u8m8 | ( | ... | ) | __riscv_vnot_v_u8m8(__VA_ARGS__) |
| #define vnot_v_u8m8_m | ( | ... | ) | __riscv_vnot_v_u8m8_tumu(__VA_ARGS__) |
| #define vnot_v_u8mf2 | ( | ... | ) | __riscv_vnot_v_u8mf2(__VA_ARGS__) |
| #define vnot_v_u8mf2_m | ( | ... | ) | __riscv_vnot_v_u8mf2_tumu(__VA_ARGS__) |
| #define vnot_v_u8mf4 | ( | ... | ) | __riscv_vnot_v_u8mf4(__VA_ARGS__) |
| #define vnot_v_u8mf4_m | ( | ... | ) | __riscv_vnot_v_u8mf4_tumu(__VA_ARGS__) |
| #define vnot_v_u8mf8 | ( | ... | ) | __riscv_vnot_v_u8mf8(__VA_ARGS__) |
| #define vnot_v_u8mf8_m | ( | ... | ) | __riscv_vnot_v_u8mf8_tumu(__VA_ARGS__) |
| #define vnsra_wv_i16m1 | ( | ... | ) | __riscv_vnsra_wv_i16m1(__VA_ARGS__) |
| #define vnsra_wv_i16m1_m | ( | ... | ) | __riscv_vnsra_wv_i16m1_tumu(__VA_ARGS__) |
| #define vnsra_wv_i16m2 | ( | ... | ) | __riscv_vnsra_wv_i16m2(__VA_ARGS__) |
| #define vnsra_wv_i16m2_m | ( | ... | ) | __riscv_vnsra_wv_i16m2_tumu(__VA_ARGS__) |
| #define vnsra_wv_i16m4 | ( | ... | ) | __riscv_vnsra_wv_i16m4(__VA_ARGS__) |
| #define vnsra_wv_i16m4_m | ( | ... | ) | __riscv_vnsra_wv_i16m4_tumu(__VA_ARGS__) |
| #define vnsra_wv_i16mf2 | ( | ... | ) | __riscv_vnsra_wv_i16mf2(__VA_ARGS__) |
| #define vnsra_wv_i16mf2_m | ( | ... | ) | __riscv_vnsra_wv_i16mf2_tumu(__VA_ARGS__) |
| #define vnsra_wv_i16mf4 | ( | ... | ) | __riscv_vnsra_wv_i16mf4(__VA_ARGS__) |
| #define vnsra_wv_i16mf4_m | ( | ... | ) | __riscv_vnsra_wv_i16mf4_tumu(__VA_ARGS__) |
| #define vnsra_wv_i32m1 | ( | ... | ) | __riscv_vnsra_wv_i32m1(__VA_ARGS__) |
| #define vnsra_wv_i32m1_m | ( | ... | ) | __riscv_vnsra_wv_i32m1_tumu(__VA_ARGS__) |
| #define vnsra_wv_i32m2 | ( | ... | ) | __riscv_vnsra_wv_i32m2(__VA_ARGS__) |
| #define vnsra_wv_i32m2_m | ( | ... | ) | __riscv_vnsra_wv_i32m2_tumu(__VA_ARGS__) |
| #define vnsra_wv_i32m4 | ( | ... | ) | __riscv_vnsra_wv_i32m4(__VA_ARGS__) |
| #define vnsra_wv_i32m4_m | ( | ... | ) | __riscv_vnsra_wv_i32m4_tumu(__VA_ARGS__) |
| #define vnsra_wv_i32mf2 | ( | ... | ) | __riscv_vnsra_wv_i32mf2(__VA_ARGS__) |
| #define vnsra_wv_i32mf2_m | ( | ... | ) | __riscv_vnsra_wv_i32mf2_tumu(__VA_ARGS__) |
| #define vnsra_wv_i8m1 | ( | ... | ) | __riscv_vnsra_wv_i8m1(__VA_ARGS__) |
| #define vnsra_wv_i8m1_m | ( | ... | ) | __riscv_vnsra_wv_i8m1_tumu(__VA_ARGS__) |
| #define vnsra_wv_i8m2 | ( | ... | ) | __riscv_vnsra_wv_i8m2(__VA_ARGS__) |
| #define vnsra_wv_i8m2_m | ( | ... | ) | __riscv_vnsra_wv_i8m2_tumu(__VA_ARGS__) |
| #define vnsra_wv_i8m4 | ( | ... | ) | __riscv_vnsra_wv_i8m4(__VA_ARGS__) |
| #define vnsra_wv_i8m4_m | ( | ... | ) | __riscv_vnsra_wv_i8m4_tumu(__VA_ARGS__) |
| #define vnsra_wv_i8mf2 | ( | ... | ) | __riscv_vnsra_wv_i8mf2(__VA_ARGS__) |
| #define vnsra_wv_i8mf2_m | ( | ... | ) | __riscv_vnsra_wv_i8mf2_tumu(__VA_ARGS__) |
| #define vnsra_wv_i8mf4 | ( | ... | ) | __riscv_vnsra_wv_i8mf4(__VA_ARGS__) |
| #define vnsra_wv_i8mf4_m | ( | ... | ) | __riscv_vnsra_wv_i8mf4_tumu(__VA_ARGS__) |
| #define vnsra_wv_i8mf8 | ( | ... | ) | __riscv_vnsra_wv_i8mf8(__VA_ARGS__) |
| #define vnsra_wv_i8mf8_m | ( | ... | ) | __riscv_vnsra_wv_i8mf8_tumu(__VA_ARGS__) |
| #define vnsra_wx_i16m1 | ( | ... | ) | __riscv_vnsra_wx_i16m1(__VA_ARGS__) |
| #define vnsra_wx_i16m1_m | ( | ... | ) | __riscv_vnsra_wx_i16m1_tumu(__VA_ARGS__) |
| #define vnsra_wx_i16m2 | ( | ... | ) | __riscv_vnsra_wx_i16m2(__VA_ARGS__) |
| #define vnsra_wx_i16m2_m | ( | ... | ) | __riscv_vnsra_wx_i16m2_tumu(__VA_ARGS__) |
| #define vnsra_wx_i16m4 | ( | ... | ) | __riscv_vnsra_wx_i16m4(__VA_ARGS__) |
| #define vnsra_wx_i16m4_m | ( | ... | ) | __riscv_vnsra_wx_i16m4_tumu(__VA_ARGS__) |
| #define vnsra_wx_i16mf2 | ( | ... | ) | __riscv_vnsra_wx_i16mf2(__VA_ARGS__) |
| #define vnsra_wx_i16mf2_m | ( | ... | ) | __riscv_vnsra_wx_i16mf2_tumu(__VA_ARGS__) |
| #define vnsra_wx_i16mf4 | ( | ... | ) | __riscv_vnsra_wx_i16mf4(__VA_ARGS__) |
| #define vnsra_wx_i16mf4_m | ( | ... | ) | __riscv_vnsra_wx_i16mf4_tumu(__VA_ARGS__) |
| #define vnsra_wx_i32m1 | ( | ... | ) | __riscv_vnsra_wx_i32m1(__VA_ARGS__) |
| #define vnsra_wx_i32m1_m | ( | ... | ) | __riscv_vnsra_wx_i32m1_tumu(__VA_ARGS__) |
| #define vnsra_wx_i32m2 | ( | ... | ) | __riscv_vnsra_wx_i32m2(__VA_ARGS__) |
| #define vnsra_wx_i32m2_m | ( | ... | ) | __riscv_vnsra_wx_i32m2_tumu(__VA_ARGS__) |
| #define vnsra_wx_i32m4 | ( | ... | ) | __riscv_vnsra_wx_i32m4(__VA_ARGS__) |
| #define vnsra_wx_i32m4_m | ( | ... | ) | __riscv_vnsra_wx_i32m4_tumu(__VA_ARGS__) |
| #define vnsra_wx_i32mf2 | ( | ... | ) | __riscv_vnsra_wx_i32mf2(__VA_ARGS__) |
| #define vnsra_wx_i32mf2_m | ( | ... | ) | __riscv_vnsra_wx_i32mf2_tumu(__VA_ARGS__) |
| #define vnsra_wx_i8m1 | ( | ... | ) | __riscv_vnsra_wx_i8m1(__VA_ARGS__) |
| #define vnsra_wx_i8m1_m | ( | ... | ) | __riscv_vnsra_wx_i8m1_tumu(__VA_ARGS__) |
| #define vnsra_wx_i8m2 | ( | ... | ) | __riscv_vnsra_wx_i8m2(__VA_ARGS__) |
| #define vnsra_wx_i8m2_m | ( | ... | ) | __riscv_vnsra_wx_i8m2_tumu(__VA_ARGS__) |
| #define vnsra_wx_i8m4 | ( | ... | ) | __riscv_vnsra_wx_i8m4(__VA_ARGS__) |
| #define vnsra_wx_i8m4_m | ( | ... | ) | __riscv_vnsra_wx_i8m4_tumu(__VA_ARGS__) |
| #define vnsra_wx_i8mf2 | ( | ... | ) | __riscv_vnsra_wx_i8mf2(__VA_ARGS__) |
| #define vnsra_wx_i8mf2_m | ( | ... | ) | __riscv_vnsra_wx_i8mf2_tumu(__VA_ARGS__) |
| #define vnsra_wx_i8mf4 | ( | ... | ) | __riscv_vnsra_wx_i8mf4(__VA_ARGS__) |
| #define vnsra_wx_i8mf4_m | ( | ... | ) | __riscv_vnsra_wx_i8mf4_tumu(__VA_ARGS__) |
| #define vnsra_wx_i8mf8 | ( | ... | ) | __riscv_vnsra_wx_i8mf8(__VA_ARGS__) |
| #define vnsra_wx_i8mf8_m | ( | ... | ) | __riscv_vnsra_wx_i8mf8_tumu(__VA_ARGS__) |
| #define vnsrl_wv_u16m1 | ( | ... | ) | __riscv_vnsrl_wv_u16m1(__VA_ARGS__) |
| #define vnsrl_wv_u16m1_m | ( | ... | ) | __riscv_vnsrl_wv_u16m1_tumu(__VA_ARGS__) |
| #define vnsrl_wv_u16m2 | ( | ... | ) | __riscv_vnsrl_wv_u16m2(__VA_ARGS__) |
| #define vnsrl_wv_u16m2_m | ( | ... | ) | __riscv_vnsrl_wv_u16m2_tumu(__VA_ARGS__) |
| #define vnsrl_wv_u16m4 | ( | ... | ) | __riscv_vnsrl_wv_u16m4(__VA_ARGS__) |
| #define vnsrl_wv_u16m4_m | ( | ... | ) | __riscv_vnsrl_wv_u16m4_tumu(__VA_ARGS__) |
| #define vnsrl_wv_u16mf2 | ( | ... | ) | __riscv_vnsrl_wv_u16mf2(__VA_ARGS__) |
| #define vnsrl_wv_u16mf2_m | ( | ... | ) | __riscv_vnsrl_wv_u16mf2_tumu(__VA_ARGS__) |
| #define vnsrl_wv_u16mf4 | ( | ... | ) | __riscv_vnsrl_wv_u16mf4(__VA_ARGS__) |
| #define vnsrl_wv_u16mf4_m | ( | ... | ) | __riscv_vnsrl_wv_u16mf4_tumu(__VA_ARGS__) |
| #define vnsrl_wv_u32m1 | ( | ... | ) | __riscv_vnsrl_wv_u32m1(__VA_ARGS__) |
| #define vnsrl_wv_u32m1_m | ( | ... | ) | __riscv_vnsrl_wv_u32m1_tumu(__VA_ARGS__) |
| #define vnsrl_wv_u32m2 | ( | ... | ) | __riscv_vnsrl_wv_u32m2(__VA_ARGS__) |
| #define vnsrl_wv_u32m2_m | ( | ... | ) | __riscv_vnsrl_wv_u32m2_tumu(__VA_ARGS__) |
| #define vnsrl_wv_u32m4 | ( | ... | ) | __riscv_vnsrl_wv_u32m4(__VA_ARGS__) |
| #define vnsrl_wv_u32m4_m | ( | ... | ) | __riscv_vnsrl_wv_u32m4_tumu(__VA_ARGS__) |
| #define vnsrl_wv_u32mf2 | ( | ... | ) | __riscv_vnsrl_wv_u32mf2(__VA_ARGS__) |
| #define vnsrl_wv_u32mf2_m | ( | ... | ) | __riscv_vnsrl_wv_u32mf2_tumu(__VA_ARGS__) |
| #define vnsrl_wv_u8m1 | ( | ... | ) | __riscv_vnsrl_wv_u8m1(__VA_ARGS__) |
| #define vnsrl_wv_u8m1_m | ( | ... | ) | __riscv_vnsrl_wv_u8m1_tumu(__VA_ARGS__) |
| #define vnsrl_wv_u8m2 | ( | ... | ) | __riscv_vnsrl_wv_u8m2(__VA_ARGS__) |
| #define vnsrl_wv_u8m2_m | ( | ... | ) | __riscv_vnsrl_wv_u8m2_tumu(__VA_ARGS__) |
| #define vnsrl_wv_u8m4 | ( | ... | ) | __riscv_vnsrl_wv_u8m4(__VA_ARGS__) |
| #define vnsrl_wv_u8m4_m | ( | ... | ) | __riscv_vnsrl_wv_u8m4_tumu(__VA_ARGS__) |
| #define vnsrl_wv_u8mf2 | ( | ... | ) | __riscv_vnsrl_wv_u8mf2(__VA_ARGS__) |
| #define vnsrl_wv_u8mf2_m | ( | ... | ) | __riscv_vnsrl_wv_u8mf2_tumu(__VA_ARGS__) |
| #define vnsrl_wv_u8mf4 | ( | ... | ) | __riscv_vnsrl_wv_u8mf4(__VA_ARGS__) |
| #define vnsrl_wv_u8mf4_m | ( | ... | ) | __riscv_vnsrl_wv_u8mf4_tumu(__VA_ARGS__) |
| #define vnsrl_wv_u8mf8 | ( | ... | ) | __riscv_vnsrl_wv_u8mf8(__VA_ARGS__) |
| #define vnsrl_wv_u8mf8_m | ( | ... | ) | __riscv_vnsrl_wv_u8mf8_tumu(__VA_ARGS__) |
| #define vnsrl_wx_u16m1 | ( | ... | ) | __riscv_vnsrl_wx_u16m1(__VA_ARGS__) |
| #define vnsrl_wx_u16m1_m | ( | ... | ) | __riscv_vnsrl_wx_u16m1_tumu(__VA_ARGS__) |
| #define vnsrl_wx_u16m2 | ( | ... | ) | __riscv_vnsrl_wx_u16m2(__VA_ARGS__) |
| #define vnsrl_wx_u16m2_m | ( | ... | ) | __riscv_vnsrl_wx_u16m2_tumu(__VA_ARGS__) |
| #define vnsrl_wx_u16m4 | ( | ... | ) | __riscv_vnsrl_wx_u16m4(__VA_ARGS__) |
| #define vnsrl_wx_u16m4_m | ( | ... | ) | __riscv_vnsrl_wx_u16m4_tumu(__VA_ARGS__) |
| #define vnsrl_wx_u16mf2 | ( | ... | ) | __riscv_vnsrl_wx_u16mf2(__VA_ARGS__) |
| #define vnsrl_wx_u16mf2_m | ( | ... | ) | __riscv_vnsrl_wx_u16mf2_tumu(__VA_ARGS__) |
| #define vnsrl_wx_u16mf4 | ( | ... | ) | __riscv_vnsrl_wx_u16mf4(__VA_ARGS__) |
| #define vnsrl_wx_u16mf4_m | ( | ... | ) | __riscv_vnsrl_wx_u16mf4_tumu(__VA_ARGS__) |
| #define vnsrl_wx_u32m1 | ( | ... | ) | __riscv_vnsrl_wx_u32m1(__VA_ARGS__) |
| #define vnsrl_wx_u32m1_m | ( | ... | ) | __riscv_vnsrl_wx_u32m1_tumu(__VA_ARGS__) |
| #define vnsrl_wx_u32m2 | ( | ... | ) | __riscv_vnsrl_wx_u32m2(__VA_ARGS__) |
| #define vnsrl_wx_u32m2_m | ( | ... | ) | __riscv_vnsrl_wx_u32m2_tumu(__VA_ARGS__) |
| #define vnsrl_wx_u32m4 | ( | ... | ) | __riscv_vnsrl_wx_u32m4(__VA_ARGS__) |
| #define vnsrl_wx_u32m4_m | ( | ... | ) | __riscv_vnsrl_wx_u32m4_tumu(__VA_ARGS__) |
| #define vnsrl_wx_u32mf2 | ( | ... | ) | __riscv_vnsrl_wx_u32mf2(__VA_ARGS__) |
| #define vnsrl_wx_u32mf2_m | ( | ... | ) | __riscv_vnsrl_wx_u32mf2_tumu(__VA_ARGS__) |
| #define vnsrl_wx_u8m1 | ( | ... | ) | __riscv_vnsrl_wx_u8m1(__VA_ARGS__) |
| #define vnsrl_wx_u8m1_m | ( | ... | ) | __riscv_vnsrl_wx_u8m1_tumu(__VA_ARGS__) |
| #define vnsrl_wx_u8m2 | ( | ... | ) | __riscv_vnsrl_wx_u8m2(__VA_ARGS__) |
| #define vnsrl_wx_u8m2_m | ( | ... | ) | __riscv_vnsrl_wx_u8m2_tumu(__VA_ARGS__) |
| #define vnsrl_wx_u8m4 | ( | ... | ) | __riscv_vnsrl_wx_u8m4(__VA_ARGS__) |
| #define vnsrl_wx_u8m4_m | ( | ... | ) | __riscv_vnsrl_wx_u8m4_tumu(__VA_ARGS__) |
| #define vnsrl_wx_u8mf2 | ( | ... | ) | __riscv_vnsrl_wx_u8mf2(__VA_ARGS__) |
| #define vnsrl_wx_u8mf2_m | ( | ... | ) | __riscv_vnsrl_wx_u8mf2_tumu(__VA_ARGS__) |
| #define vnsrl_wx_u8mf4 | ( | ... | ) | __riscv_vnsrl_wx_u8mf4(__VA_ARGS__) |
| #define vnsrl_wx_u8mf4_m | ( | ... | ) | __riscv_vnsrl_wx_u8mf4_tumu(__VA_ARGS__) |
| #define vnsrl_wx_u8mf8 | ( | ... | ) | __riscv_vnsrl_wx_u8mf8(__VA_ARGS__) |
| #define vnsrl_wx_u8mf8_m | ( | ... | ) | __riscv_vnsrl_wx_u8mf8_tumu(__VA_ARGS__) |
| #define vor_vv_i16m1 | ( | ... | ) | __riscv_vor_vv_i16m1(__VA_ARGS__) |
| #define vor_vv_i16m1_m | ( | ... | ) | __riscv_vor_vv_i16m1_tumu(__VA_ARGS__) |
| #define vor_vv_i16m2 | ( | ... | ) | __riscv_vor_vv_i16m2(__VA_ARGS__) |
| #define vor_vv_i16m2_m | ( | ... | ) | __riscv_vor_vv_i16m2_tumu(__VA_ARGS__) |
| #define vor_vv_i16m4 | ( | ... | ) | __riscv_vor_vv_i16m4(__VA_ARGS__) |
| #define vor_vv_i16m4_m | ( | ... | ) | __riscv_vor_vv_i16m4_tumu(__VA_ARGS__) |
| #define vor_vv_i16m8 | ( | ... | ) | __riscv_vor_vv_i16m8(__VA_ARGS__) |
| #define vor_vv_i16m8_m | ( | ... | ) | __riscv_vor_vv_i16m8_tumu(__VA_ARGS__) |
| #define vor_vv_i16mf2 | ( | ... | ) | __riscv_vor_vv_i16mf2(__VA_ARGS__) |
| #define vor_vv_i16mf2_m | ( | ... | ) | __riscv_vor_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vor_vv_i16mf4 | ( | ... | ) | __riscv_vor_vv_i16mf4(__VA_ARGS__) |
| #define vor_vv_i16mf4_m | ( | ... | ) | __riscv_vor_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vor_vv_i32m1 | ( | ... | ) | __riscv_vor_vv_i32m1(__VA_ARGS__) |
| #define vor_vv_i32m1_m | ( | ... | ) | __riscv_vor_vv_i32m1_tumu(__VA_ARGS__) |
| #define vor_vv_i32m2 | ( | ... | ) | __riscv_vor_vv_i32m2(__VA_ARGS__) |
| #define vor_vv_i32m2_m | ( | ... | ) | __riscv_vor_vv_i32m2_tumu(__VA_ARGS__) |
| #define vor_vv_i32m4 | ( | ... | ) | __riscv_vor_vv_i32m4(__VA_ARGS__) |
| #define vor_vv_i32m4_m | ( | ... | ) | __riscv_vor_vv_i32m4_tumu(__VA_ARGS__) |
| #define vor_vv_i32m8 | ( | ... | ) | __riscv_vor_vv_i32m8(__VA_ARGS__) |
| #define vor_vv_i32m8_m | ( | ... | ) | __riscv_vor_vv_i32m8_tumu(__VA_ARGS__) |
| #define vor_vv_i32mf2 | ( | ... | ) | __riscv_vor_vv_i32mf2(__VA_ARGS__) |
| #define vor_vv_i32mf2_m | ( | ... | ) | __riscv_vor_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vor_vv_i64m1 | ( | ... | ) | __riscv_vor_vv_i64m1(__VA_ARGS__) |
| #define vor_vv_i64m1_m | ( | ... | ) | __riscv_vor_vv_i64m1_tumu(__VA_ARGS__) |
| #define vor_vv_i64m2 | ( | ... | ) | __riscv_vor_vv_i64m2(__VA_ARGS__) |
| #define vor_vv_i64m2_m | ( | ... | ) | __riscv_vor_vv_i64m2_tumu(__VA_ARGS__) |
| #define vor_vv_i64m4 | ( | ... | ) | __riscv_vor_vv_i64m4(__VA_ARGS__) |
| #define vor_vv_i64m4_m | ( | ... | ) | __riscv_vor_vv_i64m4_tumu(__VA_ARGS__) |
| #define vor_vv_i64m8 | ( | ... | ) | __riscv_vor_vv_i64m8(__VA_ARGS__) |
| #define vor_vv_i64m8_m | ( | ... | ) | __riscv_vor_vv_i64m8_tumu(__VA_ARGS__) |
| #define vor_vv_i8m1 | ( | ... | ) | __riscv_vor_vv_i8m1(__VA_ARGS__) |
| #define vor_vv_i8m1_m | ( | ... | ) | __riscv_vor_vv_i8m1_tumu(__VA_ARGS__) |
| #define vor_vv_i8m2 | ( | ... | ) | __riscv_vor_vv_i8m2(__VA_ARGS__) |
| #define vor_vv_i8m2_m | ( | ... | ) | __riscv_vor_vv_i8m2_tumu(__VA_ARGS__) |
| #define vor_vv_i8m4 | ( | ... | ) | __riscv_vor_vv_i8m4(__VA_ARGS__) |
| #define vor_vv_i8m4_m | ( | ... | ) | __riscv_vor_vv_i8m4_tumu(__VA_ARGS__) |
| #define vor_vv_i8m8 | ( | ... | ) | __riscv_vor_vv_i8m8(__VA_ARGS__) |
| #define vor_vv_i8m8_m | ( | ... | ) | __riscv_vor_vv_i8m8_tumu(__VA_ARGS__) |
| #define vor_vv_i8mf2 | ( | ... | ) | __riscv_vor_vv_i8mf2(__VA_ARGS__) |
| #define vor_vv_i8mf2_m | ( | ... | ) | __riscv_vor_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vor_vv_i8mf4 | ( | ... | ) | __riscv_vor_vv_i8mf4(__VA_ARGS__) |
| #define vor_vv_i8mf4_m | ( | ... | ) | __riscv_vor_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vor_vv_i8mf8 | ( | ... | ) | __riscv_vor_vv_i8mf8(__VA_ARGS__) |
| #define vor_vv_i8mf8_m | ( | ... | ) | __riscv_vor_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vor_vv_u16m1 | ( | ... | ) | __riscv_vor_vv_u16m1(__VA_ARGS__) |
| #define vor_vv_u16m1_m | ( | ... | ) | __riscv_vor_vv_u16m1_tumu(__VA_ARGS__) |
| #define vor_vv_u16m2 | ( | ... | ) | __riscv_vor_vv_u16m2(__VA_ARGS__) |
| #define vor_vv_u16m2_m | ( | ... | ) | __riscv_vor_vv_u16m2_tumu(__VA_ARGS__) |
| #define vor_vv_u16m4 | ( | ... | ) | __riscv_vor_vv_u16m4(__VA_ARGS__) |
| #define vor_vv_u16m4_m | ( | ... | ) | __riscv_vor_vv_u16m4_tumu(__VA_ARGS__) |
| #define vor_vv_u16m8 | ( | ... | ) | __riscv_vor_vv_u16m8(__VA_ARGS__) |
| #define vor_vv_u16m8_m | ( | ... | ) | __riscv_vor_vv_u16m8_tumu(__VA_ARGS__) |
| #define vor_vv_u16mf2 | ( | ... | ) | __riscv_vor_vv_u16mf2(__VA_ARGS__) |
| #define vor_vv_u16mf2_m | ( | ... | ) | __riscv_vor_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vor_vv_u16mf4 | ( | ... | ) | __riscv_vor_vv_u16mf4(__VA_ARGS__) |
| #define vor_vv_u16mf4_m | ( | ... | ) | __riscv_vor_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vor_vv_u32m1 | ( | ... | ) | __riscv_vor_vv_u32m1(__VA_ARGS__) |
| #define vor_vv_u32m1_m | ( | ... | ) | __riscv_vor_vv_u32m1_tumu(__VA_ARGS__) |
| #define vor_vv_u32m2 | ( | ... | ) | __riscv_vor_vv_u32m2(__VA_ARGS__) |
| #define vor_vv_u32m2_m | ( | ... | ) | __riscv_vor_vv_u32m2_tumu(__VA_ARGS__) |
| #define vor_vv_u32m4 | ( | ... | ) | __riscv_vor_vv_u32m4(__VA_ARGS__) |
| #define vor_vv_u32m4_m | ( | ... | ) | __riscv_vor_vv_u32m4_tumu(__VA_ARGS__) |
| #define vor_vv_u32m8 | ( | ... | ) | __riscv_vor_vv_u32m8(__VA_ARGS__) |
| #define vor_vv_u32m8_m | ( | ... | ) | __riscv_vor_vv_u32m8_tumu(__VA_ARGS__) |
| #define vor_vv_u32mf2 | ( | ... | ) | __riscv_vor_vv_u32mf2(__VA_ARGS__) |
| #define vor_vv_u32mf2_m | ( | ... | ) | __riscv_vor_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vor_vv_u64m1 | ( | ... | ) | __riscv_vor_vv_u64m1(__VA_ARGS__) |
| #define vor_vv_u64m1_m | ( | ... | ) | __riscv_vor_vv_u64m1_tumu(__VA_ARGS__) |
| #define vor_vv_u64m2 | ( | ... | ) | __riscv_vor_vv_u64m2(__VA_ARGS__) |
| #define vor_vv_u64m2_m | ( | ... | ) | __riscv_vor_vv_u64m2_tumu(__VA_ARGS__) |
| #define vor_vv_u64m4 | ( | ... | ) | __riscv_vor_vv_u64m4(__VA_ARGS__) |
| #define vor_vv_u64m4_m | ( | ... | ) | __riscv_vor_vv_u64m4_tumu(__VA_ARGS__) |
| #define vor_vv_u64m8 | ( | ... | ) | __riscv_vor_vv_u64m8(__VA_ARGS__) |
| #define vor_vv_u64m8_m | ( | ... | ) | __riscv_vor_vv_u64m8_tumu(__VA_ARGS__) |
| #define vor_vv_u8m1 | ( | ... | ) | __riscv_vor_vv_u8m1(__VA_ARGS__) |
| #define vor_vv_u8m1_m | ( | ... | ) | __riscv_vor_vv_u8m1_tumu(__VA_ARGS__) |
| #define vor_vv_u8m2 | ( | ... | ) | __riscv_vor_vv_u8m2(__VA_ARGS__) |
| #define vor_vv_u8m2_m | ( | ... | ) | __riscv_vor_vv_u8m2_tumu(__VA_ARGS__) |
| #define vor_vv_u8m4 | ( | ... | ) | __riscv_vor_vv_u8m4(__VA_ARGS__) |
| #define vor_vv_u8m4_m | ( | ... | ) | __riscv_vor_vv_u8m4_tumu(__VA_ARGS__) |
| #define vor_vv_u8m8 | ( | ... | ) | __riscv_vor_vv_u8m8(__VA_ARGS__) |
| #define vor_vv_u8m8_m | ( | ... | ) | __riscv_vor_vv_u8m8_tumu(__VA_ARGS__) |
| #define vor_vv_u8mf2 | ( | ... | ) | __riscv_vor_vv_u8mf2(__VA_ARGS__) |
| #define vor_vv_u8mf2_m | ( | ... | ) | __riscv_vor_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vor_vv_u8mf4 | ( | ... | ) | __riscv_vor_vv_u8mf4(__VA_ARGS__) |
| #define vor_vv_u8mf4_m | ( | ... | ) | __riscv_vor_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vor_vv_u8mf8 | ( | ... | ) | __riscv_vor_vv_u8mf8(__VA_ARGS__) |
| #define vor_vv_u8mf8_m | ( | ... | ) | __riscv_vor_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vor_vx_i16m1 | ( | ... | ) | __riscv_vor_vx_i16m1(__VA_ARGS__) |
| #define vor_vx_i16m1_m | ( | ... | ) | __riscv_vor_vx_i16m1_tumu(__VA_ARGS__) |
| #define vor_vx_i16m2 | ( | ... | ) | __riscv_vor_vx_i16m2(__VA_ARGS__) |
| #define vor_vx_i16m2_m | ( | ... | ) | __riscv_vor_vx_i16m2_tumu(__VA_ARGS__) |
| #define vor_vx_i16m4 | ( | ... | ) | __riscv_vor_vx_i16m4(__VA_ARGS__) |
| #define vor_vx_i16m4_m | ( | ... | ) | __riscv_vor_vx_i16m4_tumu(__VA_ARGS__) |
| #define vor_vx_i16m8 | ( | ... | ) | __riscv_vor_vx_i16m8(__VA_ARGS__) |
| #define vor_vx_i16m8_m | ( | ... | ) | __riscv_vor_vx_i16m8_tumu(__VA_ARGS__) |
| #define vor_vx_i16mf2 | ( | ... | ) | __riscv_vor_vx_i16mf2(__VA_ARGS__) |
| #define vor_vx_i16mf2_m | ( | ... | ) | __riscv_vor_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vor_vx_i16mf4 | ( | ... | ) | __riscv_vor_vx_i16mf4(__VA_ARGS__) |
| #define vor_vx_i16mf4_m | ( | ... | ) | __riscv_vor_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vor_vx_i32m1 | ( | ... | ) | __riscv_vor_vx_i32m1(__VA_ARGS__) |
| #define vor_vx_i32m1_m | ( | ... | ) | __riscv_vor_vx_i32m1_tumu(__VA_ARGS__) |
| #define vor_vx_i32m2 | ( | ... | ) | __riscv_vor_vx_i32m2(__VA_ARGS__) |
| #define vor_vx_i32m2_m | ( | ... | ) | __riscv_vor_vx_i32m2_tumu(__VA_ARGS__) |
| #define vor_vx_i32m4 | ( | ... | ) | __riscv_vor_vx_i32m4(__VA_ARGS__) |
| #define vor_vx_i32m4_m | ( | ... | ) | __riscv_vor_vx_i32m4_tumu(__VA_ARGS__) |
| #define vor_vx_i32m8 | ( | ... | ) | __riscv_vor_vx_i32m8(__VA_ARGS__) |
| #define vor_vx_i32m8_m | ( | ... | ) | __riscv_vor_vx_i32m8_tumu(__VA_ARGS__) |
| #define vor_vx_i32mf2 | ( | ... | ) | __riscv_vor_vx_i32mf2(__VA_ARGS__) |
| #define vor_vx_i32mf2_m | ( | ... | ) | __riscv_vor_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vor_vx_i64m1 | ( | ... | ) | __riscv_vor_vx_i64m1(__VA_ARGS__) |
| #define vor_vx_i64m1_m | ( | ... | ) | __riscv_vor_vx_i64m1_tumu(__VA_ARGS__) |
| #define vor_vx_i64m2 | ( | ... | ) | __riscv_vor_vx_i64m2(__VA_ARGS__) |
| #define vor_vx_i64m2_m | ( | ... | ) | __riscv_vor_vx_i64m2_tumu(__VA_ARGS__) |
| #define vor_vx_i64m4 | ( | ... | ) | __riscv_vor_vx_i64m4(__VA_ARGS__) |
| #define vor_vx_i64m4_m | ( | ... | ) | __riscv_vor_vx_i64m4_tumu(__VA_ARGS__) |
| #define vor_vx_i64m8 | ( | ... | ) | __riscv_vor_vx_i64m8(__VA_ARGS__) |
| #define vor_vx_i64m8_m | ( | ... | ) | __riscv_vor_vx_i64m8_tumu(__VA_ARGS__) |
| #define vor_vx_i8m1 | ( | ... | ) | __riscv_vor_vx_i8m1(__VA_ARGS__) |
| #define vor_vx_i8m1_m | ( | ... | ) | __riscv_vor_vx_i8m1_tumu(__VA_ARGS__) |
| #define vor_vx_i8m2 | ( | ... | ) | __riscv_vor_vx_i8m2(__VA_ARGS__) |
| #define vor_vx_i8m2_m | ( | ... | ) | __riscv_vor_vx_i8m2_tumu(__VA_ARGS__) |
| #define vor_vx_i8m4 | ( | ... | ) | __riscv_vor_vx_i8m4(__VA_ARGS__) |
| #define vor_vx_i8m4_m | ( | ... | ) | __riscv_vor_vx_i8m4_tumu(__VA_ARGS__) |
| #define vor_vx_i8m8 | ( | ... | ) | __riscv_vor_vx_i8m8(__VA_ARGS__) |
| #define vor_vx_i8m8_m | ( | ... | ) | __riscv_vor_vx_i8m8_tumu(__VA_ARGS__) |
| #define vor_vx_i8mf2 | ( | ... | ) | __riscv_vor_vx_i8mf2(__VA_ARGS__) |
| #define vor_vx_i8mf2_m | ( | ... | ) | __riscv_vor_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vor_vx_i8mf4 | ( | ... | ) | __riscv_vor_vx_i8mf4(__VA_ARGS__) |
| #define vor_vx_i8mf4_m | ( | ... | ) | __riscv_vor_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vor_vx_i8mf8 | ( | ... | ) | __riscv_vor_vx_i8mf8(__VA_ARGS__) |
| #define vor_vx_i8mf8_m | ( | ... | ) | __riscv_vor_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vor_vx_u16m1 | ( | ... | ) | __riscv_vor_vx_u16m1(__VA_ARGS__) |
| #define vor_vx_u16m1_m | ( | ... | ) | __riscv_vor_vx_u16m1_tumu(__VA_ARGS__) |
| #define vor_vx_u16m2 | ( | ... | ) | __riscv_vor_vx_u16m2(__VA_ARGS__) |
| #define vor_vx_u16m2_m | ( | ... | ) | __riscv_vor_vx_u16m2_tumu(__VA_ARGS__) |
| #define vor_vx_u16m4 | ( | ... | ) | __riscv_vor_vx_u16m4(__VA_ARGS__) |
| #define vor_vx_u16m4_m | ( | ... | ) | __riscv_vor_vx_u16m4_tumu(__VA_ARGS__) |
| #define vor_vx_u16m8 | ( | ... | ) | __riscv_vor_vx_u16m8(__VA_ARGS__) |
| #define vor_vx_u16m8_m | ( | ... | ) | __riscv_vor_vx_u16m8_tumu(__VA_ARGS__) |
| #define vor_vx_u16mf2 | ( | ... | ) | __riscv_vor_vx_u16mf2(__VA_ARGS__) |
| #define vor_vx_u16mf2_m | ( | ... | ) | __riscv_vor_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vor_vx_u16mf4 | ( | ... | ) | __riscv_vor_vx_u16mf4(__VA_ARGS__) |
| #define vor_vx_u16mf4_m | ( | ... | ) | __riscv_vor_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vor_vx_u32m1 | ( | ... | ) | __riscv_vor_vx_u32m1(__VA_ARGS__) |
| #define vor_vx_u32m1_m | ( | ... | ) | __riscv_vor_vx_u32m1_tumu(__VA_ARGS__) |
| #define vor_vx_u32m2 | ( | ... | ) | __riscv_vor_vx_u32m2(__VA_ARGS__) |
| #define vor_vx_u32m2_m | ( | ... | ) | __riscv_vor_vx_u32m2_tumu(__VA_ARGS__) |
| #define vor_vx_u32m4 | ( | ... | ) | __riscv_vor_vx_u32m4(__VA_ARGS__) |
| #define vor_vx_u32m4_m | ( | ... | ) | __riscv_vor_vx_u32m4_tumu(__VA_ARGS__) |
| #define vor_vx_u32m8 | ( | ... | ) | __riscv_vor_vx_u32m8(__VA_ARGS__) |
| #define vor_vx_u32m8_m | ( | ... | ) | __riscv_vor_vx_u32m8_tumu(__VA_ARGS__) |
| #define vor_vx_u32mf2 | ( | ... | ) | __riscv_vor_vx_u32mf2(__VA_ARGS__) |
| #define vor_vx_u32mf2_m | ( | ... | ) | __riscv_vor_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vor_vx_u64m1 | ( | ... | ) | __riscv_vor_vx_u64m1(__VA_ARGS__) |
| #define vor_vx_u64m1_m | ( | ... | ) | __riscv_vor_vx_u64m1_tumu(__VA_ARGS__) |
| #define vor_vx_u64m2 | ( | ... | ) | __riscv_vor_vx_u64m2(__VA_ARGS__) |
| #define vor_vx_u64m2_m | ( | ... | ) | __riscv_vor_vx_u64m2_tumu(__VA_ARGS__) |
| #define vor_vx_u64m4 | ( | ... | ) | __riscv_vor_vx_u64m4(__VA_ARGS__) |
| #define vor_vx_u64m4_m | ( | ... | ) | __riscv_vor_vx_u64m4_tumu(__VA_ARGS__) |
| #define vor_vx_u64m8 | ( | ... | ) | __riscv_vor_vx_u64m8(__VA_ARGS__) |
| #define vor_vx_u64m8_m | ( | ... | ) | __riscv_vor_vx_u64m8_tumu(__VA_ARGS__) |
| #define vor_vx_u8m1 | ( | ... | ) | __riscv_vor_vx_u8m1(__VA_ARGS__) |
| #define vor_vx_u8m1_m | ( | ... | ) | __riscv_vor_vx_u8m1_tumu(__VA_ARGS__) |
| #define vor_vx_u8m2 | ( | ... | ) | __riscv_vor_vx_u8m2(__VA_ARGS__) |
| #define vor_vx_u8m2_m | ( | ... | ) | __riscv_vor_vx_u8m2_tumu(__VA_ARGS__) |
| #define vor_vx_u8m4 | ( | ... | ) | __riscv_vor_vx_u8m4(__VA_ARGS__) |
| #define vor_vx_u8m4_m | ( | ... | ) | __riscv_vor_vx_u8m4_tumu(__VA_ARGS__) |
| #define vor_vx_u8m8 | ( | ... | ) | __riscv_vor_vx_u8m8(__VA_ARGS__) |
| #define vor_vx_u8m8_m | ( | ... | ) | __riscv_vor_vx_u8m8_tumu(__VA_ARGS__) |
| #define vor_vx_u8mf2 | ( | ... | ) | __riscv_vor_vx_u8mf2(__VA_ARGS__) |
| #define vor_vx_u8mf2_m | ( | ... | ) | __riscv_vor_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vor_vx_u8mf4 | ( | ... | ) | __riscv_vor_vx_u8mf4(__VA_ARGS__) |
| #define vor_vx_u8mf4_m | ( | ... | ) | __riscv_vor_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vor_vx_u8mf8 | ( | ... | ) | __riscv_vor_vx_u8mf8(__VA_ARGS__) |
| #define vor_vx_u8mf8_m | ( | ... | ) | __riscv_vor_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vredand_vs_i16m1_i16m1 | ( | ... | ) | __riscv_vredand_vs_i16m1_i16m1_tu(__VA_ARGS__) |
| #define vredand_vs_i16m1_i16m1_m | ( | ... | ) | __riscv_vredand_vs_i16m1_i16m1_tum(__VA_ARGS__) |
| #define vredand_vs_i16m2_i16m1 | ( | ... | ) | __riscv_vredand_vs_i16m2_i16m1_tu(__VA_ARGS__) |
| #define vredand_vs_i16m2_i16m1_m | ( | ... | ) | __riscv_vredand_vs_i16m2_i16m1_tum(__VA_ARGS__) |
| #define vredand_vs_i16m4_i16m1 | ( | ... | ) | __riscv_vredand_vs_i16m4_i16m1_tu(__VA_ARGS__) |
| #define vredand_vs_i16m4_i16m1_m | ( | ... | ) | __riscv_vredand_vs_i16m4_i16m1_tum(__VA_ARGS__) |
| #define vredand_vs_i16m8_i16m1 | ( | ... | ) | __riscv_vredand_vs_i16m8_i16m1_tu(__VA_ARGS__) |
| #define vredand_vs_i16m8_i16m1_m | ( | ... | ) | __riscv_vredand_vs_i16m8_i16m1_tum(__VA_ARGS__) |
| #define vredand_vs_i16mf2_i16m1 | ( | ... | ) | __riscv_vredand_vs_i16mf2_i16m1_tu(__VA_ARGS__) |
| #define vredand_vs_i16mf2_i16m1_m | ( | ... | ) | __riscv_vredand_vs_i16mf2_i16m1_tum(__VA_ARGS__) |
| #define vredand_vs_i16mf4_i16m1 | ( | ... | ) | __riscv_vredand_vs_i16mf4_i16m1_tu(__VA_ARGS__) |
| #define vredand_vs_i16mf4_i16m1_m | ( | ... | ) | __riscv_vredand_vs_i16mf4_i16m1_tum(__VA_ARGS__) |
| #define vredand_vs_i32m1_i32m1 | ( | ... | ) | __riscv_vredand_vs_i32m1_i32m1_tu(__VA_ARGS__) |
| #define vredand_vs_i32m1_i32m1_m | ( | ... | ) | __riscv_vredand_vs_i32m1_i32m1_tum(__VA_ARGS__) |
| #define vredand_vs_i32m2_i32m1 | ( | ... | ) | __riscv_vredand_vs_i32m2_i32m1_tu(__VA_ARGS__) |
| #define vredand_vs_i32m2_i32m1_m | ( | ... | ) | __riscv_vredand_vs_i32m2_i32m1_tum(__VA_ARGS__) |
| #define vredand_vs_i32m4_i32m1 | ( | ... | ) | __riscv_vredand_vs_i32m4_i32m1_tu(__VA_ARGS__) |
| #define vredand_vs_i32m4_i32m1_m | ( | ... | ) | __riscv_vredand_vs_i32m4_i32m1_tum(__VA_ARGS__) |
| #define vredand_vs_i32m8_i32m1 | ( | ... | ) | __riscv_vredand_vs_i32m8_i32m1_tu(__VA_ARGS__) |
| #define vredand_vs_i32m8_i32m1_m | ( | ... | ) | __riscv_vredand_vs_i32m8_i32m1_tum(__VA_ARGS__) |
| #define vredand_vs_i32mf2_i32m1 | ( | ... | ) | __riscv_vredand_vs_i32mf2_i32m1_tu(__VA_ARGS__) |
| #define vredand_vs_i32mf2_i32m1_m | ( | ... | ) | __riscv_vredand_vs_i32mf2_i32m1_tum(__VA_ARGS__) |
| #define vredand_vs_i64m1_i64m1 | ( | ... | ) | __riscv_vredand_vs_i64m1_i64m1_tu(__VA_ARGS__) |
| #define vredand_vs_i64m1_i64m1_m | ( | ... | ) | __riscv_vredand_vs_i64m1_i64m1_tum(__VA_ARGS__) |
| #define vredand_vs_i64m2_i64m1 | ( | ... | ) | __riscv_vredand_vs_i64m2_i64m1_tu(__VA_ARGS__) |
| #define vredand_vs_i64m2_i64m1_m | ( | ... | ) | __riscv_vredand_vs_i64m2_i64m1_tum(__VA_ARGS__) |
| #define vredand_vs_i64m4_i64m1 | ( | ... | ) | __riscv_vredand_vs_i64m4_i64m1_tu(__VA_ARGS__) |
| #define vredand_vs_i64m4_i64m1_m | ( | ... | ) | __riscv_vredand_vs_i64m4_i64m1_tum(__VA_ARGS__) |
| #define vredand_vs_i64m8_i64m1 | ( | ... | ) | __riscv_vredand_vs_i64m8_i64m1_tu(__VA_ARGS__) |
| #define vredand_vs_i64m8_i64m1_m | ( | ... | ) | __riscv_vredand_vs_i64m8_i64m1_tum(__VA_ARGS__) |
| #define vredand_vs_i8m1_i8m1 | ( | ... | ) | __riscv_vredand_vs_i8m1_i8m1_tu(__VA_ARGS__) |
| #define vredand_vs_i8m1_i8m1_m | ( | ... | ) | __riscv_vredand_vs_i8m1_i8m1_tum(__VA_ARGS__) |
| #define vredand_vs_i8m2_i8m1 | ( | ... | ) | __riscv_vredand_vs_i8m2_i8m1_tu(__VA_ARGS__) |
| #define vredand_vs_i8m2_i8m1_m | ( | ... | ) | __riscv_vredand_vs_i8m2_i8m1_tum(__VA_ARGS__) |
| #define vredand_vs_i8m4_i8m1 | ( | ... | ) | __riscv_vredand_vs_i8m4_i8m1_tu(__VA_ARGS__) |
| #define vredand_vs_i8m4_i8m1_m | ( | ... | ) | __riscv_vredand_vs_i8m4_i8m1_tum(__VA_ARGS__) |
| #define vredand_vs_i8m8_i8m1 | ( | ... | ) | __riscv_vredand_vs_i8m8_i8m1_tu(__VA_ARGS__) |
| #define vredand_vs_i8m8_i8m1_m | ( | ... | ) | __riscv_vredand_vs_i8m8_i8m1_tum(__VA_ARGS__) |
| #define vredand_vs_i8mf2_i8m1 | ( | ... | ) | __riscv_vredand_vs_i8mf2_i8m1_tu(__VA_ARGS__) |
| #define vredand_vs_i8mf2_i8m1_m | ( | ... | ) | __riscv_vredand_vs_i8mf2_i8m1_tum(__VA_ARGS__) |
| #define vredand_vs_i8mf4_i8m1 | ( | ... | ) | __riscv_vredand_vs_i8mf4_i8m1_tu(__VA_ARGS__) |
| #define vredand_vs_i8mf4_i8m1_m | ( | ... | ) | __riscv_vredand_vs_i8mf4_i8m1_tum(__VA_ARGS__) |
| #define vredand_vs_i8mf8_i8m1 | ( | ... | ) | __riscv_vredand_vs_i8mf8_i8m1_tu(__VA_ARGS__) |
| #define vredand_vs_i8mf8_i8m1_m | ( | ... | ) | __riscv_vredand_vs_i8mf8_i8m1_tum(__VA_ARGS__) |
| #define vredand_vs_u16m1_u16m1 | ( | ... | ) | __riscv_vredand_vs_u16m1_u16m1_tu(__VA_ARGS__) |
| #define vredand_vs_u16m1_u16m1_m | ( | ... | ) | __riscv_vredand_vs_u16m1_u16m1_tum(__VA_ARGS__) |
| #define vredand_vs_u16m2_u16m1 | ( | ... | ) | __riscv_vredand_vs_u16m2_u16m1_tu(__VA_ARGS__) |
| #define vredand_vs_u16m2_u16m1_m | ( | ... | ) | __riscv_vredand_vs_u16m2_u16m1_tum(__VA_ARGS__) |
| #define vredand_vs_u16m4_u16m1 | ( | ... | ) | __riscv_vredand_vs_u16m4_u16m1_tu(__VA_ARGS__) |
| #define vredand_vs_u16m4_u16m1_m | ( | ... | ) | __riscv_vredand_vs_u16m4_u16m1_tum(__VA_ARGS__) |
| #define vredand_vs_u16m8_u16m1 | ( | ... | ) | __riscv_vredand_vs_u16m8_u16m1_tu(__VA_ARGS__) |
| #define vredand_vs_u16m8_u16m1_m | ( | ... | ) | __riscv_vredand_vs_u16m8_u16m1_tum(__VA_ARGS__) |
| #define vredand_vs_u16mf2_u16m1 | ( | ... | ) | __riscv_vredand_vs_u16mf2_u16m1_tu(__VA_ARGS__) |
| #define vredand_vs_u16mf2_u16m1_m | ( | ... | ) | __riscv_vredand_vs_u16mf2_u16m1_tum(__VA_ARGS__) |
| #define vredand_vs_u16mf4_u16m1 | ( | ... | ) | __riscv_vredand_vs_u16mf4_u16m1_tu(__VA_ARGS__) |
| #define vredand_vs_u16mf4_u16m1_m | ( | ... | ) | __riscv_vredand_vs_u16mf4_u16m1_tum(__VA_ARGS__) |
| #define vredand_vs_u32m1_u32m1 | ( | ... | ) | __riscv_vredand_vs_u32m1_u32m1_tu(__VA_ARGS__) |
| #define vredand_vs_u32m1_u32m1_m | ( | ... | ) | __riscv_vredand_vs_u32m1_u32m1_tum(__VA_ARGS__) |
| #define vredand_vs_u32m2_u32m1 | ( | ... | ) | __riscv_vredand_vs_u32m2_u32m1_tu(__VA_ARGS__) |
| #define vredand_vs_u32m2_u32m1_m | ( | ... | ) | __riscv_vredand_vs_u32m2_u32m1_tum(__VA_ARGS__) |
| #define vredand_vs_u32m4_u32m1 | ( | ... | ) | __riscv_vredand_vs_u32m4_u32m1_tu(__VA_ARGS__) |
| #define vredand_vs_u32m4_u32m1_m | ( | ... | ) | __riscv_vredand_vs_u32m4_u32m1_tum(__VA_ARGS__) |
| #define vredand_vs_u32m8_u32m1 | ( | ... | ) | __riscv_vredand_vs_u32m8_u32m1_tu(__VA_ARGS__) |
| #define vredand_vs_u32m8_u32m1_m | ( | ... | ) | __riscv_vredand_vs_u32m8_u32m1_tum(__VA_ARGS__) |
| #define vredand_vs_u32mf2_u32m1 | ( | ... | ) | __riscv_vredand_vs_u32mf2_u32m1_tu(__VA_ARGS__) |
| #define vredand_vs_u32mf2_u32m1_m | ( | ... | ) | __riscv_vredand_vs_u32mf2_u32m1_tum(__VA_ARGS__) |
| #define vredand_vs_u64m1_u64m1 | ( | ... | ) | __riscv_vredand_vs_u64m1_u64m1_tu(__VA_ARGS__) |
| #define vredand_vs_u64m1_u64m1_m | ( | ... | ) | __riscv_vredand_vs_u64m1_u64m1_tum(__VA_ARGS__) |
| #define vredand_vs_u64m2_u64m1 | ( | ... | ) | __riscv_vredand_vs_u64m2_u64m1_tu(__VA_ARGS__) |
| #define vredand_vs_u64m2_u64m1_m | ( | ... | ) | __riscv_vredand_vs_u64m2_u64m1_tum(__VA_ARGS__) |
| #define vredand_vs_u64m4_u64m1 | ( | ... | ) | __riscv_vredand_vs_u64m4_u64m1_tu(__VA_ARGS__) |
| #define vredand_vs_u64m4_u64m1_m | ( | ... | ) | __riscv_vredand_vs_u64m4_u64m1_tum(__VA_ARGS__) |
| #define vredand_vs_u64m8_u64m1 | ( | ... | ) | __riscv_vredand_vs_u64m8_u64m1_tu(__VA_ARGS__) |
| #define vredand_vs_u64m8_u64m1_m | ( | ... | ) | __riscv_vredand_vs_u64m8_u64m1_tum(__VA_ARGS__) |
| #define vredand_vs_u8m1_u8m1 | ( | ... | ) | __riscv_vredand_vs_u8m1_u8m1_tu(__VA_ARGS__) |
| #define vredand_vs_u8m1_u8m1_m | ( | ... | ) | __riscv_vredand_vs_u8m1_u8m1_tum(__VA_ARGS__) |
| #define vredand_vs_u8m2_u8m1 | ( | ... | ) | __riscv_vredand_vs_u8m2_u8m1_tu(__VA_ARGS__) |
| #define vredand_vs_u8m2_u8m1_m | ( | ... | ) | __riscv_vredand_vs_u8m2_u8m1_tum(__VA_ARGS__) |
| #define vredand_vs_u8m4_u8m1 | ( | ... | ) | __riscv_vredand_vs_u8m4_u8m1_tu(__VA_ARGS__) |
| #define vredand_vs_u8m4_u8m1_m | ( | ... | ) | __riscv_vredand_vs_u8m4_u8m1_tum(__VA_ARGS__) |
| #define vredand_vs_u8m8_u8m1 | ( | ... | ) | __riscv_vredand_vs_u8m8_u8m1_tu(__VA_ARGS__) |
| #define vredand_vs_u8m8_u8m1_m | ( | ... | ) | __riscv_vredand_vs_u8m8_u8m1_tum(__VA_ARGS__) |
| #define vredand_vs_u8mf2_u8m1 | ( | ... | ) | __riscv_vredand_vs_u8mf2_u8m1_tu(__VA_ARGS__) |
| #define vredand_vs_u8mf2_u8m1_m | ( | ... | ) | __riscv_vredand_vs_u8mf2_u8m1_tum(__VA_ARGS__) |
| #define vredand_vs_u8mf4_u8m1 | ( | ... | ) | __riscv_vredand_vs_u8mf4_u8m1_tu(__VA_ARGS__) |
| #define vredand_vs_u8mf4_u8m1_m | ( | ... | ) | __riscv_vredand_vs_u8mf4_u8m1_tum(__VA_ARGS__) |
| #define vredand_vs_u8mf8_u8m1 | ( | ... | ) | __riscv_vredand_vs_u8mf8_u8m1_tu(__VA_ARGS__) |
| #define vredand_vs_u8mf8_u8m1_m | ( | ... | ) | __riscv_vredand_vs_u8mf8_u8m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i16m1_i16m1 | ( | ... | ) | __riscv_vredmax_vs_i16m1_i16m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i16m1_i16m1_m | ( | ... | ) | __riscv_vredmax_vs_i16m1_i16m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i16m2_i16m1 | ( | ... | ) | __riscv_vredmax_vs_i16m2_i16m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i16m2_i16m1_m | ( | ... | ) | __riscv_vredmax_vs_i16m2_i16m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i16m4_i16m1 | ( | ... | ) | __riscv_vredmax_vs_i16m4_i16m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i16m4_i16m1_m | ( | ... | ) | __riscv_vredmax_vs_i16m4_i16m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i16m8_i16m1 | ( | ... | ) | __riscv_vredmax_vs_i16m8_i16m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i16m8_i16m1_m | ( | ... | ) | __riscv_vredmax_vs_i16m8_i16m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i16mf2_i16m1 | ( | ... | ) | __riscv_vredmax_vs_i16mf2_i16m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i16mf2_i16m1_m | ( | ... | ) | __riscv_vredmax_vs_i16mf2_i16m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i16mf4_i16m1 | ( | ... | ) | __riscv_vredmax_vs_i16mf4_i16m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i16mf4_i16m1_m | ( | ... | ) | __riscv_vredmax_vs_i16mf4_i16m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i32m1_i32m1 | ( | ... | ) | __riscv_vredmax_vs_i32m1_i32m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i32m1_i32m1_m | ( | ... | ) | __riscv_vredmax_vs_i32m1_i32m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i32m2_i32m1 | ( | ... | ) | __riscv_vredmax_vs_i32m2_i32m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i32m2_i32m1_m | ( | ... | ) | __riscv_vredmax_vs_i32m2_i32m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i32m4_i32m1 | ( | ... | ) | __riscv_vredmax_vs_i32m4_i32m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i32m4_i32m1_m | ( | ... | ) | __riscv_vredmax_vs_i32m4_i32m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i32m8_i32m1 | ( | ... | ) | __riscv_vredmax_vs_i32m8_i32m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i32m8_i32m1_m | ( | ... | ) | __riscv_vredmax_vs_i32m8_i32m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i32mf2_i32m1 | ( | ... | ) | __riscv_vredmax_vs_i32mf2_i32m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i32mf2_i32m1_m | ( | ... | ) | __riscv_vredmax_vs_i32mf2_i32m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i64m1_i64m1 | ( | ... | ) | __riscv_vredmax_vs_i64m1_i64m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i64m1_i64m1_m | ( | ... | ) | __riscv_vredmax_vs_i64m1_i64m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i64m2_i64m1 | ( | ... | ) | __riscv_vredmax_vs_i64m2_i64m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i64m2_i64m1_m | ( | ... | ) | __riscv_vredmax_vs_i64m2_i64m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i64m4_i64m1 | ( | ... | ) | __riscv_vredmax_vs_i64m4_i64m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i64m4_i64m1_m | ( | ... | ) | __riscv_vredmax_vs_i64m4_i64m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i64m8_i64m1 | ( | ... | ) | __riscv_vredmax_vs_i64m8_i64m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i64m8_i64m1_m | ( | ... | ) | __riscv_vredmax_vs_i64m8_i64m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i8m1_i8m1 | ( | ... | ) | __riscv_vredmax_vs_i8m1_i8m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i8m1_i8m1_m | ( | ... | ) | __riscv_vredmax_vs_i8m1_i8m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i8m2_i8m1 | ( | ... | ) | __riscv_vredmax_vs_i8m2_i8m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i8m2_i8m1_m | ( | ... | ) | __riscv_vredmax_vs_i8m2_i8m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i8m4_i8m1 | ( | ... | ) | __riscv_vredmax_vs_i8m4_i8m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i8m4_i8m1_m | ( | ... | ) | __riscv_vredmax_vs_i8m4_i8m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i8m8_i8m1 | ( | ... | ) | __riscv_vredmax_vs_i8m8_i8m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i8m8_i8m1_m | ( | ... | ) | __riscv_vredmax_vs_i8m8_i8m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i8mf2_i8m1 | ( | ... | ) | __riscv_vredmax_vs_i8mf2_i8m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i8mf2_i8m1_m | ( | ... | ) | __riscv_vredmax_vs_i8mf2_i8m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i8mf4_i8m1 | ( | ... | ) | __riscv_vredmax_vs_i8mf4_i8m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i8mf4_i8m1_m | ( | ... | ) | __riscv_vredmax_vs_i8mf4_i8m1_tum(__VA_ARGS__) |
| #define vredmax_vs_i8mf8_i8m1 | ( | ... | ) | __riscv_vredmax_vs_i8mf8_i8m1_tu(__VA_ARGS__) |
| #define vredmax_vs_i8mf8_i8m1_m | ( | ... | ) | __riscv_vredmax_vs_i8mf8_i8m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u16m1_u16m1 | ( | ... | ) | __riscv_vredmaxu_vs_u16m1_u16m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u16m1_u16m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u16m1_u16m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u16m2_u16m1 | ( | ... | ) | __riscv_vredmaxu_vs_u16m2_u16m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u16m2_u16m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u16m2_u16m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u16m4_u16m1 | ( | ... | ) | __riscv_vredmaxu_vs_u16m4_u16m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u16m4_u16m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u16m4_u16m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u16m8_u16m1 | ( | ... | ) | __riscv_vredmaxu_vs_u16m8_u16m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u16m8_u16m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u16m8_u16m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u16mf2_u16m1 | ( | ... | ) | __riscv_vredmaxu_vs_u16mf2_u16m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u16mf2_u16m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u16mf2_u16m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u16mf4_u16m1 | ( | ... | ) | __riscv_vredmaxu_vs_u16mf4_u16m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u16mf4_u16m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u16mf4_u16m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u32m1_u32m1 | ( | ... | ) | __riscv_vredmaxu_vs_u32m1_u32m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u32m1_u32m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u32m1_u32m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u32m2_u32m1 | ( | ... | ) | __riscv_vredmaxu_vs_u32m2_u32m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u32m2_u32m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u32m2_u32m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u32m4_u32m1 | ( | ... | ) | __riscv_vredmaxu_vs_u32m4_u32m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u32m4_u32m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u32m4_u32m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u32m8_u32m1 | ( | ... | ) | __riscv_vredmaxu_vs_u32m8_u32m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u32m8_u32m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u32m8_u32m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u32mf2_u32m1 | ( | ... | ) | __riscv_vredmaxu_vs_u32mf2_u32m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u32mf2_u32m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u32mf2_u32m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u64m1_u64m1 | ( | ... | ) | __riscv_vredmaxu_vs_u64m1_u64m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u64m1_u64m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u64m1_u64m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u64m2_u64m1 | ( | ... | ) | __riscv_vredmaxu_vs_u64m2_u64m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u64m2_u64m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u64m2_u64m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u64m4_u64m1 | ( | ... | ) | __riscv_vredmaxu_vs_u64m4_u64m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u64m4_u64m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u64m4_u64m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u64m8_u64m1 | ( | ... | ) | __riscv_vredmaxu_vs_u64m8_u64m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u64m8_u64m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u64m8_u64m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u8m1_u8m1 | ( | ... | ) | __riscv_vredmaxu_vs_u8m1_u8m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u8m1_u8m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u8m1_u8m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u8m2_u8m1 | ( | ... | ) | __riscv_vredmaxu_vs_u8m2_u8m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u8m2_u8m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u8m2_u8m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u8m4_u8m1 | ( | ... | ) | __riscv_vredmaxu_vs_u8m4_u8m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u8m4_u8m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u8m4_u8m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u8m8_u8m1 | ( | ... | ) | __riscv_vredmaxu_vs_u8m8_u8m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u8m8_u8m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u8m8_u8m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u8mf2_u8m1 | ( | ... | ) | __riscv_vredmaxu_vs_u8mf2_u8m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u8mf2_u8m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u8mf2_u8m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u8mf4_u8m1 | ( | ... | ) | __riscv_vredmaxu_vs_u8mf4_u8m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u8mf4_u8m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u8mf4_u8m1_tum(__VA_ARGS__) |
| #define vredmaxu_vs_u8mf8_u8m1 | ( | ... | ) | __riscv_vredmaxu_vs_u8mf8_u8m1_tu(__VA_ARGS__) |
| #define vredmaxu_vs_u8mf8_u8m1_m | ( | ... | ) | __riscv_vredmaxu_vs_u8mf8_u8m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i16m1_i16m1 | ( | ... | ) | __riscv_vredmin_vs_i16m1_i16m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i16m1_i16m1_m | ( | ... | ) | __riscv_vredmin_vs_i16m1_i16m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i16m2_i16m1 | ( | ... | ) | __riscv_vredmin_vs_i16m2_i16m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i16m2_i16m1_m | ( | ... | ) | __riscv_vredmin_vs_i16m2_i16m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i16m4_i16m1 | ( | ... | ) | __riscv_vredmin_vs_i16m4_i16m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i16m4_i16m1_m | ( | ... | ) | __riscv_vredmin_vs_i16m4_i16m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i16m8_i16m1 | ( | ... | ) | __riscv_vredmin_vs_i16m8_i16m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i16m8_i16m1_m | ( | ... | ) | __riscv_vredmin_vs_i16m8_i16m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i16mf2_i16m1 | ( | ... | ) | __riscv_vredmin_vs_i16mf2_i16m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i16mf2_i16m1_m | ( | ... | ) | __riscv_vredmin_vs_i16mf2_i16m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i16mf4_i16m1 | ( | ... | ) | __riscv_vredmin_vs_i16mf4_i16m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i16mf4_i16m1_m | ( | ... | ) | __riscv_vredmin_vs_i16mf4_i16m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i32m1_i32m1 | ( | ... | ) | __riscv_vredmin_vs_i32m1_i32m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i32m1_i32m1_m | ( | ... | ) | __riscv_vredmin_vs_i32m1_i32m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i32m2_i32m1 | ( | ... | ) | __riscv_vredmin_vs_i32m2_i32m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i32m2_i32m1_m | ( | ... | ) | __riscv_vredmin_vs_i32m2_i32m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i32m4_i32m1 | ( | ... | ) | __riscv_vredmin_vs_i32m4_i32m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i32m4_i32m1_m | ( | ... | ) | __riscv_vredmin_vs_i32m4_i32m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i32m8_i32m1 | ( | ... | ) | __riscv_vredmin_vs_i32m8_i32m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i32m8_i32m1_m | ( | ... | ) | __riscv_vredmin_vs_i32m8_i32m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i32mf2_i32m1 | ( | ... | ) | __riscv_vredmin_vs_i32mf2_i32m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i32mf2_i32m1_m | ( | ... | ) | __riscv_vredmin_vs_i32mf2_i32m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i64m1_i64m1 | ( | ... | ) | __riscv_vredmin_vs_i64m1_i64m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i64m1_i64m1_m | ( | ... | ) | __riscv_vredmin_vs_i64m1_i64m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i64m2_i64m1 | ( | ... | ) | __riscv_vredmin_vs_i64m2_i64m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i64m2_i64m1_m | ( | ... | ) | __riscv_vredmin_vs_i64m2_i64m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i64m4_i64m1 | ( | ... | ) | __riscv_vredmin_vs_i64m4_i64m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i64m4_i64m1_m | ( | ... | ) | __riscv_vredmin_vs_i64m4_i64m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i64m8_i64m1 | ( | ... | ) | __riscv_vredmin_vs_i64m8_i64m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i64m8_i64m1_m | ( | ... | ) | __riscv_vredmin_vs_i64m8_i64m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i8m1_i8m1 | ( | ... | ) | __riscv_vredmin_vs_i8m1_i8m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i8m1_i8m1_m | ( | ... | ) | __riscv_vredmin_vs_i8m1_i8m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i8m2_i8m1 | ( | ... | ) | __riscv_vredmin_vs_i8m2_i8m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i8m2_i8m1_m | ( | ... | ) | __riscv_vredmin_vs_i8m2_i8m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i8m4_i8m1 | ( | ... | ) | __riscv_vredmin_vs_i8m4_i8m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i8m4_i8m1_m | ( | ... | ) | __riscv_vredmin_vs_i8m4_i8m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i8m8_i8m1 | ( | ... | ) | __riscv_vredmin_vs_i8m8_i8m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i8m8_i8m1_m | ( | ... | ) | __riscv_vredmin_vs_i8m8_i8m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i8mf2_i8m1 | ( | ... | ) | __riscv_vredmin_vs_i8mf2_i8m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i8mf2_i8m1_m | ( | ... | ) | __riscv_vredmin_vs_i8mf2_i8m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i8mf4_i8m1 | ( | ... | ) | __riscv_vredmin_vs_i8mf4_i8m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i8mf4_i8m1_m | ( | ... | ) | __riscv_vredmin_vs_i8mf4_i8m1_tum(__VA_ARGS__) |
| #define vredmin_vs_i8mf8_i8m1 | ( | ... | ) | __riscv_vredmin_vs_i8mf8_i8m1_tu(__VA_ARGS__) |
| #define vredmin_vs_i8mf8_i8m1_m | ( | ... | ) | __riscv_vredmin_vs_i8mf8_i8m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u16m1_u16m1 | ( | ... | ) | __riscv_vredminu_vs_u16m1_u16m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u16m1_u16m1_m | ( | ... | ) | __riscv_vredminu_vs_u16m1_u16m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u16m2_u16m1 | ( | ... | ) | __riscv_vredminu_vs_u16m2_u16m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u16m2_u16m1_m | ( | ... | ) | __riscv_vredminu_vs_u16m2_u16m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u16m4_u16m1 | ( | ... | ) | __riscv_vredminu_vs_u16m4_u16m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u16m4_u16m1_m | ( | ... | ) | __riscv_vredminu_vs_u16m4_u16m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u16m8_u16m1 | ( | ... | ) | __riscv_vredminu_vs_u16m8_u16m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u16m8_u16m1_m | ( | ... | ) | __riscv_vredminu_vs_u16m8_u16m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u16mf2_u16m1 | ( | ... | ) | __riscv_vredminu_vs_u16mf2_u16m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u16mf2_u16m1_m | ( | ... | ) | __riscv_vredminu_vs_u16mf2_u16m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u16mf4_u16m1 | ( | ... | ) | __riscv_vredminu_vs_u16mf4_u16m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u16mf4_u16m1_m | ( | ... | ) | __riscv_vredminu_vs_u16mf4_u16m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u32m1_u32m1 | ( | ... | ) | __riscv_vredminu_vs_u32m1_u32m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u32m1_u32m1_m | ( | ... | ) | __riscv_vredminu_vs_u32m1_u32m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u32m2_u32m1 | ( | ... | ) | __riscv_vredminu_vs_u32m2_u32m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u32m2_u32m1_m | ( | ... | ) | __riscv_vredminu_vs_u32m2_u32m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u32m4_u32m1 | ( | ... | ) | __riscv_vredminu_vs_u32m4_u32m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u32m4_u32m1_m | ( | ... | ) | __riscv_vredminu_vs_u32m4_u32m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u32m8_u32m1 | ( | ... | ) | __riscv_vredminu_vs_u32m8_u32m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u32m8_u32m1_m | ( | ... | ) | __riscv_vredminu_vs_u32m8_u32m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u32mf2_u32m1 | ( | ... | ) | __riscv_vredminu_vs_u32mf2_u32m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u32mf2_u32m1_m | ( | ... | ) | __riscv_vredminu_vs_u32mf2_u32m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u64m1_u64m1 | ( | ... | ) | __riscv_vredminu_vs_u64m1_u64m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u64m1_u64m1_m | ( | ... | ) | __riscv_vredminu_vs_u64m1_u64m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u64m2_u64m1 | ( | ... | ) | __riscv_vredminu_vs_u64m2_u64m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u64m2_u64m1_m | ( | ... | ) | __riscv_vredminu_vs_u64m2_u64m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u64m4_u64m1 | ( | ... | ) | __riscv_vredminu_vs_u64m4_u64m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u64m4_u64m1_m | ( | ... | ) | __riscv_vredminu_vs_u64m4_u64m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u64m8_u64m1 | ( | ... | ) | __riscv_vredminu_vs_u64m8_u64m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u64m8_u64m1_m | ( | ... | ) | __riscv_vredminu_vs_u64m8_u64m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u8m1_u8m1 | ( | ... | ) | __riscv_vredminu_vs_u8m1_u8m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u8m1_u8m1_m | ( | ... | ) | __riscv_vredminu_vs_u8m1_u8m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u8m2_u8m1 | ( | ... | ) | __riscv_vredminu_vs_u8m2_u8m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u8m2_u8m1_m | ( | ... | ) | __riscv_vredminu_vs_u8m2_u8m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u8m4_u8m1 | ( | ... | ) | __riscv_vredminu_vs_u8m4_u8m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u8m4_u8m1_m | ( | ... | ) | __riscv_vredminu_vs_u8m4_u8m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u8m8_u8m1 | ( | ... | ) | __riscv_vredminu_vs_u8m8_u8m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u8m8_u8m1_m | ( | ... | ) | __riscv_vredminu_vs_u8m8_u8m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u8mf2_u8m1 | ( | ... | ) | __riscv_vredminu_vs_u8mf2_u8m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u8mf2_u8m1_m | ( | ... | ) | __riscv_vredminu_vs_u8mf2_u8m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u8mf4_u8m1 | ( | ... | ) | __riscv_vredminu_vs_u8mf4_u8m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u8mf4_u8m1_m | ( | ... | ) | __riscv_vredminu_vs_u8mf4_u8m1_tum(__VA_ARGS__) |
| #define vredminu_vs_u8mf8_u8m1 | ( | ... | ) | __riscv_vredminu_vs_u8mf8_u8m1_tu(__VA_ARGS__) |
| #define vredminu_vs_u8mf8_u8m1_m | ( | ... | ) | __riscv_vredminu_vs_u8mf8_u8m1_tum(__VA_ARGS__) |
| #define vredor_vs_i16m1_i16m1 | ( | ... | ) | __riscv_vredor_vs_i16m1_i16m1_tu(__VA_ARGS__) |
| #define vredor_vs_i16m1_i16m1_m | ( | ... | ) | __riscv_vredor_vs_i16m1_i16m1_tum(__VA_ARGS__) |
| #define vredor_vs_i16m2_i16m1 | ( | ... | ) | __riscv_vredor_vs_i16m2_i16m1_tu(__VA_ARGS__) |
| #define vredor_vs_i16m2_i16m1_m | ( | ... | ) | __riscv_vredor_vs_i16m2_i16m1_tum(__VA_ARGS__) |
| #define vredor_vs_i16m4_i16m1 | ( | ... | ) | __riscv_vredor_vs_i16m4_i16m1_tu(__VA_ARGS__) |
| #define vredor_vs_i16m4_i16m1_m | ( | ... | ) | __riscv_vredor_vs_i16m4_i16m1_tum(__VA_ARGS__) |
| #define vredor_vs_i16m8_i16m1 | ( | ... | ) | __riscv_vredor_vs_i16m8_i16m1_tu(__VA_ARGS__) |
| #define vredor_vs_i16m8_i16m1_m | ( | ... | ) | __riscv_vredor_vs_i16m8_i16m1_tum(__VA_ARGS__) |
| #define vredor_vs_i16mf2_i16m1 | ( | ... | ) | __riscv_vredor_vs_i16mf2_i16m1_tu(__VA_ARGS__) |
| #define vredor_vs_i16mf2_i16m1_m | ( | ... | ) | __riscv_vredor_vs_i16mf2_i16m1_tum(__VA_ARGS__) |
| #define vredor_vs_i16mf4_i16m1 | ( | ... | ) | __riscv_vredor_vs_i16mf4_i16m1_tu(__VA_ARGS__) |
| #define vredor_vs_i16mf4_i16m1_m | ( | ... | ) | __riscv_vredor_vs_i16mf4_i16m1_tum(__VA_ARGS__) |
| #define vredor_vs_i32m1_i32m1 | ( | ... | ) | __riscv_vredor_vs_i32m1_i32m1_tu(__VA_ARGS__) |
| #define vredor_vs_i32m1_i32m1_m | ( | ... | ) | __riscv_vredor_vs_i32m1_i32m1_tum(__VA_ARGS__) |
| #define vredor_vs_i32m2_i32m1 | ( | ... | ) | __riscv_vredor_vs_i32m2_i32m1_tu(__VA_ARGS__) |
| #define vredor_vs_i32m2_i32m1_m | ( | ... | ) | __riscv_vredor_vs_i32m2_i32m1_tum(__VA_ARGS__) |
| #define vredor_vs_i32m4_i32m1 | ( | ... | ) | __riscv_vredor_vs_i32m4_i32m1_tu(__VA_ARGS__) |
| #define vredor_vs_i32m4_i32m1_m | ( | ... | ) | __riscv_vredor_vs_i32m4_i32m1_tum(__VA_ARGS__) |
| #define vredor_vs_i32m8_i32m1 | ( | ... | ) | __riscv_vredor_vs_i32m8_i32m1_tu(__VA_ARGS__) |
| #define vredor_vs_i32m8_i32m1_m | ( | ... | ) | __riscv_vredor_vs_i32m8_i32m1_tum(__VA_ARGS__) |
| #define vredor_vs_i32mf2_i32m1 | ( | ... | ) | __riscv_vredor_vs_i32mf2_i32m1_tu(__VA_ARGS__) |
| #define vredor_vs_i32mf2_i32m1_m | ( | ... | ) | __riscv_vredor_vs_i32mf2_i32m1_tum(__VA_ARGS__) |
| #define vredor_vs_i64m1_i64m1 | ( | ... | ) | __riscv_vredor_vs_i64m1_i64m1_tu(__VA_ARGS__) |
| #define vredor_vs_i64m1_i64m1_m | ( | ... | ) | __riscv_vredor_vs_i64m1_i64m1_tum(__VA_ARGS__) |
| #define vredor_vs_i64m2_i64m1 | ( | ... | ) | __riscv_vredor_vs_i64m2_i64m1_tu(__VA_ARGS__) |
| #define vredor_vs_i64m2_i64m1_m | ( | ... | ) | __riscv_vredor_vs_i64m2_i64m1_tum(__VA_ARGS__) |
| #define vredor_vs_i64m4_i64m1 | ( | ... | ) | __riscv_vredor_vs_i64m4_i64m1_tu(__VA_ARGS__) |
| #define vredor_vs_i64m4_i64m1_m | ( | ... | ) | __riscv_vredor_vs_i64m4_i64m1_tum(__VA_ARGS__) |
| #define vredor_vs_i64m8_i64m1 | ( | ... | ) | __riscv_vredor_vs_i64m8_i64m1_tu(__VA_ARGS__) |
| #define vredor_vs_i64m8_i64m1_m | ( | ... | ) | __riscv_vredor_vs_i64m8_i64m1_tum(__VA_ARGS__) |
| #define vredor_vs_i8m1_i8m1 | ( | ... | ) | __riscv_vredor_vs_i8m1_i8m1_tu(__VA_ARGS__) |
| #define vredor_vs_i8m1_i8m1_m | ( | ... | ) | __riscv_vredor_vs_i8m1_i8m1_tum(__VA_ARGS__) |
| #define vredor_vs_i8m2_i8m1 | ( | ... | ) | __riscv_vredor_vs_i8m2_i8m1_tu(__VA_ARGS__) |
| #define vredor_vs_i8m2_i8m1_m | ( | ... | ) | __riscv_vredor_vs_i8m2_i8m1_tum(__VA_ARGS__) |
| #define vredor_vs_i8m4_i8m1 | ( | ... | ) | __riscv_vredor_vs_i8m4_i8m1_tu(__VA_ARGS__) |
| #define vredor_vs_i8m4_i8m1_m | ( | ... | ) | __riscv_vredor_vs_i8m4_i8m1_tum(__VA_ARGS__) |
| #define vredor_vs_i8m8_i8m1 | ( | ... | ) | __riscv_vredor_vs_i8m8_i8m1_tu(__VA_ARGS__) |
| #define vredor_vs_i8m8_i8m1_m | ( | ... | ) | __riscv_vredor_vs_i8m8_i8m1_tum(__VA_ARGS__) |
| #define vredor_vs_i8mf2_i8m1 | ( | ... | ) | __riscv_vredor_vs_i8mf2_i8m1_tu(__VA_ARGS__) |
| #define vredor_vs_i8mf2_i8m1_m | ( | ... | ) | __riscv_vredor_vs_i8mf2_i8m1_tum(__VA_ARGS__) |
| #define vredor_vs_i8mf4_i8m1 | ( | ... | ) | __riscv_vredor_vs_i8mf4_i8m1_tu(__VA_ARGS__) |
| #define vredor_vs_i8mf4_i8m1_m | ( | ... | ) | __riscv_vredor_vs_i8mf4_i8m1_tum(__VA_ARGS__) |
| #define vredor_vs_i8mf8_i8m1 | ( | ... | ) | __riscv_vredor_vs_i8mf8_i8m1_tu(__VA_ARGS__) |
| #define vredor_vs_i8mf8_i8m1_m | ( | ... | ) | __riscv_vredor_vs_i8mf8_i8m1_tum(__VA_ARGS__) |
| #define vredor_vs_u16m1_u16m1 | ( | ... | ) | __riscv_vredor_vs_u16m1_u16m1_tu(__VA_ARGS__) |
| #define vredor_vs_u16m1_u16m1_m | ( | ... | ) | __riscv_vredor_vs_u16m1_u16m1_tum(__VA_ARGS__) |
| #define vredor_vs_u16m2_u16m1 | ( | ... | ) | __riscv_vredor_vs_u16m2_u16m1_tu(__VA_ARGS__) |
| #define vredor_vs_u16m2_u16m1_m | ( | ... | ) | __riscv_vredor_vs_u16m2_u16m1_tum(__VA_ARGS__) |
| #define vredor_vs_u16m4_u16m1 | ( | ... | ) | __riscv_vredor_vs_u16m4_u16m1_tu(__VA_ARGS__) |
| #define vredor_vs_u16m4_u16m1_m | ( | ... | ) | __riscv_vredor_vs_u16m4_u16m1_tum(__VA_ARGS__) |
| #define vredor_vs_u16m8_u16m1 | ( | ... | ) | __riscv_vredor_vs_u16m8_u16m1_tu(__VA_ARGS__) |
| #define vredor_vs_u16m8_u16m1_m | ( | ... | ) | __riscv_vredor_vs_u16m8_u16m1_tum(__VA_ARGS__) |
| #define vredor_vs_u16mf2_u16m1 | ( | ... | ) | __riscv_vredor_vs_u16mf2_u16m1_tu(__VA_ARGS__) |
| #define vredor_vs_u16mf2_u16m1_m | ( | ... | ) | __riscv_vredor_vs_u16mf2_u16m1_tum(__VA_ARGS__) |
| #define vredor_vs_u16mf4_u16m1 | ( | ... | ) | __riscv_vredor_vs_u16mf4_u16m1_tu(__VA_ARGS__) |
| #define vredor_vs_u16mf4_u16m1_m | ( | ... | ) | __riscv_vredor_vs_u16mf4_u16m1_tum(__VA_ARGS__) |
| #define vredor_vs_u32m1_u32m1 | ( | ... | ) | __riscv_vredor_vs_u32m1_u32m1_tu(__VA_ARGS__) |
| #define vredor_vs_u32m1_u32m1_m | ( | ... | ) | __riscv_vredor_vs_u32m1_u32m1_tum(__VA_ARGS__) |
| #define vredor_vs_u32m2_u32m1 | ( | ... | ) | __riscv_vredor_vs_u32m2_u32m1_tu(__VA_ARGS__) |
| #define vredor_vs_u32m2_u32m1_m | ( | ... | ) | __riscv_vredor_vs_u32m2_u32m1_tum(__VA_ARGS__) |
| #define vredor_vs_u32m4_u32m1 | ( | ... | ) | __riscv_vredor_vs_u32m4_u32m1_tu(__VA_ARGS__) |
| #define vredor_vs_u32m4_u32m1_m | ( | ... | ) | __riscv_vredor_vs_u32m4_u32m1_tum(__VA_ARGS__) |
| #define vredor_vs_u32m8_u32m1 | ( | ... | ) | __riscv_vredor_vs_u32m8_u32m1_tu(__VA_ARGS__) |
| #define vredor_vs_u32m8_u32m1_m | ( | ... | ) | __riscv_vredor_vs_u32m8_u32m1_tum(__VA_ARGS__) |
| #define vredor_vs_u32mf2_u32m1 | ( | ... | ) | __riscv_vredor_vs_u32mf2_u32m1_tu(__VA_ARGS__) |
| #define vredor_vs_u32mf2_u32m1_m | ( | ... | ) | __riscv_vredor_vs_u32mf2_u32m1_tum(__VA_ARGS__) |
| #define vredor_vs_u64m1_u64m1 | ( | ... | ) | __riscv_vredor_vs_u64m1_u64m1_tu(__VA_ARGS__) |
| #define vredor_vs_u64m1_u64m1_m | ( | ... | ) | __riscv_vredor_vs_u64m1_u64m1_tum(__VA_ARGS__) |
| #define vredor_vs_u64m2_u64m1 | ( | ... | ) | __riscv_vredor_vs_u64m2_u64m1_tu(__VA_ARGS__) |
| #define vredor_vs_u64m2_u64m1_m | ( | ... | ) | __riscv_vredor_vs_u64m2_u64m1_tum(__VA_ARGS__) |
| #define vredor_vs_u64m4_u64m1 | ( | ... | ) | __riscv_vredor_vs_u64m4_u64m1_tu(__VA_ARGS__) |
| #define vredor_vs_u64m4_u64m1_m | ( | ... | ) | __riscv_vredor_vs_u64m4_u64m1_tum(__VA_ARGS__) |
| #define vredor_vs_u64m8_u64m1 | ( | ... | ) | __riscv_vredor_vs_u64m8_u64m1_tu(__VA_ARGS__) |
| #define vredor_vs_u64m8_u64m1_m | ( | ... | ) | __riscv_vredor_vs_u64m8_u64m1_tum(__VA_ARGS__) |
| #define vredor_vs_u8m1_u8m1 | ( | ... | ) | __riscv_vredor_vs_u8m1_u8m1_tu(__VA_ARGS__) |
| #define vredor_vs_u8m1_u8m1_m | ( | ... | ) | __riscv_vredor_vs_u8m1_u8m1_tum(__VA_ARGS__) |
| #define vredor_vs_u8m2_u8m1 | ( | ... | ) | __riscv_vredor_vs_u8m2_u8m1_tu(__VA_ARGS__) |
| #define vredor_vs_u8m2_u8m1_m | ( | ... | ) | __riscv_vredor_vs_u8m2_u8m1_tum(__VA_ARGS__) |
| #define vredor_vs_u8m4_u8m1 | ( | ... | ) | __riscv_vredor_vs_u8m4_u8m1_tu(__VA_ARGS__) |
| #define vredor_vs_u8m4_u8m1_m | ( | ... | ) | __riscv_vredor_vs_u8m4_u8m1_tum(__VA_ARGS__) |
| #define vredor_vs_u8m8_u8m1 | ( | ... | ) | __riscv_vredor_vs_u8m8_u8m1_tu(__VA_ARGS__) |
| #define vredor_vs_u8m8_u8m1_m | ( | ... | ) | __riscv_vredor_vs_u8m8_u8m1_tum(__VA_ARGS__) |
| #define vredor_vs_u8mf2_u8m1 | ( | ... | ) | __riscv_vredor_vs_u8mf2_u8m1_tu(__VA_ARGS__) |
| #define vredor_vs_u8mf2_u8m1_m | ( | ... | ) | __riscv_vredor_vs_u8mf2_u8m1_tum(__VA_ARGS__) |
| #define vredor_vs_u8mf4_u8m1 | ( | ... | ) | __riscv_vredor_vs_u8mf4_u8m1_tu(__VA_ARGS__) |
| #define vredor_vs_u8mf4_u8m1_m | ( | ... | ) | __riscv_vredor_vs_u8mf4_u8m1_tum(__VA_ARGS__) |
| #define vredor_vs_u8mf8_u8m1 | ( | ... | ) | __riscv_vredor_vs_u8mf8_u8m1_tu(__VA_ARGS__) |
| #define vredor_vs_u8mf8_u8m1_m | ( | ... | ) | __riscv_vredor_vs_u8mf8_u8m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i16m1_i16m1 | ( | ... | ) | __riscv_vredsum_vs_i16m1_i16m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i16m1_i16m1_m | ( | ... | ) | __riscv_vredsum_vs_i16m1_i16m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i16m2_i16m1 | ( | ... | ) | __riscv_vredsum_vs_i16m2_i16m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i16m2_i16m1_m | ( | ... | ) | __riscv_vredsum_vs_i16m2_i16m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i16m4_i16m1 | ( | ... | ) | __riscv_vredsum_vs_i16m4_i16m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i16m4_i16m1_m | ( | ... | ) | __riscv_vredsum_vs_i16m4_i16m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i16m8_i16m1 | ( | ... | ) | __riscv_vredsum_vs_i16m8_i16m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i16m8_i16m1_m | ( | ... | ) | __riscv_vredsum_vs_i16m8_i16m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i16mf2_i16m1 | ( | ... | ) | __riscv_vredsum_vs_i16mf2_i16m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i16mf2_i16m1_m | ( | ... | ) | __riscv_vredsum_vs_i16mf2_i16m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i16mf4_i16m1 | ( | ... | ) | __riscv_vredsum_vs_i16mf4_i16m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i16mf4_i16m1_m | ( | ... | ) | __riscv_vredsum_vs_i16mf4_i16m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i32m1_i32m1 | ( | ... | ) | __riscv_vredsum_vs_i32m1_i32m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i32m1_i32m1_m | ( | ... | ) | __riscv_vredsum_vs_i32m1_i32m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i32m2_i32m1 | ( | ... | ) | __riscv_vredsum_vs_i32m2_i32m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i32m2_i32m1_m | ( | ... | ) | __riscv_vredsum_vs_i32m2_i32m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i32m4_i32m1 | ( | ... | ) | __riscv_vredsum_vs_i32m4_i32m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i32m4_i32m1_m | ( | ... | ) | __riscv_vredsum_vs_i32m4_i32m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i32m8_i32m1 | ( | ... | ) | __riscv_vredsum_vs_i32m8_i32m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i32m8_i32m1_m | ( | ... | ) | __riscv_vredsum_vs_i32m8_i32m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i32mf2_i32m1 | ( | ... | ) | __riscv_vredsum_vs_i32mf2_i32m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i32mf2_i32m1_m | ( | ... | ) | __riscv_vredsum_vs_i32mf2_i32m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i64m1_i64m1 | ( | ... | ) | __riscv_vredsum_vs_i64m1_i64m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i64m1_i64m1_m | ( | ... | ) | __riscv_vredsum_vs_i64m1_i64m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i64m2_i64m1 | ( | ... | ) | __riscv_vredsum_vs_i64m2_i64m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i64m2_i64m1_m | ( | ... | ) | __riscv_vredsum_vs_i64m2_i64m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i64m4_i64m1 | ( | ... | ) | __riscv_vredsum_vs_i64m4_i64m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i64m4_i64m1_m | ( | ... | ) | __riscv_vredsum_vs_i64m4_i64m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i64m8_i64m1 | ( | ... | ) | __riscv_vredsum_vs_i64m8_i64m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i64m8_i64m1_m | ( | ... | ) | __riscv_vredsum_vs_i64m8_i64m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i8m1_i8m1 | ( | ... | ) | __riscv_vredsum_vs_i8m1_i8m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i8m1_i8m1_m | ( | ... | ) | __riscv_vredsum_vs_i8m1_i8m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i8m2_i8m1 | ( | ... | ) | __riscv_vredsum_vs_i8m2_i8m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i8m2_i8m1_m | ( | ... | ) | __riscv_vredsum_vs_i8m2_i8m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i8m4_i8m1 | ( | ... | ) | __riscv_vredsum_vs_i8m4_i8m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i8m4_i8m1_m | ( | ... | ) | __riscv_vredsum_vs_i8m4_i8m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i8m8_i8m1 | ( | ... | ) | __riscv_vredsum_vs_i8m8_i8m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i8m8_i8m1_m | ( | ... | ) | __riscv_vredsum_vs_i8m8_i8m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i8mf2_i8m1 | ( | ... | ) | __riscv_vredsum_vs_i8mf2_i8m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i8mf2_i8m1_m | ( | ... | ) | __riscv_vredsum_vs_i8mf2_i8m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i8mf4_i8m1 | ( | ... | ) | __riscv_vredsum_vs_i8mf4_i8m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i8mf4_i8m1_m | ( | ... | ) | __riscv_vredsum_vs_i8mf4_i8m1_tum(__VA_ARGS__) |
| #define vredsum_vs_i8mf8_i8m1 | ( | ... | ) | __riscv_vredsum_vs_i8mf8_i8m1_tu(__VA_ARGS__) |
| #define vredsum_vs_i8mf8_i8m1_m | ( | ... | ) | __riscv_vredsum_vs_i8mf8_i8m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u16m1_u16m1 | ( | ... | ) | __riscv_vredsum_vs_u16m1_u16m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u16m1_u16m1_m | ( | ... | ) | __riscv_vredsum_vs_u16m1_u16m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u16m2_u16m1 | ( | ... | ) | __riscv_vredsum_vs_u16m2_u16m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u16m2_u16m1_m | ( | ... | ) | __riscv_vredsum_vs_u16m2_u16m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u16m4_u16m1 | ( | ... | ) | __riscv_vredsum_vs_u16m4_u16m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u16m4_u16m1_m | ( | ... | ) | __riscv_vredsum_vs_u16m4_u16m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u16m8_u16m1 | ( | ... | ) | __riscv_vredsum_vs_u16m8_u16m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u16m8_u16m1_m | ( | ... | ) | __riscv_vredsum_vs_u16m8_u16m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u16mf2_u16m1 | ( | ... | ) | __riscv_vredsum_vs_u16mf2_u16m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u16mf2_u16m1_m | ( | ... | ) | __riscv_vredsum_vs_u16mf2_u16m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u16mf4_u16m1 | ( | ... | ) | __riscv_vredsum_vs_u16mf4_u16m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u16mf4_u16m1_m | ( | ... | ) | __riscv_vredsum_vs_u16mf4_u16m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u32m1_u32m1 | ( | ... | ) | __riscv_vredsum_vs_u32m1_u32m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u32m1_u32m1_m | ( | ... | ) | __riscv_vredsum_vs_u32m1_u32m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u32m2_u32m1 | ( | ... | ) | __riscv_vredsum_vs_u32m2_u32m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u32m2_u32m1_m | ( | ... | ) | __riscv_vredsum_vs_u32m2_u32m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u32m4_u32m1 | ( | ... | ) | __riscv_vredsum_vs_u32m4_u32m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u32m4_u32m1_m | ( | ... | ) | __riscv_vredsum_vs_u32m4_u32m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u32m8_u32m1 | ( | ... | ) | __riscv_vredsum_vs_u32m8_u32m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u32m8_u32m1_m | ( | ... | ) | __riscv_vredsum_vs_u32m8_u32m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u32mf2_u32m1 | ( | ... | ) | __riscv_vredsum_vs_u32mf2_u32m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u32mf2_u32m1_m | ( | ... | ) | __riscv_vredsum_vs_u32mf2_u32m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u64m1_u64m1 | ( | ... | ) | __riscv_vredsum_vs_u64m1_u64m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u64m1_u64m1_m | ( | ... | ) | __riscv_vredsum_vs_u64m1_u64m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u64m2_u64m1 | ( | ... | ) | __riscv_vredsum_vs_u64m2_u64m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u64m2_u64m1_m | ( | ... | ) | __riscv_vredsum_vs_u64m2_u64m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u64m4_u64m1 | ( | ... | ) | __riscv_vredsum_vs_u64m4_u64m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u64m4_u64m1_m | ( | ... | ) | __riscv_vredsum_vs_u64m4_u64m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u64m8_u64m1 | ( | ... | ) | __riscv_vredsum_vs_u64m8_u64m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u64m8_u64m1_m | ( | ... | ) | __riscv_vredsum_vs_u64m8_u64m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u8m1_u8m1 | ( | ... | ) | __riscv_vredsum_vs_u8m1_u8m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u8m1_u8m1_m | ( | ... | ) | __riscv_vredsum_vs_u8m1_u8m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u8m2_u8m1 | ( | ... | ) | __riscv_vredsum_vs_u8m2_u8m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u8m2_u8m1_m | ( | ... | ) | __riscv_vredsum_vs_u8m2_u8m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u8m4_u8m1 | ( | ... | ) | __riscv_vredsum_vs_u8m4_u8m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u8m4_u8m1_m | ( | ... | ) | __riscv_vredsum_vs_u8m4_u8m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u8m8_u8m1 | ( | ... | ) | __riscv_vredsum_vs_u8m8_u8m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u8m8_u8m1_m | ( | ... | ) | __riscv_vredsum_vs_u8m8_u8m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u8mf2_u8m1 | ( | ... | ) | __riscv_vredsum_vs_u8mf2_u8m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u8mf2_u8m1_m | ( | ... | ) | __riscv_vredsum_vs_u8mf2_u8m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u8mf4_u8m1 | ( | ... | ) | __riscv_vredsum_vs_u8mf4_u8m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u8mf4_u8m1_m | ( | ... | ) | __riscv_vredsum_vs_u8mf4_u8m1_tum(__VA_ARGS__) |
| #define vredsum_vs_u8mf8_u8m1 | ( | ... | ) | __riscv_vredsum_vs_u8mf8_u8m1_tu(__VA_ARGS__) |
| #define vredsum_vs_u8mf8_u8m1_m | ( | ... | ) | __riscv_vredsum_vs_u8mf8_u8m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i16m1_i16m1 | ( | ... | ) | __riscv_vredxor_vs_i16m1_i16m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i16m1_i16m1_m | ( | ... | ) | __riscv_vredxor_vs_i16m1_i16m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i16m2_i16m1 | ( | ... | ) | __riscv_vredxor_vs_i16m2_i16m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i16m2_i16m1_m | ( | ... | ) | __riscv_vredxor_vs_i16m2_i16m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i16m4_i16m1 | ( | ... | ) | __riscv_vredxor_vs_i16m4_i16m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i16m4_i16m1_m | ( | ... | ) | __riscv_vredxor_vs_i16m4_i16m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i16m8_i16m1 | ( | ... | ) | __riscv_vredxor_vs_i16m8_i16m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i16m8_i16m1_m | ( | ... | ) | __riscv_vredxor_vs_i16m8_i16m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i16mf2_i16m1 | ( | ... | ) | __riscv_vredxor_vs_i16mf2_i16m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i16mf2_i16m1_m | ( | ... | ) | __riscv_vredxor_vs_i16mf2_i16m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i16mf4_i16m1 | ( | ... | ) | __riscv_vredxor_vs_i16mf4_i16m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i16mf4_i16m1_m | ( | ... | ) | __riscv_vredxor_vs_i16mf4_i16m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i32m1_i32m1 | ( | ... | ) | __riscv_vredxor_vs_i32m1_i32m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i32m1_i32m1_m | ( | ... | ) | __riscv_vredxor_vs_i32m1_i32m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i32m2_i32m1 | ( | ... | ) | __riscv_vredxor_vs_i32m2_i32m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i32m2_i32m1_m | ( | ... | ) | __riscv_vredxor_vs_i32m2_i32m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i32m4_i32m1 | ( | ... | ) | __riscv_vredxor_vs_i32m4_i32m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i32m4_i32m1_m | ( | ... | ) | __riscv_vredxor_vs_i32m4_i32m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i32m8_i32m1 | ( | ... | ) | __riscv_vredxor_vs_i32m8_i32m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i32m8_i32m1_m | ( | ... | ) | __riscv_vredxor_vs_i32m8_i32m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i32mf2_i32m1 | ( | ... | ) | __riscv_vredxor_vs_i32mf2_i32m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i32mf2_i32m1_m | ( | ... | ) | __riscv_vredxor_vs_i32mf2_i32m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i64m1_i64m1 | ( | ... | ) | __riscv_vredxor_vs_i64m1_i64m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i64m1_i64m1_m | ( | ... | ) | __riscv_vredxor_vs_i64m1_i64m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i64m2_i64m1 | ( | ... | ) | __riscv_vredxor_vs_i64m2_i64m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i64m2_i64m1_m | ( | ... | ) | __riscv_vredxor_vs_i64m2_i64m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i64m4_i64m1 | ( | ... | ) | __riscv_vredxor_vs_i64m4_i64m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i64m4_i64m1_m | ( | ... | ) | __riscv_vredxor_vs_i64m4_i64m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i64m8_i64m1 | ( | ... | ) | __riscv_vredxor_vs_i64m8_i64m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i64m8_i64m1_m | ( | ... | ) | __riscv_vredxor_vs_i64m8_i64m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i8m1_i8m1 | ( | ... | ) | __riscv_vredxor_vs_i8m1_i8m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i8m1_i8m1_m | ( | ... | ) | __riscv_vredxor_vs_i8m1_i8m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i8m2_i8m1 | ( | ... | ) | __riscv_vredxor_vs_i8m2_i8m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i8m2_i8m1_m | ( | ... | ) | __riscv_vredxor_vs_i8m2_i8m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i8m4_i8m1 | ( | ... | ) | __riscv_vredxor_vs_i8m4_i8m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i8m4_i8m1_m | ( | ... | ) | __riscv_vredxor_vs_i8m4_i8m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i8m8_i8m1 | ( | ... | ) | __riscv_vredxor_vs_i8m8_i8m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i8m8_i8m1_m | ( | ... | ) | __riscv_vredxor_vs_i8m8_i8m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i8mf2_i8m1 | ( | ... | ) | __riscv_vredxor_vs_i8mf2_i8m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i8mf2_i8m1_m | ( | ... | ) | __riscv_vredxor_vs_i8mf2_i8m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i8mf4_i8m1 | ( | ... | ) | __riscv_vredxor_vs_i8mf4_i8m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i8mf4_i8m1_m | ( | ... | ) | __riscv_vredxor_vs_i8mf4_i8m1_tum(__VA_ARGS__) |
| #define vredxor_vs_i8mf8_i8m1 | ( | ... | ) | __riscv_vredxor_vs_i8mf8_i8m1_tu(__VA_ARGS__) |
| #define vredxor_vs_i8mf8_i8m1_m | ( | ... | ) | __riscv_vredxor_vs_i8mf8_i8m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u16m1_u16m1 | ( | ... | ) | __riscv_vredxor_vs_u16m1_u16m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u16m1_u16m1_m | ( | ... | ) | __riscv_vredxor_vs_u16m1_u16m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u16m2_u16m1 | ( | ... | ) | __riscv_vredxor_vs_u16m2_u16m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u16m2_u16m1_m | ( | ... | ) | __riscv_vredxor_vs_u16m2_u16m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u16m4_u16m1 | ( | ... | ) | __riscv_vredxor_vs_u16m4_u16m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u16m4_u16m1_m | ( | ... | ) | __riscv_vredxor_vs_u16m4_u16m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u16m8_u16m1 | ( | ... | ) | __riscv_vredxor_vs_u16m8_u16m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u16m8_u16m1_m | ( | ... | ) | __riscv_vredxor_vs_u16m8_u16m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u16mf2_u16m1 | ( | ... | ) | __riscv_vredxor_vs_u16mf2_u16m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u16mf2_u16m1_m | ( | ... | ) | __riscv_vredxor_vs_u16mf2_u16m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u16mf4_u16m1 | ( | ... | ) | __riscv_vredxor_vs_u16mf4_u16m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u16mf4_u16m1_m | ( | ... | ) | __riscv_vredxor_vs_u16mf4_u16m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u32m1_u32m1 | ( | ... | ) | __riscv_vredxor_vs_u32m1_u32m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u32m1_u32m1_m | ( | ... | ) | __riscv_vredxor_vs_u32m1_u32m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u32m2_u32m1 | ( | ... | ) | __riscv_vredxor_vs_u32m2_u32m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u32m2_u32m1_m | ( | ... | ) | __riscv_vredxor_vs_u32m2_u32m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u32m4_u32m1 | ( | ... | ) | __riscv_vredxor_vs_u32m4_u32m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u32m4_u32m1_m | ( | ... | ) | __riscv_vredxor_vs_u32m4_u32m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u32m8_u32m1 | ( | ... | ) | __riscv_vredxor_vs_u32m8_u32m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u32m8_u32m1_m | ( | ... | ) | __riscv_vredxor_vs_u32m8_u32m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u32mf2_u32m1 | ( | ... | ) | __riscv_vredxor_vs_u32mf2_u32m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u32mf2_u32m1_m | ( | ... | ) | __riscv_vredxor_vs_u32mf2_u32m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u64m1_u64m1 | ( | ... | ) | __riscv_vredxor_vs_u64m1_u64m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u64m1_u64m1_m | ( | ... | ) | __riscv_vredxor_vs_u64m1_u64m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u64m2_u64m1 | ( | ... | ) | __riscv_vredxor_vs_u64m2_u64m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u64m2_u64m1_m | ( | ... | ) | __riscv_vredxor_vs_u64m2_u64m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u64m4_u64m1 | ( | ... | ) | __riscv_vredxor_vs_u64m4_u64m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u64m4_u64m1_m | ( | ... | ) | __riscv_vredxor_vs_u64m4_u64m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u64m8_u64m1 | ( | ... | ) | __riscv_vredxor_vs_u64m8_u64m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u64m8_u64m1_m | ( | ... | ) | __riscv_vredxor_vs_u64m8_u64m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u8m1_u8m1 | ( | ... | ) | __riscv_vredxor_vs_u8m1_u8m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u8m1_u8m1_m | ( | ... | ) | __riscv_vredxor_vs_u8m1_u8m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u8m2_u8m1 | ( | ... | ) | __riscv_vredxor_vs_u8m2_u8m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u8m2_u8m1_m | ( | ... | ) | __riscv_vredxor_vs_u8m2_u8m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u8m4_u8m1 | ( | ... | ) | __riscv_vredxor_vs_u8m4_u8m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u8m4_u8m1_m | ( | ... | ) | __riscv_vredxor_vs_u8m4_u8m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u8m8_u8m1 | ( | ... | ) | __riscv_vredxor_vs_u8m8_u8m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u8m8_u8m1_m | ( | ... | ) | __riscv_vredxor_vs_u8m8_u8m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u8mf2_u8m1 | ( | ... | ) | __riscv_vredxor_vs_u8mf2_u8m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u8mf2_u8m1_m | ( | ... | ) | __riscv_vredxor_vs_u8mf2_u8m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u8mf4_u8m1 | ( | ... | ) | __riscv_vredxor_vs_u8mf4_u8m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u8mf4_u8m1_m | ( | ... | ) | __riscv_vredxor_vs_u8mf4_u8m1_tum(__VA_ARGS__) |
| #define vredxor_vs_u8mf8_u8m1 | ( | ... | ) | __riscv_vredxor_vs_u8mf8_u8m1_tu(__VA_ARGS__) |
| #define vredxor_vs_u8mf8_u8m1_m | ( | ... | ) | __riscv_vredxor_vs_u8mf8_u8m1_tum(__VA_ARGS__) |
| #define vreinterpret_v_f16m1_i16m1 | ( | ... | ) | __riscv_vreinterpret_v_f16m1_i16m1(__VA_ARGS__) |
| #define vreinterpret_v_f16m1_u16m1 | ( | ... | ) | __riscv_vreinterpret_v_f16m1_u16m1(__VA_ARGS__) |
| #define vreinterpret_v_f16m2_i16m2 | ( | ... | ) | __riscv_vreinterpret_v_f16m2_i16m2(__VA_ARGS__) |
| #define vreinterpret_v_f16m2_u16m2 | ( | ... | ) | __riscv_vreinterpret_v_f16m2_u16m2(__VA_ARGS__) |
| #define vreinterpret_v_f16m4_i16m4 | ( | ... | ) | __riscv_vreinterpret_v_f16m4_i16m4(__VA_ARGS__) |
| #define vreinterpret_v_f16m4_u16m4 | ( | ... | ) | __riscv_vreinterpret_v_f16m4_u16m4(__VA_ARGS__) |
| #define vreinterpret_v_f16m8_i16m8 | ( | ... | ) | __riscv_vreinterpret_v_f16m8_i16m8(__VA_ARGS__) |
| #define vreinterpret_v_f16m8_u16m8 | ( | ... | ) | __riscv_vreinterpret_v_f16m8_u16m8(__VA_ARGS__) |
| #define vreinterpret_v_f16mf2_i16mf2 | ( | ... | ) | __riscv_vreinterpret_v_f16mf2_i16mf2(__VA_ARGS__) |
| #define vreinterpret_v_f16mf2_u16mf2 | ( | ... | ) | __riscv_vreinterpret_v_f16mf2_u16mf2(__VA_ARGS__) |
| #define vreinterpret_v_f16mf4_i16mf4 | ( | ... | ) | __riscv_vreinterpret_v_f16mf4_i16mf4(__VA_ARGS__) |
| #define vreinterpret_v_f16mf4_u16mf4 | ( | ... | ) | __riscv_vreinterpret_v_f16mf4_u16mf4(__VA_ARGS__) |
| #define vreinterpret_v_f32m1_i32m1 | ( | ... | ) | __riscv_vreinterpret_v_f32m1_i32m1(__VA_ARGS__) |
| #define vreinterpret_v_f32m1_u32m1 | ( | ... | ) | __riscv_vreinterpret_v_f32m1_u32m1(__VA_ARGS__) |
| #define vreinterpret_v_f32m2_i32m2 | ( | ... | ) | __riscv_vreinterpret_v_f32m2_i32m2(__VA_ARGS__) |
| #define vreinterpret_v_f32m2_u32m2 | ( | ... | ) | __riscv_vreinterpret_v_f32m2_u32m2(__VA_ARGS__) |
| #define vreinterpret_v_f32m4_i32m4 | ( | ... | ) | __riscv_vreinterpret_v_f32m4_i32m4(__VA_ARGS__) |
| #define vreinterpret_v_f32m4_u32m4 | ( | ... | ) | __riscv_vreinterpret_v_f32m4_u32m4(__VA_ARGS__) |
| #define vreinterpret_v_f32m8_i32m8 | ( | ... | ) | __riscv_vreinterpret_v_f32m8_i32m8(__VA_ARGS__) |
| #define vreinterpret_v_f32m8_u32m8 | ( | ... | ) | __riscv_vreinterpret_v_f32m8_u32m8(__VA_ARGS__) |
| #define vreinterpret_v_f32mf2_i32mf2 | ( | ... | ) | __riscv_vreinterpret_v_f32mf2_i32mf2(__VA_ARGS__) |
| #define vreinterpret_v_f32mf2_u32mf2 | ( | ... | ) | __riscv_vreinterpret_v_f32mf2_u32mf2(__VA_ARGS__) |
| #define vreinterpret_v_f64m1_i64m1 | ( | ... | ) | __riscv_vreinterpret_v_f64m1_i64m1(__VA_ARGS__) |
| #define vreinterpret_v_f64m1_u64m1 | ( | ... | ) | __riscv_vreinterpret_v_f64m1_u64m1(__VA_ARGS__) |
| #define vreinterpret_v_f64m2_i64m2 | ( | ... | ) | __riscv_vreinterpret_v_f64m2_i64m2(__VA_ARGS__) |
| #define vreinterpret_v_f64m2_u64m2 | ( | ... | ) | __riscv_vreinterpret_v_f64m2_u64m2(__VA_ARGS__) |
| #define vreinterpret_v_f64m4_i64m4 | ( | ... | ) | __riscv_vreinterpret_v_f64m4_i64m4(__VA_ARGS__) |
| #define vreinterpret_v_f64m4_u64m4 | ( | ... | ) | __riscv_vreinterpret_v_f64m4_u64m4(__VA_ARGS__) |
| #define vreinterpret_v_f64m8_i64m8 | ( | ... | ) | __riscv_vreinterpret_v_f64m8_i64m8(__VA_ARGS__) |
| #define vreinterpret_v_f64m8_u64m8 | ( | ... | ) | __riscv_vreinterpret_v_f64m8_u64m8(__VA_ARGS__) |
| #define vreinterpret_v_i16m1_f16m1 | ( | ... | ) | __riscv_vreinterpret_v_i16m1_f16m1(__VA_ARGS__) |
| #define vreinterpret_v_i16m1_i32m1 | ( | ... | ) | __riscv_vreinterpret_v_i16m1_i32m1(__VA_ARGS__) |
| #define vreinterpret_v_i16m1_i64m1 | ( | ... | ) | __riscv_vreinterpret_v_i16m1_i64m1(__VA_ARGS__) |
| #define vreinterpret_v_i16m1_i8m1 | ( | ... | ) | __riscv_vreinterpret_v_i16m1_i8m1(__VA_ARGS__) |
| #define vreinterpret_v_i16m1_u16m1 | ( | ... | ) | __riscv_vreinterpret_v_i16m1_u16m1(__VA_ARGS__) |
| #define vreinterpret_v_i16m2_f16m2 | ( | ... | ) | __riscv_vreinterpret_v_i16m2_f16m2(__VA_ARGS__) |
| #define vreinterpret_v_i16m2_i32m2 | ( | ... | ) | __riscv_vreinterpret_v_i16m2_i32m2(__VA_ARGS__) |
| #define vreinterpret_v_i16m2_i64m2 | ( | ... | ) | __riscv_vreinterpret_v_i16m2_i64m2(__VA_ARGS__) |
| #define vreinterpret_v_i16m2_i8m2 | ( | ... | ) | __riscv_vreinterpret_v_i16m2_i8m2(__VA_ARGS__) |
| #define vreinterpret_v_i16m2_u16m2 | ( | ... | ) | __riscv_vreinterpret_v_i16m2_u16m2(__VA_ARGS__) |
| #define vreinterpret_v_i16m4_f16m4 | ( | ... | ) | __riscv_vreinterpret_v_i16m4_f16m4(__VA_ARGS__) |
| #define vreinterpret_v_i16m4_i32m4 | ( | ... | ) | __riscv_vreinterpret_v_i16m4_i32m4(__VA_ARGS__) |
| #define vreinterpret_v_i16m4_i64m4 | ( | ... | ) | __riscv_vreinterpret_v_i16m4_i64m4(__VA_ARGS__) |
| #define vreinterpret_v_i16m4_i8m4 | ( | ... | ) | __riscv_vreinterpret_v_i16m4_i8m4(__VA_ARGS__) |
| #define vreinterpret_v_i16m4_u16m4 | ( | ... | ) | __riscv_vreinterpret_v_i16m4_u16m4(__VA_ARGS__) |
| #define vreinterpret_v_i16m8_f16m8 | ( | ... | ) | __riscv_vreinterpret_v_i16m8_f16m8(__VA_ARGS__) |
| #define vreinterpret_v_i16m8_i32m8 | ( | ... | ) | __riscv_vreinterpret_v_i16m8_i32m8(__VA_ARGS__) |
| #define vreinterpret_v_i16m8_i64m8 | ( | ... | ) | __riscv_vreinterpret_v_i16m8_i64m8(__VA_ARGS__) |
| #define vreinterpret_v_i16m8_i8m8 | ( | ... | ) | __riscv_vreinterpret_v_i16m8_i8m8(__VA_ARGS__) |
| #define vreinterpret_v_i16m8_u16m8 | ( | ... | ) | __riscv_vreinterpret_v_i16m8_u16m8(__VA_ARGS__) |
| #define vreinterpret_v_i16mf2_f16mf2 | ( | ... | ) | __riscv_vreinterpret_v_i16mf2_f16mf2(__VA_ARGS__) |
| #define vreinterpret_v_i16mf2_i32mf2 | ( | ... | ) | __riscv_vreinterpret_v_i16mf2_i32mf2(__VA_ARGS__) |
| #define vreinterpret_v_i16mf2_i8mf2 | ( | ... | ) | __riscv_vreinterpret_v_i16mf2_i8mf2(__VA_ARGS__) |
| #define vreinterpret_v_i16mf2_u16mf2 | ( | ... | ) | __riscv_vreinterpret_v_i16mf2_u16mf2(__VA_ARGS__) |
| #define vreinterpret_v_i16mf4_f16mf4 | ( | ... | ) | __riscv_vreinterpret_v_i16mf4_f16mf4(__VA_ARGS__) |
| #define vreinterpret_v_i16mf4_i8mf4 | ( | ... | ) | __riscv_vreinterpret_v_i16mf4_i8mf4(__VA_ARGS__) |
| #define vreinterpret_v_i16mf4_u16mf4 | ( | ... | ) | __riscv_vreinterpret_v_i16mf4_u16mf4(__VA_ARGS__) |
| #define vreinterpret_v_i32m1_f32m1 | ( | ... | ) | __riscv_vreinterpret_v_i32m1_f32m1(__VA_ARGS__) |
| #define vreinterpret_v_i32m1_i16m1 | ( | ... | ) | __riscv_vreinterpret_v_i32m1_i16m1(__VA_ARGS__) |
| #define vreinterpret_v_i32m1_i64m1 | ( | ... | ) | __riscv_vreinterpret_v_i32m1_i64m1(__VA_ARGS__) |
| #define vreinterpret_v_i32m1_i8m1 | ( | ... | ) | __riscv_vreinterpret_v_i32m1_i8m1(__VA_ARGS__) |
| #define vreinterpret_v_i32m1_u32m1 | ( | ... | ) | __riscv_vreinterpret_v_i32m1_u32m1(__VA_ARGS__) |
| #define vreinterpret_v_i32m2_f32m2 | ( | ... | ) | __riscv_vreinterpret_v_i32m2_f32m2(__VA_ARGS__) |
| #define vreinterpret_v_i32m2_i16m2 | ( | ... | ) | __riscv_vreinterpret_v_i32m2_i16m2(__VA_ARGS__) |
| #define vreinterpret_v_i32m2_i64m2 | ( | ... | ) | __riscv_vreinterpret_v_i32m2_i64m2(__VA_ARGS__) |
| #define vreinterpret_v_i32m2_i8m2 | ( | ... | ) | __riscv_vreinterpret_v_i32m2_i8m2(__VA_ARGS__) |
| #define vreinterpret_v_i32m2_u32m2 | ( | ... | ) | __riscv_vreinterpret_v_i32m2_u32m2(__VA_ARGS__) |
| #define vreinterpret_v_i32m4_f32m4 | ( | ... | ) | __riscv_vreinterpret_v_i32m4_f32m4(__VA_ARGS__) |
| #define vreinterpret_v_i32m4_i16m4 | ( | ... | ) | __riscv_vreinterpret_v_i32m4_i16m4(__VA_ARGS__) |
| #define vreinterpret_v_i32m4_i64m4 | ( | ... | ) | __riscv_vreinterpret_v_i32m4_i64m4(__VA_ARGS__) |
| #define vreinterpret_v_i32m4_i8m4 | ( | ... | ) | __riscv_vreinterpret_v_i32m4_i8m4(__VA_ARGS__) |
| #define vreinterpret_v_i32m4_u32m4 | ( | ... | ) | __riscv_vreinterpret_v_i32m4_u32m4(__VA_ARGS__) |
| #define vreinterpret_v_i32m8_f32m8 | ( | ... | ) | __riscv_vreinterpret_v_i32m8_f32m8(__VA_ARGS__) |
| #define vreinterpret_v_i32m8_i16m8 | ( | ... | ) | __riscv_vreinterpret_v_i32m8_i16m8(__VA_ARGS__) |
| #define vreinterpret_v_i32m8_i64m8 | ( | ... | ) | __riscv_vreinterpret_v_i32m8_i64m8(__VA_ARGS__) |
| #define vreinterpret_v_i32m8_i8m8 | ( | ... | ) | __riscv_vreinterpret_v_i32m8_i8m8(__VA_ARGS__) |
| #define vreinterpret_v_i32m8_u32m8 | ( | ... | ) | __riscv_vreinterpret_v_i32m8_u32m8(__VA_ARGS__) |
| #define vreinterpret_v_i32mf2_f32mf2 | ( | ... | ) | __riscv_vreinterpret_v_i32mf2_f32mf2(__VA_ARGS__) |
| #define vreinterpret_v_i32mf2_i16mf2 | ( | ... | ) | __riscv_vreinterpret_v_i32mf2_i16mf2(__VA_ARGS__) |
| #define vreinterpret_v_i32mf2_i8mf2 | ( | ... | ) | __riscv_vreinterpret_v_i32mf2_i8mf2(__VA_ARGS__) |
| #define vreinterpret_v_i32mf2_u32mf2 | ( | ... | ) | __riscv_vreinterpret_v_i32mf2_u32mf2(__VA_ARGS__) |
| #define vreinterpret_v_i64m1_f64m1 | ( | ... | ) | __riscv_vreinterpret_v_i64m1_f64m1(__VA_ARGS__) |
| #define vreinterpret_v_i64m1_i16m1 | ( | ... | ) | __riscv_vreinterpret_v_i64m1_i16m1(__VA_ARGS__) |
| #define vreinterpret_v_i64m1_i32m1 | ( | ... | ) | __riscv_vreinterpret_v_i64m1_i32m1(__VA_ARGS__) |
| #define vreinterpret_v_i64m1_i8m1 | ( | ... | ) | __riscv_vreinterpret_v_i64m1_i8m1(__VA_ARGS__) |
| #define vreinterpret_v_i64m1_u64m1 | ( | ... | ) | __riscv_vreinterpret_v_i64m1_u64m1(__VA_ARGS__) |
| #define vreinterpret_v_i64m2_f64m2 | ( | ... | ) | __riscv_vreinterpret_v_i64m2_f64m2(__VA_ARGS__) |
| #define vreinterpret_v_i64m2_i16m2 | ( | ... | ) | __riscv_vreinterpret_v_i64m2_i16m2(__VA_ARGS__) |
| #define vreinterpret_v_i64m2_i32m2 | ( | ... | ) | __riscv_vreinterpret_v_i64m2_i32m2(__VA_ARGS__) |
| #define vreinterpret_v_i64m2_i8m2 | ( | ... | ) | __riscv_vreinterpret_v_i64m2_i8m2(__VA_ARGS__) |
| #define vreinterpret_v_i64m2_u64m2 | ( | ... | ) | __riscv_vreinterpret_v_i64m2_u64m2(__VA_ARGS__) |
| #define vreinterpret_v_i64m4_f64m4 | ( | ... | ) | __riscv_vreinterpret_v_i64m4_f64m4(__VA_ARGS__) |
| #define vreinterpret_v_i64m4_i16m4 | ( | ... | ) | __riscv_vreinterpret_v_i64m4_i16m4(__VA_ARGS__) |
| #define vreinterpret_v_i64m4_i32m4 | ( | ... | ) | __riscv_vreinterpret_v_i64m4_i32m4(__VA_ARGS__) |
| #define vreinterpret_v_i64m4_i8m4 | ( | ... | ) | __riscv_vreinterpret_v_i64m4_i8m4(__VA_ARGS__) |
| #define vreinterpret_v_i64m4_u64m4 | ( | ... | ) | __riscv_vreinterpret_v_i64m4_u64m4(__VA_ARGS__) |
| #define vreinterpret_v_i64m8_f64m8 | ( | ... | ) | __riscv_vreinterpret_v_i64m8_f64m8(__VA_ARGS__) |
| #define vreinterpret_v_i64m8_i16m8 | ( | ... | ) | __riscv_vreinterpret_v_i64m8_i16m8(__VA_ARGS__) |
| #define vreinterpret_v_i64m8_i32m8 | ( | ... | ) | __riscv_vreinterpret_v_i64m8_i32m8(__VA_ARGS__) |
| #define vreinterpret_v_i64m8_i8m8 | ( | ... | ) | __riscv_vreinterpret_v_i64m8_i8m8(__VA_ARGS__) |
| #define vreinterpret_v_i64m8_u64m8 | ( | ... | ) | __riscv_vreinterpret_v_i64m8_u64m8(__VA_ARGS__) |
| #define vreinterpret_v_i8m1_i16m1 | ( | ... | ) | __riscv_vreinterpret_v_i8m1_i16m1(__VA_ARGS__) |
| #define vreinterpret_v_i8m1_i32m1 | ( | ... | ) | __riscv_vreinterpret_v_i8m1_i32m1(__VA_ARGS__) |
| #define vreinterpret_v_i8m1_i64m1 | ( | ... | ) | __riscv_vreinterpret_v_i8m1_i64m1(__VA_ARGS__) |
| #define vreinterpret_v_i8m1_u8m1 | ( | ... | ) | __riscv_vreinterpret_v_i8m1_u8m1(__VA_ARGS__) |
| #define vreinterpret_v_i8m2_i16m2 | ( | ... | ) | __riscv_vreinterpret_v_i8m2_i16m2(__VA_ARGS__) |
| #define vreinterpret_v_i8m2_i32m2 | ( | ... | ) | __riscv_vreinterpret_v_i8m2_i32m2(__VA_ARGS__) |
| #define vreinterpret_v_i8m2_i64m2 | ( | ... | ) | __riscv_vreinterpret_v_i8m2_i64m2(__VA_ARGS__) |
| #define vreinterpret_v_i8m2_u8m2 | ( | ... | ) | __riscv_vreinterpret_v_i8m2_u8m2(__VA_ARGS__) |
| #define vreinterpret_v_i8m4_i16m4 | ( | ... | ) | __riscv_vreinterpret_v_i8m4_i16m4(__VA_ARGS__) |
| #define vreinterpret_v_i8m4_i32m4 | ( | ... | ) | __riscv_vreinterpret_v_i8m4_i32m4(__VA_ARGS__) |
| #define vreinterpret_v_i8m4_i64m4 | ( | ... | ) | __riscv_vreinterpret_v_i8m4_i64m4(__VA_ARGS__) |
| #define vreinterpret_v_i8m4_u8m4 | ( | ... | ) | __riscv_vreinterpret_v_i8m4_u8m4(__VA_ARGS__) |
| #define vreinterpret_v_i8m8_i16m8 | ( | ... | ) | __riscv_vreinterpret_v_i8m8_i16m8(__VA_ARGS__) |
| #define vreinterpret_v_i8m8_i32m8 | ( | ... | ) | __riscv_vreinterpret_v_i8m8_i32m8(__VA_ARGS__) |
| #define vreinterpret_v_i8m8_i64m8 | ( | ... | ) | __riscv_vreinterpret_v_i8m8_i64m8(__VA_ARGS__) |
| #define vreinterpret_v_i8m8_u8m8 | ( | ... | ) | __riscv_vreinterpret_v_i8m8_u8m8(__VA_ARGS__) |
| #define vreinterpret_v_i8mf2_i16mf2 | ( | ... | ) | __riscv_vreinterpret_v_i8mf2_i16mf2(__VA_ARGS__) |
| #define vreinterpret_v_i8mf2_i32mf2 | ( | ... | ) | __riscv_vreinterpret_v_i8mf2_i32mf2(__VA_ARGS__) |
| #define vreinterpret_v_i8mf2_u8mf2 | ( | ... | ) | __riscv_vreinterpret_v_i8mf2_u8mf2(__VA_ARGS__) |
| #define vreinterpret_v_i8mf4_i16mf4 | ( | ... | ) | __riscv_vreinterpret_v_i8mf4_i16mf4(__VA_ARGS__) |
| #define vreinterpret_v_i8mf4_u8mf4 | ( | ... | ) | __riscv_vreinterpret_v_i8mf4_u8mf4(__VA_ARGS__) |
| #define vreinterpret_v_i8mf8_u8mf8 | ( | ... | ) | __riscv_vreinterpret_v_i8mf8_u8mf8(__VA_ARGS__) |
| #define vreinterpret_v_u16m1_f16m1 | ( | ... | ) | __riscv_vreinterpret_v_u16m1_f16m1(__VA_ARGS__) |
| #define vreinterpret_v_u16m1_i16m1 | ( | ... | ) | __riscv_vreinterpret_v_u16m1_i16m1(__VA_ARGS__) |
| #define vreinterpret_v_u16m1_u32m1 | ( | ... | ) | __riscv_vreinterpret_v_u16m1_u32m1(__VA_ARGS__) |
| #define vreinterpret_v_u16m1_u64m1 | ( | ... | ) | __riscv_vreinterpret_v_u16m1_u64m1(__VA_ARGS__) |
| #define vreinterpret_v_u16m1_u8m1 | ( | ... | ) | __riscv_vreinterpret_v_u16m1_u8m1(__VA_ARGS__) |
| #define vreinterpret_v_u16m2_f16m2 | ( | ... | ) | __riscv_vreinterpret_v_u16m2_f16m2(__VA_ARGS__) |
| #define vreinterpret_v_u16m2_i16m2 | ( | ... | ) | __riscv_vreinterpret_v_u16m2_i16m2(__VA_ARGS__) |
| #define vreinterpret_v_u16m2_u32m2 | ( | ... | ) | __riscv_vreinterpret_v_u16m2_u32m2(__VA_ARGS__) |
| #define vreinterpret_v_u16m2_u64m2 | ( | ... | ) | __riscv_vreinterpret_v_u16m2_u64m2(__VA_ARGS__) |
| #define vreinterpret_v_u16m2_u8m2 | ( | ... | ) | __riscv_vreinterpret_v_u16m2_u8m2(__VA_ARGS__) |
| #define vreinterpret_v_u16m4_f16m4 | ( | ... | ) | __riscv_vreinterpret_v_u16m4_f16m4(__VA_ARGS__) |
| #define vreinterpret_v_u16m4_i16m4 | ( | ... | ) | __riscv_vreinterpret_v_u16m4_i16m4(__VA_ARGS__) |
| #define vreinterpret_v_u16m4_u32m4 | ( | ... | ) | __riscv_vreinterpret_v_u16m4_u32m4(__VA_ARGS__) |
| #define vreinterpret_v_u16m4_u64m4 | ( | ... | ) | __riscv_vreinterpret_v_u16m4_u64m4(__VA_ARGS__) |
| #define vreinterpret_v_u16m4_u8m4 | ( | ... | ) | __riscv_vreinterpret_v_u16m4_u8m4(__VA_ARGS__) |
| #define vreinterpret_v_u16m8_f16m8 | ( | ... | ) | __riscv_vreinterpret_v_u16m8_f16m8(__VA_ARGS__) |
| #define vreinterpret_v_u16m8_i16m8 | ( | ... | ) | __riscv_vreinterpret_v_u16m8_i16m8(__VA_ARGS__) |
| #define vreinterpret_v_u16m8_u32m8 | ( | ... | ) | __riscv_vreinterpret_v_u16m8_u32m8(__VA_ARGS__) |
| #define vreinterpret_v_u16m8_u64m8 | ( | ... | ) | __riscv_vreinterpret_v_u16m8_u64m8(__VA_ARGS__) |
| #define vreinterpret_v_u16m8_u8m8 | ( | ... | ) | __riscv_vreinterpret_v_u16m8_u8m8(__VA_ARGS__) |
| #define vreinterpret_v_u16mf2_f16mf2 | ( | ... | ) | __riscv_vreinterpret_v_u16mf2_f16mf2(__VA_ARGS__) |
| #define vreinterpret_v_u16mf2_i16mf2 | ( | ... | ) | __riscv_vreinterpret_v_u16mf2_i16mf2(__VA_ARGS__) |
| #define vreinterpret_v_u16mf2_u32mf2 | ( | ... | ) | __riscv_vreinterpret_v_u16mf2_u32mf2(__VA_ARGS__) |
| #define vreinterpret_v_u16mf2_u8mf2 | ( | ... | ) | __riscv_vreinterpret_v_u16mf2_u8mf2(__VA_ARGS__) |
| #define vreinterpret_v_u16mf4_f16mf4 | ( | ... | ) | __riscv_vreinterpret_v_u16mf4_f16mf4(__VA_ARGS__) |
| #define vreinterpret_v_u16mf4_i16mf4 | ( | ... | ) | __riscv_vreinterpret_v_u16mf4_i16mf4(__VA_ARGS__) |
| #define vreinterpret_v_u16mf4_u8mf4 | ( | ... | ) | __riscv_vreinterpret_v_u16mf4_u8mf4(__VA_ARGS__) |
| #define vreinterpret_v_u32m1_f32m1 | ( | ... | ) | __riscv_vreinterpret_v_u32m1_f32m1(__VA_ARGS__) |
| #define vreinterpret_v_u32m1_i32m1 | ( | ... | ) | __riscv_vreinterpret_v_u32m1_i32m1(__VA_ARGS__) |
| #define vreinterpret_v_u32m1_u16m1 | ( | ... | ) | __riscv_vreinterpret_v_u32m1_u16m1(__VA_ARGS__) |
| #define vreinterpret_v_u32m1_u64m1 | ( | ... | ) | __riscv_vreinterpret_v_u32m1_u64m1(__VA_ARGS__) |
| #define vreinterpret_v_u32m1_u8m1 | ( | ... | ) | __riscv_vreinterpret_v_u32m1_u8m1(__VA_ARGS__) |
| #define vreinterpret_v_u32m2_f32m2 | ( | ... | ) | __riscv_vreinterpret_v_u32m2_f32m2(__VA_ARGS__) |
| #define vreinterpret_v_u32m2_i32m2 | ( | ... | ) | __riscv_vreinterpret_v_u32m2_i32m2(__VA_ARGS__) |
| #define vreinterpret_v_u32m2_u16m2 | ( | ... | ) | __riscv_vreinterpret_v_u32m2_u16m2(__VA_ARGS__) |
| #define vreinterpret_v_u32m2_u64m2 | ( | ... | ) | __riscv_vreinterpret_v_u32m2_u64m2(__VA_ARGS__) |
| #define vreinterpret_v_u32m2_u8m2 | ( | ... | ) | __riscv_vreinterpret_v_u32m2_u8m2(__VA_ARGS__) |
| #define vreinterpret_v_u32m4_f32m4 | ( | ... | ) | __riscv_vreinterpret_v_u32m4_f32m4(__VA_ARGS__) |
| #define vreinterpret_v_u32m4_i32m4 | ( | ... | ) | __riscv_vreinterpret_v_u32m4_i32m4(__VA_ARGS__) |
| #define vreinterpret_v_u32m4_u16m4 | ( | ... | ) | __riscv_vreinterpret_v_u32m4_u16m4(__VA_ARGS__) |
| #define vreinterpret_v_u32m4_u64m4 | ( | ... | ) | __riscv_vreinterpret_v_u32m4_u64m4(__VA_ARGS__) |
| #define vreinterpret_v_u32m4_u8m4 | ( | ... | ) | __riscv_vreinterpret_v_u32m4_u8m4(__VA_ARGS__) |
| #define vreinterpret_v_u32m8_f32m8 | ( | ... | ) | __riscv_vreinterpret_v_u32m8_f32m8(__VA_ARGS__) |
| #define vreinterpret_v_u32m8_i32m8 | ( | ... | ) | __riscv_vreinterpret_v_u32m8_i32m8(__VA_ARGS__) |
| #define vreinterpret_v_u32m8_u16m8 | ( | ... | ) | __riscv_vreinterpret_v_u32m8_u16m8(__VA_ARGS__) |
| #define vreinterpret_v_u32m8_u64m8 | ( | ... | ) | __riscv_vreinterpret_v_u32m8_u64m8(__VA_ARGS__) |
| #define vreinterpret_v_u32m8_u8m8 | ( | ... | ) | __riscv_vreinterpret_v_u32m8_u8m8(__VA_ARGS__) |
| #define vreinterpret_v_u32mf2_f32mf2 | ( | ... | ) | __riscv_vreinterpret_v_u32mf2_f32mf2(__VA_ARGS__) |
| #define vreinterpret_v_u32mf2_i32mf2 | ( | ... | ) | __riscv_vreinterpret_v_u32mf2_i32mf2(__VA_ARGS__) |
| #define vreinterpret_v_u32mf2_u16mf2 | ( | ... | ) | __riscv_vreinterpret_v_u32mf2_u16mf2(__VA_ARGS__) |
| #define vreinterpret_v_u32mf2_u8mf2 | ( | ... | ) | __riscv_vreinterpret_v_u32mf2_u8mf2(__VA_ARGS__) |
| #define vreinterpret_v_u64m1_f64m1 | ( | ... | ) | __riscv_vreinterpret_v_u64m1_f64m1(__VA_ARGS__) |
| #define vreinterpret_v_u64m1_i64m1 | ( | ... | ) | __riscv_vreinterpret_v_u64m1_i64m1(__VA_ARGS__) |
| #define vreinterpret_v_u64m1_u16m1 | ( | ... | ) | __riscv_vreinterpret_v_u64m1_u16m1(__VA_ARGS__) |
| #define vreinterpret_v_u64m1_u32m1 | ( | ... | ) | __riscv_vreinterpret_v_u64m1_u32m1(__VA_ARGS__) |
| #define vreinterpret_v_u64m1_u8m1 | ( | ... | ) | __riscv_vreinterpret_v_u64m1_u8m1(__VA_ARGS__) |
| #define vreinterpret_v_u64m2_f64m2 | ( | ... | ) | __riscv_vreinterpret_v_u64m2_f64m2(__VA_ARGS__) |
| #define vreinterpret_v_u64m2_i64m2 | ( | ... | ) | __riscv_vreinterpret_v_u64m2_i64m2(__VA_ARGS__) |
| #define vreinterpret_v_u64m2_u16m2 | ( | ... | ) | __riscv_vreinterpret_v_u64m2_u16m2(__VA_ARGS__) |
| #define vreinterpret_v_u64m2_u32m2 | ( | ... | ) | __riscv_vreinterpret_v_u64m2_u32m2(__VA_ARGS__) |
| #define vreinterpret_v_u64m2_u8m2 | ( | ... | ) | __riscv_vreinterpret_v_u64m2_u8m2(__VA_ARGS__) |
| #define vreinterpret_v_u64m4_f64m4 | ( | ... | ) | __riscv_vreinterpret_v_u64m4_f64m4(__VA_ARGS__) |
| #define vreinterpret_v_u64m4_i64m4 | ( | ... | ) | __riscv_vreinterpret_v_u64m4_i64m4(__VA_ARGS__) |
| #define vreinterpret_v_u64m4_u16m4 | ( | ... | ) | __riscv_vreinterpret_v_u64m4_u16m4(__VA_ARGS__) |
| #define vreinterpret_v_u64m4_u32m4 | ( | ... | ) | __riscv_vreinterpret_v_u64m4_u32m4(__VA_ARGS__) |
| #define vreinterpret_v_u64m4_u8m4 | ( | ... | ) | __riscv_vreinterpret_v_u64m4_u8m4(__VA_ARGS__) |
| #define vreinterpret_v_u64m8_f64m8 | ( | ... | ) | __riscv_vreinterpret_v_u64m8_f64m8(__VA_ARGS__) |
| #define vreinterpret_v_u64m8_i64m8 | ( | ... | ) | __riscv_vreinterpret_v_u64m8_i64m8(__VA_ARGS__) |
| #define vreinterpret_v_u64m8_u16m8 | ( | ... | ) | __riscv_vreinterpret_v_u64m8_u16m8(__VA_ARGS__) |
| #define vreinterpret_v_u64m8_u32m8 | ( | ... | ) | __riscv_vreinterpret_v_u64m8_u32m8(__VA_ARGS__) |
| #define vreinterpret_v_u64m8_u8m8 | ( | ... | ) | __riscv_vreinterpret_v_u64m8_u8m8(__VA_ARGS__) |
| #define vreinterpret_v_u8m1_i8m1 | ( | ... | ) | __riscv_vreinterpret_v_u8m1_i8m1(__VA_ARGS__) |
| #define vreinterpret_v_u8m1_u16m1 | ( | ... | ) | __riscv_vreinterpret_v_u8m1_u16m1(__VA_ARGS__) |
| #define vreinterpret_v_u8m1_u32m1 | ( | ... | ) | __riscv_vreinterpret_v_u8m1_u32m1(__VA_ARGS__) |
| #define vreinterpret_v_u8m1_u64m1 | ( | ... | ) | __riscv_vreinterpret_v_u8m1_u64m1(__VA_ARGS__) |
| #define vreinterpret_v_u8m2_i8m2 | ( | ... | ) | __riscv_vreinterpret_v_u8m2_i8m2(__VA_ARGS__) |
| #define vreinterpret_v_u8m2_u16m2 | ( | ... | ) | __riscv_vreinterpret_v_u8m2_u16m2(__VA_ARGS__) |
| #define vreinterpret_v_u8m2_u32m2 | ( | ... | ) | __riscv_vreinterpret_v_u8m2_u32m2(__VA_ARGS__) |
| #define vreinterpret_v_u8m2_u64m2 | ( | ... | ) | __riscv_vreinterpret_v_u8m2_u64m2(__VA_ARGS__) |
| #define vreinterpret_v_u8m4_i8m4 | ( | ... | ) | __riscv_vreinterpret_v_u8m4_i8m4(__VA_ARGS__) |
| #define vreinterpret_v_u8m4_u16m4 | ( | ... | ) | __riscv_vreinterpret_v_u8m4_u16m4(__VA_ARGS__) |
| #define vreinterpret_v_u8m4_u32m4 | ( | ... | ) | __riscv_vreinterpret_v_u8m4_u32m4(__VA_ARGS__) |
| #define vreinterpret_v_u8m4_u64m4 | ( | ... | ) | __riscv_vreinterpret_v_u8m4_u64m4(__VA_ARGS__) |
| #define vreinterpret_v_u8m8_i8m8 | ( | ... | ) | __riscv_vreinterpret_v_u8m8_i8m8(__VA_ARGS__) |
| #define vreinterpret_v_u8m8_u16m8 | ( | ... | ) | __riscv_vreinterpret_v_u8m8_u16m8(__VA_ARGS__) |
| #define vreinterpret_v_u8m8_u32m8 | ( | ... | ) | __riscv_vreinterpret_v_u8m8_u32m8(__VA_ARGS__) |
| #define vreinterpret_v_u8m8_u64m8 | ( | ... | ) | __riscv_vreinterpret_v_u8m8_u64m8(__VA_ARGS__) |
| #define vreinterpret_v_u8mf2_i8mf2 | ( | ... | ) | __riscv_vreinterpret_v_u8mf2_i8mf2(__VA_ARGS__) |
| #define vreinterpret_v_u8mf2_u16mf2 | ( | ... | ) | __riscv_vreinterpret_v_u8mf2_u16mf2(__VA_ARGS__) |
| #define vreinterpret_v_u8mf2_u32mf2 | ( | ... | ) | __riscv_vreinterpret_v_u8mf2_u32mf2(__VA_ARGS__) |
| #define vreinterpret_v_u8mf4_i8mf4 | ( | ... | ) | __riscv_vreinterpret_v_u8mf4_i8mf4(__VA_ARGS__) |
| #define vreinterpret_v_u8mf4_u16mf4 | ( | ... | ) | __riscv_vreinterpret_v_u8mf4_u16mf4(__VA_ARGS__) |
| #define vreinterpret_v_u8mf8_i8mf8 | ( | ... | ) | __riscv_vreinterpret_v_u8mf8_i8mf8(__VA_ARGS__) |
| #define vrem_vv_i16m1 | ( | ... | ) | __riscv_vrem_vv_i16m1(__VA_ARGS__) |
| #define vrem_vv_i16m1_m | ( | ... | ) | __riscv_vrem_vv_i16m1_tumu(__VA_ARGS__) |
| #define vrem_vv_i16m2 | ( | ... | ) | __riscv_vrem_vv_i16m2(__VA_ARGS__) |
| #define vrem_vv_i16m2_m | ( | ... | ) | __riscv_vrem_vv_i16m2_tumu(__VA_ARGS__) |
| #define vrem_vv_i16m4 | ( | ... | ) | __riscv_vrem_vv_i16m4(__VA_ARGS__) |
| #define vrem_vv_i16m4_m | ( | ... | ) | __riscv_vrem_vv_i16m4_tumu(__VA_ARGS__) |
| #define vrem_vv_i16m8 | ( | ... | ) | __riscv_vrem_vv_i16m8(__VA_ARGS__) |
| #define vrem_vv_i16m8_m | ( | ... | ) | __riscv_vrem_vv_i16m8_tumu(__VA_ARGS__) |
| #define vrem_vv_i16mf2 | ( | ... | ) | __riscv_vrem_vv_i16mf2(__VA_ARGS__) |
| #define vrem_vv_i16mf2_m | ( | ... | ) | __riscv_vrem_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vrem_vv_i16mf4 | ( | ... | ) | __riscv_vrem_vv_i16mf4(__VA_ARGS__) |
| #define vrem_vv_i16mf4_m | ( | ... | ) | __riscv_vrem_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vrem_vv_i32m1 | ( | ... | ) | __riscv_vrem_vv_i32m1(__VA_ARGS__) |
| #define vrem_vv_i32m1_m | ( | ... | ) | __riscv_vrem_vv_i32m1_tumu(__VA_ARGS__) |
| #define vrem_vv_i32m2 | ( | ... | ) | __riscv_vrem_vv_i32m2(__VA_ARGS__) |
| #define vrem_vv_i32m2_m | ( | ... | ) | __riscv_vrem_vv_i32m2_tumu(__VA_ARGS__) |
| #define vrem_vv_i32m4 | ( | ... | ) | __riscv_vrem_vv_i32m4(__VA_ARGS__) |
| #define vrem_vv_i32m4_m | ( | ... | ) | __riscv_vrem_vv_i32m4_tumu(__VA_ARGS__) |
| #define vrem_vv_i32m8 | ( | ... | ) | __riscv_vrem_vv_i32m8(__VA_ARGS__) |
| #define vrem_vv_i32m8_m | ( | ... | ) | __riscv_vrem_vv_i32m8_tumu(__VA_ARGS__) |
| #define vrem_vv_i32mf2 | ( | ... | ) | __riscv_vrem_vv_i32mf2(__VA_ARGS__) |
| #define vrem_vv_i32mf2_m | ( | ... | ) | __riscv_vrem_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vrem_vv_i64m1 | ( | ... | ) | __riscv_vrem_vv_i64m1(__VA_ARGS__) |
| #define vrem_vv_i64m1_m | ( | ... | ) | __riscv_vrem_vv_i64m1_tumu(__VA_ARGS__) |
| #define vrem_vv_i64m2 | ( | ... | ) | __riscv_vrem_vv_i64m2(__VA_ARGS__) |
| #define vrem_vv_i64m2_m | ( | ... | ) | __riscv_vrem_vv_i64m2_tumu(__VA_ARGS__) |
| #define vrem_vv_i64m4 | ( | ... | ) | __riscv_vrem_vv_i64m4(__VA_ARGS__) |
| #define vrem_vv_i64m4_m | ( | ... | ) | __riscv_vrem_vv_i64m4_tumu(__VA_ARGS__) |
| #define vrem_vv_i64m8 | ( | ... | ) | __riscv_vrem_vv_i64m8(__VA_ARGS__) |
| #define vrem_vv_i64m8_m | ( | ... | ) | __riscv_vrem_vv_i64m8_tumu(__VA_ARGS__) |
| #define vrem_vv_i8m1 | ( | ... | ) | __riscv_vrem_vv_i8m1(__VA_ARGS__) |
| #define vrem_vv_i8m1_m | ( | ... | ) | __riscv_vrem_vv_i8m1_tumu(__VA_ARGS__) |
| #define vrem_vv_i8m2 | ( | ... | ) | __riscv_vrem_vv_i8m2(__VA_ARGS__) |
| #define vrem_vv_i8m2_m | ( | ... | ) | __riscv_vrem_vv_i8m2_tumu(__VA_ARGS__) |
| #define vrem_vv_i8m4 | ( | ... | ) | __riscv_vrem_vv_i8m4(__VA_ARGS__) |
| #define vrem_vv_i8m4_m | ( | ... | ) | __riscv_vrem_vv_i8m4_tumu(__VA_ARGS__) |
| #define vrem_vv_i8m8 | ( | ... | ) | __riscv_vrem_vv_i8m8(__VA_ARGS__) |
| #define vrem_vv_i8m8_m | ( | ... | ) | __riscv_vrem_vv_i8m8_tumu(__VA_ARGS__) |
| #define vrem_vv_i8mf2 | ( | ... | ) | __riscv_vrem_vv_i8mf2(__VA_ARGS__) |
| #define vrem_vv_i8mf2_m | ( | ... | ) | __riscv_vrem_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vrem_vv_i8mf4 | ( | ... | ) | __riscv_vrem_vv_i8mf4(__VA_ARGS__) |
| #define vrem_vv_i8mf4_m | ( | ... | ) | __riscv_vrem_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vrem_vv_i8mf8 | ( | ... | ) | __riscv_vrem_vv_i8mf8(__VA_ARGS__) |
| #define vrem_vv_i8mf8_m | ( | ... | ) | __riscv_vrem_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vrem_vx_i16m1 | ( | ... | ) | __riscv_vrem_vx_i16m1(__VA_ARGS__) |
| #define vrem_vx_i16m1_m | ( | ... | ) | __riscv_vrem_vx_i16m1_tumu(__VA_ARGS__) |
| #define vrem_vx_i16m2 | ( | ... | ) | __riscv_vrem_vx_i16m2(__VA_ARGS__) |
| #define vrem_vx_i16m2_m | ( | ... | ) | __riscv_vrem_vx_i16m2_tumu(__VA_ARGS__) |
| #define vrem_vx_i16m4 | ( | ... | ) | __riscv_vrem_vx_i16m4(__VA_ARGS__) |
| #define vrem_vx_i16m4_m | ( | ... | ) | __riscv_vrem_vx_i16m4_tumu(__VA_ARGS__) |
| #define vrem_vx_i16m8 | ( | ... | ) | __riscv_vrem_vx_i16m8(__VA_ARGS__) |
| #define vrem_vx_i16m8_m | ( | ... | ) | __riscv_vrem_vx_i16m8_tumu(__VA_ARGS__) |
| #define vrem_vx_i16mf2 | ( | ... | ) | __riscv_vrem_vx_i16mf2(__VA_ARGS__) |
| #define vrem_vx_i16mf2_m | ( | ... | ) | __riscv_vrem_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vrem_vx_i16mf4 | ( | ... | ) | __riscv_vrem_vx_i16mf4(__VA_ARGS__) |
| #define vrem_vx_i16mf4_m | ( | ... | ) | __riscv_vrem_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vrem_vx_i32m1 | ( | ... | ) | __riscv_vrem_vx_i32m1(__VA_ARGS__) |
| #define vrem_vx_i32m1_m | ( | ... | ) | __riscv_vrem_vx_i32m1_tumu(__VA_ARGS__) |
| #define vrem_vx_i32m2 | ( | ... | ) | __riscv_vrem_vx_i32m2(__VA_ARGS__) |
| #define vrem_vx_i32m2_m | ( | ... | ) | __riscv_vrem_vx_i32m2_tumu(__VA_ARGS__) |
| #define vrem_vx_i32m4 | ( | ... | ) | __riscv_vrem_vx_i32m4(__VA_ARGS__) |
| #define vrem_vx_i32m4_m | ( | ... | ) | __riscv_vrem_vx_i32m4_tumu(__VA_ARGS__) |
| #define vrem_vx_i32m8 | ( | ... | ) | __riscv_vrem_vx_i32m8(__VA_ARGS__) |
| #define vrem_vx_i32m8_m | ( | ... | ) | __riscv_vrem_vx_i32m8_tumu(__VA_ARGS__) |
| #define vrem_vx_i32mf2 | ( | ... | ) | __riscv_vrem_vx_i32mf2(__VA_ARGS__) |
| #define vrem_vx_i32mf2_m | ( | ... | ) | __riscv_vrem_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vrem_vx_i64m1 | ( | ... | ) | __riscv_vrem_vx_i64m1(__VA_ARGS__) |
| #define vrem_vx_i64m1_m | ( | ... | ) | __riscv_vrem_vx_i64m1_tumu(__VA_ARGS__) |
| #define vrem_vx_i64m2 | ( | ... | ) | __riscv_vrem_vx_i64m2(__VA_ARGS__) |
| #define vrem_vx_i64m2_m | ( | ... | ) | __riscv_vrem_vx_i64m2_tumu(__VA_ARGS__) |
| #define vrem_vx_i64m4 | ( | ... | ) | __riscv_vrem_vx_i64m4(__VA_ARGS__) |
| #define vrem_vx_i64m4_m | ( | ... | ) | __riscv_vrem_vx_i64m4_tumu(__VA_ARGS__) |
| #define vrem_vx_i64m8 | ( | ... | ) | __riscv_vrem_vx_i64m8(__VA_ARGS__) |
| #define vrem_vx_i64m8_m | ( | ... | ) | __riscv_vrem_vx_i64m8_tumu(__VA_ARGS__) |
| #define vrem_vx_i8m1 | ( | ... | ) | __riscv_vrem_vx_i8m1(__VA_ARGS__) |
| #define vrem_vx_i8m1_m | ( | ... | ) | __riscv_vrem_vx_i8m1_tumu(__VA_ARGS__) |
| #define vrem_vx_i8m2 | ( | ... | ) | __riscv_vrem_vx_i8m2(__VA_ARGS__) |
| #define vrem_vx_i8m2_m | ( | ... | ) | __riscv_vrem_vx_i8m2_tumu(__VA_ARGS__) |
| #define vrem_vx_i8m4 | ( | ... | ) | __riscv_vrem_vx_i8m4(__VA_ARGS__) |
| #define vrem_vx_i8m4_m | ( | ... | ) | __riscv_vrem_vx_i8m4_tumu(__VA_ARGS__) |
| #define vrem_vx_i8m8 | ( | ... | ) | __riscv_vrem_vx_i8m8(__VA_ARGS__) |
| #define vrem_vx_i8m8_m | ( | ... | ) | __riscv_vrem_vx_i8m8_tumu(__VA_ARGS__) |
| #define vrem_vx_i8mf2 | ( | ... | ) | __riscv_vrem_vx_i8mf2(__VA_ARGS__) |
| #define vrem_vx_i8mf2_m | ( | ... | ) | __riscv_vrem_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vrem_vx_i8mf4 | ( | ... | ) | __riscv_vrem_vx_i8mf4(__VA_ARGS__) |
| #define vrem_vx_i8mf4_m | ( | ... | ) | __riscv_vrem_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vrem_vx_i8mf8 | ( | ... | ) | __riscv_vrem_vx_i8mf8(__VA_ARGS__) |
| #define vrem_vx_i8mf8_m | ( | ... | ) | __riscv_vrem_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vremu_vv_u16m1 | ( | ... | ) | __riscv_vremu_vv_u16m1(__VA_ARGS__) |
| #define vremu_vv_u16m1_m | ( | ... | ) | __riscv_vremu_vv_u16m1_tumu(__VA_ARGS__) |
| #define vremu_vv_u16m2 | ( | ... | ) | __riscv_vremu_vv_u16m2(__VA_ARGS__) |
| #define vremu_vv_u16m2_m | ( | ... | ) | __riscv_vremu_vv_u16m2_tumu(__VA_ARGS__) |
| #define vremu_vv_u16m4 | ( | ... | ) | __riscv_vremu_vv_u16m4(__VA_ARGS__) |
| #define vremu_vv_u16m4_m | ( | ... | ) | __riscv_vremu_vv_u16m4_tumu(__VA_ARGS__) |
| #define vremu_vv_u16m8 | ( | ... | ) | __riscv_vremu_vv_u16m8(__VA_ARGS__) |
| #define vremu_vv_u16m8_m | ( | ... | ) | __riscv_vremu_vv_u16m8_tumu(__VA_ARGS__) |
| #define vremu_vv_u16mf2 | ( | ... | ) | __riscv_vremu_vv_u16mf2(__VA_ARGS__) |
| #define vremu_vv_u16mf2_m | ( | ... | ) | __riscv_vremu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vremu_vv_u16mf4 | ( | ... | ) | __riscv_vremu_vv_u16mf4(__VA_ARGS__) |
| #define vremu_vv_u16mf4_m | ( | ... | ) | __riscv_vremu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vremu_vv_u32m1 | ( | ... | ) | __riscv_vremu_vv_u32m1(__VA_ARGS__) |
| #define vremu_vv_u32m1_m | ( | ... | ) | __riscv_vremu_vv_u32m1_tumu(__VA_ARGS__) |
| #define vremu_vv_u32m2 | ( | ... | ) | __riscv_vremu_vv_u32m2(__VA_ARGS__) |
| #define vremu_vv_u32m2_m | ( | ... | ) | __riscv_vremu_vv_u32m2_tumu(__VA_ARGS__) |
| #define vremu_vv_u32m4 | ( | ... | ) | __riscv_vremu_vv_u32m4(__VA_ARGS__) |
| #define vremu_vv_u32m4_m | ( | ... | ) | __riscv_vremu_vv_u32m4_tumu(__VA_ARGS__) |
| #define vremu_vv_u32m8 | ( | ... | ) | __riscv_vremu_vv_u32m8(__VA_ARGS__) |
| #define vremu_vv_u32m8_m | ( | ... | ) | __riscv_vremu_vv_u32m8_tumu(__VA_ARGS__) |
| #define vremu_vv_u32mf2 | ( | ... | ) | __riscv_vremu_vv_u32mf2(__VA_ARGS__) |
| #define vremu_vv_u32mf2_m | ( | ... | ) | __riscv_vremu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vremu_vv_u64m1 | ( | ... | ) | __riscv_vremu_vv_u64m1(__VA_ARGS__) |
| #define vremu_vv_u64m1_m | ( | ... | ) | __riscv_vremu_vv_u64m1_tumu(__VA_ARGS__) |
| #define vremu_vv_u64m2 | ( | ... | ) | __riscv_vremu_vv_u64m2(__VA_ARGS__) |
| #define vremu_vv_u64m2_m | ( | ... | ) | __riscv_vremu_vv_u64m2_tumu(__VA_ARGS__) |
| #define vremu_vv_u64m4 | ( | ... | ) | __riscv_vremu_vv_u64m4(__VA_ARGS__) |
| #define vremu_vv_u64m4_m | ( | ... | ) | __riscv_vremu_vv_u64m4_tumu(__VA_ARGS__) |
| #define vremu_vv_u64m8 | ( | ... | ) | __riscv_vremu_vv_u64m8(__VA_ARGS__) |
| #define vremu_vv_u64m8_m | ( | ... | ) | __riscv_vremu_vv_u64m8_tumu(__VA_ARGS__) |
| #define vremu_vv_u8m1 | ( | ... | ) | __riscv_vremu_vv_u8m1(__VA_ARGS__) |
| #define vremu_vv_u8m1_m | ( | ... | ) | __riscv_vremu_vv_u8m1_tumu(__VA_ARGS__) |
| #define vremu_vv_u8m2 | ( | ... | ) | __riscv_vremu_vv_u8m2(__VA_ARGS__) |
| #define vremu_vv_u8m2_m | ( | ... | ) | __riscv_vremu_vv_u8m2_tumu(__VA_ARGS__) |
| #define vremu_vv_u8m4 | ( | ... | ) | __riscv_vremu_vv_u8m4(__VA_ARGS__) |
| #define vremu_vv_u8m4_m | ( | ... | ) | __riscv_vremu_vv_u8m4_tumu(__VA_ARGS__) |
| #define vremu_vv_u8m8 | ( | ... | ) | __riscv_vremu_vv_u8m8(__VA_ARGS__) |
| #define vremu_vv_u8m8_m | ( | ... | ) | __riscv_vremu_vv_u8m8_tumu(__VA_ARGS__) |
| #define vremu_vv_u8mf2 | ( | ... | ) | __riscv_vremu_vv_u8mf2(__VA_ARGS__) |
| #define vremu_vv_u8mf2_m | ( | ... | ) | __riscv_vremu_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vremu_vv_u8mf4 | ( | ... | ) | __riscv_vremu_vv_u8mf4(__VA_ARGS__) |
| #define vremu_vv_u8mf4_m | ( | ... | ) | __riscv_vremu_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vremu_vv_u8mf8 | ( | ... | ) | __riscv_vremu_vv_u8mf8(__VA_ARGS__) |
| #define vremu_vv_u8mf8_m | ( | ... | ) | __riscv_vremu_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vremu_vx_u16m1 | ( | ... | ) | __riscv_vremu_vx_u16m1(__VA_ARGS__) |
| #define vremu_vx_u16m1_m | ( | ... | ) | __riscv_vremu_vx_u16m1_tumu(__VA_ARGS__) |
| #define vremu_vx_u16m2 | ( | ... | ) | __riscv_vremu_vx_u16m2(__VA_ARGS__) |
| #define vremu_vx_u16m2_m | ( | ... | ) | __riscv_vremu_vx_u16m2_tumu(__VA_ARGS__) |
| #define vremu_vx_u16m4 | ( | ... | ) | __riscv_vremu_vx_u16m4(__VA_ARGS__) |
| #define vremu_vx_u16m4_m | ( | ... | ) | __riscv_vremu_vx_u16m4_tumu(__VA_ARGS__) |
| #define vremu_vx_u16m8 | ( | ... | ) | __riscv_vremu_vx_u16m8(__VA_ARGS__) |
| #define vremu_vx_u16m8_m | ( | ... | ) | __riscv_vremu_vx_u16m8_tumu(__VA_ARGS__) |
| #define vremu_vx_u16mf2 | ( | ... | ) | __riscv_vremu_vx_u16mf2(__VA_ARGS__) |
| #define vremu_vx_u16mf2_m | ( | ... | ) | __riscv_vremu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vremu_vx_u16mf4 | ( | ... | ) | __riscv_vremu_vx_u16mf4(__VA_ARGS__) |
| #define vremu_vx_u16mf4_m | ( | ... | ) | __riscv_vremu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vremu_vx_u32m1 | ( | ... | ) | __riscv_vremu_vx_u32m1(__VA_ARGS__) |
| #define vremu_vx_u32m1_m | ( | ... | ) | __riscv_vremu_vx_u32m1_tumu(__VA_ARGS__) |
| #define vremu_vx_u32m2 | ( | ... | ) | __riscv_vremu_vx_u32m2(__VA_ARGS__) |
| #define vremu_vx_u32m2_m | ( | ... | ) | __riscv_vremu_vx_u32m2_tumu(__VA_ARGS__) |
| #define vremu_vx_u32m4 | ( | ... | ) | __riscv_vremu_vx_u32m4(__VA_ARGS__) |
| #define vremu_vx_u32m4_m | ( | ... | ) | __riscv_vremu_vx_u32m4_tumu(__VA_ARGS__) |
| #define vremu_vx_u32m8 | ( | ... | ) | __riscv_vremu_vx_u32m8(__VA_ARGS__) |
| #define vremu_vx_u32m8_m | ( | ... | ) | __riscv_vremu_vx_u32m8_tumu(__VA_ARGS__) |
| #define vremu_vx_u32mf2 | ( | ... | ) | __riscv_vremu_vx_u32mf2(__VA_ARGS__) |
| #define vremu_vx_u32mf2_m | ( | ... | ) | __riscv_vremu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vremu_vx_u64m1 | ( | ... | ) | __riscv_vremu_vx_u64m1(__VA_ARGS__) |
| #define vremu_vx_u64m1_m | ( | ... | ) | __riscv_vremu_vx_u64m1_tumu(__VA_ARGS__) |
| #define vremu_vx_u64m2 | ( | ... | ) | __riscv_vremu_vx_u64m2(__VA_ARGS__) |
| #define vremu_vx_u64m2_m | ( | ... | ) | __riscv_vremu_vx_u64m2_tumu(__VA_ARGS__) |
| #define vremu_vx_u64m4 | ( | ... | ) | __riscv_vremu_vx_u64m4(__VA_ARGS__) |
| #define vremu_vx_u64m4_m | ( | ... | ) | __riscv_vremu_vx_u64m4_tumu(__VA_ARGS__) |
| #define vremu_vx_u64m8 | ( | ... | ) | __riscv_vremu_vx_u64m8(__VA_ARGS__) |
| #define vremu_vx_u64m8_m | ( | ... | ) | __riscv_vremu_vx_u64m8_tumu(__VA_ARGS__) |
| #define vremu_vx_u8m1 | ( | ... | ) | __riscv_vremu_vx_u8m1(__VA_ARGS__) |
| #define vremu_vx_u8m1_m | ( | ... | ) | __riscv_vremu_vx_u8m1_tumu(__VA_ARGS__) |
| #define vremu_vx_u8m2 | ( | ... | ) | __riscv_vremu_vx_u8m2(__VA_ARGS__) |
| #define vremu_vx_u8m2_m | ( | ... | ) | __riscv_vremu_vx_u8m2_tumu(__VA_ARGS__) |
| #define vremu_vx_u8m4 | ( | ... | ) | __riscv_vremu_vx_u8m4(__VA_ARGS__) |
| #define vremu_vx_u8m4_m | ( | ... | ) | __riscv_vremu_vx_u8m4_tumu(__VA_ARGS__) |
| #define vremu_vx_u8m8 | ( | ... | ) | __riscv_vremu_vx_u8m8(__VA_ARGS__) |
| #define vremu_vx_u8m8_m | ( | ... | ) | __riscv_vremu_vx_u8m8_tumu(__VA_ARGS__) |
| #define vremu_vx_u8mf2 | ( | ... | ) | __riscv_vremu_vx_u8mf2(__VA_ARGS__) |
| #define vremu_vx_u8mf2_m | ( | ... | ) | __riscv_vremu_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vremu_vx_u8mf4 | ( | ... | ) | __riscv_vremu_vx_u8mf4(__VA_ARGS__) |
| #define vremu_vx_u8mf4_m | ( | ... | ) | __riscv_vremu_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vremu_vx_u8mf8 | ( | ... | ) | __riscv_vremu_vx_u8mf8(__VA_ARGS__) |
| #define vremu_vx_u8mf8_m | ( | ... | ) | __riscv_vremu_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vrgather_vv_f16m1 | ( | ... | ) | __riscv_vrgather_vv_f16m1(__VA_ARGS__) |
| #define vrgather_vv_f16m1_m | ( | ... | ) | __riscv_vrgather_vv_f16m1_tumu(__VA_ARGS__) |
| #define vrgather_vv_f16m2 | ( | ... | ) | __riscv_vrgather_vv_f16m2(__VA_ARGS__) |
| #define vrgather_vv_f16m2_m | ( | ... | ) | __riscv_vrgather_vv_f16m2_tumu(__VA_ARGS__) |
| #define vrgather_vv_f16m4 | ( | ... | ) | __riscv_vrgather_vv_f16m4(__VA_ARGS__) |
| #define vrgather_vv_f16m4_m | ( | ... | ) | __riscv_vrgather_vv_f16m4_tumu(__VA_ARGS__) |
| #define vrgather_vv_f16m8 | ( | ... | ) | __riscv_vrgather_vv_f16m8(__VA_ARGS__) |
| #define vrgather_vv_f16m8_m | ( | ... | ) | __riscv_vrgather_vv_f16m8_tumu(__VA_ARGS__) |
| #define vrgather_vv_f16mf2 | ( | ... | ) | __riscv_vrgather_vv_f16mf2(__VA_ARGS__) |
| #define vrgather_vv_f16mf2_m | ( | ... | ) | __riscv_vrgather_vv_f16mf2_tumu(__VA_ARGS__) |
| #define vrgather_vv_f16mf4 | ( | ... | ) | __riscv_vrgather_vv_f16mf4(__VA_ARGS__) |
| #define vrgather_vv_f16mf4_m | ( | ... | ) | __riscv_vrgather_vv_f16mf4_tumu(__VA_ARGS__) |
| #define vrgather_vv_f32m1 | ( | ... | ) | __riscv_vrgather_vv_f32m1(__VA_ARGS__) |
| #define vrgather_vv_f32m1_m | ( | ... | ) | __riscv_vrgather_vv_f32m1_tumu(__VA_ARGS__) |
| #define vrgather_vv_f32m2 | ( | ... | ) | __riscv_vrgather_vv_f32m2(__VA_ARGS__) |
| #define vrgather_vv_f32m2_m | ( | ... | ) | __riscv_vrgather_vv_f32m2_tumu(__VA_ARGS__) |
| #define vrgather_vv_f32m4 | ( | ... | ) | __riscv_vrgather_vv_f32m4(__VA_ARGS__) |
| #define vrgather_vv_f32m4_m | ( | ... | ) | __riscv_vrgather_vv_f32m4_tumu(__VA_ARGS__) |
| #define vrgather_vv_f32m8 | ( | ... | ) | __riscv_vrgather_vv_f32m8(__VA_ARGS__) |
| #define vrgather_vv_f32m8_m | ( | ... | ) | __riscv_vrgather_vv_f32m8_tumu(__VA_ARGS__) |
| #define vrgather_vv_f32mf2 | ( | ... | ) | __riscv_vrgather_vv_f32mf2(__VA_ARGS__) |
| #define vrgather_vv_f32mf2_m | ( | ... | ) | __riscv_vrgather_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vrgather_vv_f64m1 | ( | ... | ) | __riscv_vrgather_vv_f64m1(__VA_ARGS__) |
| #define vrgather_vv_f64m1_m | ( | ... | ) | __riscv_vrgather_vv_f64m1_tumu(__VA_ARGS__) |
| #define vrgather_vv_f64m2 | ( | ... | ) | __riscv_vrgather_vv_f64m2(__VA_ARGS__) |
| #define vrgather_vv_f64m2_m | ( | ... | ) | __riscv_vrgather_vv_f64m2_tumu(__VA_ARGS__) |
| #define vrgather_vv_f64m4 | ( | ... | ) | __riscv_vrgather_vv_f64m4(__VA_ARGS__) |
| #define vrgather_vv_f64m4_m | ( | ... | ) | __riscv_vrgather_vv_f64m4_tumu(__VA_ARGS__) |
| #define vrgather_vv_f64m8 | ( | ... | ) | __riscv_vrgather_vv_f64m8(__VA_ARGS__) |
| #define vrgather_vv_f64m8_m | ( | ... | ) | __riscv_vrgather_vv_f64m8_tumu(__VA_ARGS__) |
| #define vrgather_vv_i16m1 | ( | ... | ) | __riscv_vrgather_vv_i16m1(__VA_ARGS__) |
| #define vrgather_vv_i16m1_m | ( | ... | ) | __riscv_vrgather_vv_i16m1_tumu(__VA_ARGS__) |
| #define vrgather_vv_i16m2 | ( | ... | ) | __riscv_vrgather_vv_i16m2(__VA_ARGS__) |
| #define vrgather_vv_i16m2_m | ( | ... | ) | __riscv_vrgather_vv_i16m2_tumu(__VA_ARGS__) |
| #define vrgather_vv_i16m4 | ( | ... | ) | __riscv_vrgather_vv_i16m4(__VA_ARGS__) |
| #define vrgather_vv_i16m4_m | ( | ... | ) | __riscv_vrgather_vv_i16m4_tumu(__VA_ARGS__) |
| #define vrgather_vv_i16m8 | ( | ... | ) | __riscv_vrgather_vv_i16m8(__VA_ARGS__) |
| #define vrgather_vv_i16m8_m | ( | ... | ) | __riscv_vrgather_vv_i16m8_tumu(__VA_ARGS__) |
| #define vrgather_vv_i16mf2 | ( | ... | ) | __riscv_vrgather_vv_i16mf2(__VA_ARGS__) |
| #define vrgather_vv_i16mf2_m | ( | ... | ) | __riscv_vrgather_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vrgather_vv_i16mf4 | ( | ... | ) | __riscv_vrgather_vv_i16mf4(__VA_ARGS__) |
| #define vrgather_vv_i16mf4_m | ( | ... | ) | __riscv_vrgather_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vrgather_vv_i32m1 | ( | ... | ) | __riscv_vrgather_vv_i32m1(__VA_ARGS__) |
| #define vrgather_vv_i32m1_m | ( | ... | ) | __riscv_vrgather_vv_i32m1_tumu(__VA_ARGS__) |
| #define vrgather_vv_i32m2 | ( | ... | ) | __riscv_vrgather_vv_i32m2(__VA_ARGS__) |
| #define vrgather_vv_i32m2_m | ( | ... | ) | __riscv_vrgather_vv_i32m2_tumu(__VA_ARGS__) |
| #define vrgather_vv_i32m4 | ( | ... | ) | __riscv_vrgather_vv_i32m4(__VA_ARGS__) |
| #define vrgather_vv_i32m4_m | ( | ... | ) | __riscv_vrgather_vv_i32m4_tumu(__VA_ARGS__) |
| #define vrgather_vv_i32m8 | ( | ... | ) | __riscv_vrgather_vv_i32m8(__VA_ARGS__) |
| #define vrgather_vv_i32m8_m | ( | ... | ) | __riscv_vrgather_vv_i32m8_tumu(__VA_ARGS__) |
| #define vrgather_vv_i32mf2 | ( | ... | ) | __riscv_vrgather_vv_i32mf2(__VA_ARGS__) |
| #define vrgather_vv_i32mf2_m | ( | ... | ) | __riscv_vrgather_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vrgather_vv_i64m1 | ( | ... | ) | __riscv_vrgather_vv_i64m1(__VA_ARGS__) |
| #define vrgather_vv_i64m1_m | ( | ... | ) | __riscv_vrgather_vv_i64m1_tumu(__VA_ARGS__) |
| #define vrgather_vv_i64m2 | ( | ... | ) | __riscv_vrgather_vv_i64m2(__VA_ARGS__) |
| #define vrgather_vv_i64m2_m | ( | ... | ) | __riscv_vrgather_vv_i64m2_tumu(__VA_ARGS__) |
| #define vrgather_vv_i64m4 | ( | ... | ) | __riscv_vrgather_vv_i64m4(__VA_ARGS__) |
| #define vrgather_vv_i64m4_m | ( | ... | ) | __riscv_vrgather_vv_i64m4_tumu(__VA_ARGS__) |
| #define vrgather_vv_i64m8 | ( | ... | ) | __riscv_vrgather_vv_i64m8(__VA_ARGS__) |
| #define vrgather_vv_i64m8_m | ( | ... | ) | __riscv_vrgather_vv_i64m8_tumu(__VA_ARGS__) |
| #define vrgather_vv_i8m1 | ( | ... | ) | __riscv_vrgather_vv_i8m1(__VA_ARGS__) |
| #define vrgather_vv_i8m1_m | ( | ... | ) | __riscv_vrgather_vv_i8m1_tumu(__VA_ARGS__) |
| #define vrgather_vv_i8m2 | ( | ... | ) | __riscv_vrgather_vv_i8m2(__VA_ARGS__) |
| #define vrgather_vv_i8m2_m | ( | ... | ) | __riscv_vrgather_vv_i8m2_tumu(__VA_ARGS__) |
| #define vrgather_vv_i8m4 | ( | ... | ) | __riscv_vrgather_vv_i8m4(__VA_ARGS__) |
| #define vrgather_vv_i8m4_m | ( | ... | ) | __riscv_vrgather_vv_i8m4_tumu(__VA_ARGS__) |
| #define vrgather_vv_i8m8 | ( | ... | ) | __riscv_vrgather_vv_i8m8(__VA_ARGS__) |
| #define vrgather_vv_i8m8_m | ( | ... | ) | __riscv_vrgather_vv_i8m8_tumu(__VA_ARGS__) |
| #define vrgather_vv_i8mf2 | ( | ... | ) | __riscv_vrgather_vv_i8mf2(__VA_ARGS__) |
| #define vrgather_vv_i8mf2_m | ( | ... | ) | __riscv_vrgather_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vrgather_vv_i8mf4 | ( | ... | ) | __riscv_vrgather_vv_i8mf4(__VA_ARGS__) |
| #define vrgather_vv_i8mf4_m | ( | ... | ) | __riscv_vrgather_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vrgather_vv_i8mf8 | ( | ... | ) | __riscv_vrgather_vv_i8mf8(__VA_ARGS__) |
| #define vrgather_vv_i8mf8_m | ( | ... | ) | __riscv_vrgather_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vrgather_vv_u16m1 | ( | ... | ) | __riscv_vrgather_vv_u16m1(__VA_ARGS__) |
| #define vrgather_vv_u16m1_m | ( | ... | ) | __riscv_vrgather_vv_u16m1_tumu(__VA_ARGS__) |
| #define vrgather_vv_u16m2 | ( | ... | ) | __riscv_vrgather_vv_u16m2(__VA_ARGS__) |
| #define vrgather_vv_u16m2_m | ( | ... | ) | __riscv_vrgather_vv_u16m2_tumu(__VA_ARGS__) |
| #define vrgather_vv_u16m4 | ( | ... | ) | __riscv_vrgather_vv_u16m4(__VA_ARGS__) |
| #define vrgather_vv_u16m4_m | ( | ... | ) | __riscv_vrgather_vv_u16m4_tumu(__VA_ARGS__) |
| #define vrgather_vv_u16m8 | ( | ... | ) | __riscv_vrgather_vv_u16m8(__VA_ARGS__) |
| #define vrgather_vv_u16m8_m | ( | ... | ) | __riscv_vrgather_vv_u16m8_tumu(__VA_ARGS__) |
| #define vrgather_vv_u16mf2 | ( | ... | ) | __riscv_vrgather_vv_u16mf2(__VA_ARGS__) |
| #define vrgather_vv_u16mf2_m | ( | ... | ) | __riscv_vrgather_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vrgather_vv_u16mf4 | ( | ... | ) | __riscv_vrgather_vv_u16mf4(__VA_ARGS__) |
| #define vrgather_vv_u16mf4_m | ( | ... | ) | __riscv_vrgather_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vrgather_vv_u32m1 | ( | ... | ) | __riscv_vrgather_vv_u32m1(__VA_ARGS__) |
| #define vrgather_vv_u32m1_m | ( | ... | ) | __riscv_vrgather_vv_u32m1_tumu(__VA_ARGS__) |
| #define vrgather_vv_u32m2 | ( | ... | ) | __riscv_vrgather_vv_u32m2(__VA_ARGS__) |
| #define vrgather_vv_u32m2_m | ( | ... | ) | __riscv_vrgather_vv_u32m2_tumu(__VA_ARGS__) |
| #define vrgather_vv_u32m4 | ( | ... | ) | __riscv_vrgather_vv_u32m4(__VA_ARGS__) |
| #define vrgather_vv_u32m4_m | ( | ... | ) | __riscv_vrgather_vv_u32m4_tumu(__VA_ARGS__) |
| #define vrgather_vv_u32m8 | ( | ... | ) | __riscv_vrgather_vv_u32m8(__VA_ARGS__) |
| #define vrgather_vv_u32m8_m | ( | ... | ) | __riscv_vrgather_vv_u32m8_tumu(__VA_ARGS__) |
| #define vrgather_vv_u32mf2 | ( | ... | ) | __riscv_vrgather_vv_u32mf2(__VA_ARGS__) |
| #define vrgather_vv_u32mf2_m | ( | ... | ) | __riscv_vrgather_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vrgather_vv_u64m1 | ( | ... | ) | __riscv_vrgather_vv_u64m1(__VA_ARGS__) |
| #define vrgather_vv_u64m1_m | ( | ... | ) | __riscv_vrgather_vv_u64m1_tumu(__VA_ARGS__) |
| #define vrgather_vv_u64m2 | ( | ... | ) | __riscv_vrgather_vv_u64m2(__VA_ARGS__) |
| #define vrgather_vv_u64m2_m | ( | ... | ) | __riscv_vrgather_vv_u64m2_tumu(__VA_ARGS__) |
| #define vrgather_vv_u64m4 | ( | ... | ) | __riscv_vrgather_vv_u64m4(__VA_ARGS__) |
| #define vrgather_vv_u64m4_m | ( | ... | ) | __riscv_vrgather_vv_u64m4_tumu(__VA_ARGS__) |
| #define vrgather_vv_u64m8 | ( | ... | ) | __riscv_vrgather_vv_u64m8(__VA_ARGS__) |
| #define vrgather_vv_u64m8_m | ( | ... | ) | __riscv_vrgather_vv_u64m8_tumu(__VA_ARGS__) |
| #define vrgather_vv_u8m1 | ( | ... | ) | __riscv_vrgather_vv_u8m1(__VA_ARGS__) |
| #define vrgather_vv_u8m1_m | ( | ... | ) | __riscv_vrgather_vv_u8m1_tumu(__VA_ARGS__) |
| #define vrgather_vv_u8m2 | ( | ... | ) | __riscv_vrgather_vv_u8m2(__VA_ARGS__) |
| #define vrgather_vv_u8m2_m | ( | ... | ) | __riscv_vrgather_vv_u8m2_tumu(__VA_ARGS__) |
| #define vrgather_vv_u8m4 | ( | ... | ) | __riscv_vrgather_vv_u8m4(__VA_ARGS__) |
| #define vrgather_vv_u8m4_m | ( | ... | ) | __riscv_vrgather_vv_u8m4_tumu(__VA_ARGS__) |
| #define vrgather_vv_u8m8 | ( | ... | ) | __riscv_vrgather_vv_u8m8(__VA_ARGS__) |
| #define vrgather_vv_u8m8_m | ( | ... | ) | __riscv_vrgather_vv_u8m8_tumu(__VA_ARGS__) |
| #define vrgather_vv_u8mf2 | ( | ... | ) | __riscv_vrgather_vv_u8mf2(__VA_ARGS__) |
| #define vrgather_vv_u8mf2_m | ( | ... | ) | __riscv_vrgather_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vrgather_vv_u8mf4 | ( | ... | ) | __riscv_vrgather_vv_u8mf4(__VA_ARGS__) |
| #define vrgather_vv_u8mf4_m | ( | ... | ) | __riscv_vrgather_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vrgather_vv_u8mf8 | ( | ... | ) | __riscv_vrgather_vv_u8mf8(__VA_ARGS__) |
| #define vrgather_vv_u8mf8_m | ( | ... | ) | __riscv_vrgather_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vrgather_vx_f16m1 | ( | ... | ) | __riscv_vrgather_vx_f16m1(__VA_ARGS__) |
| #define vrgather_vx_f16m1_m | ( | ... | ) | __riscv_vrgather_vx_f16m1_tumu(__VA_ARGS__) |
| #define vrgather_vx_f16m2 | ( | ... | ) | __riscv_vrgather_vx_f16m2(__VA_ARGS__) |
| #define vrgather_vx_f16m2_m | ( | ... | ) | __riscv_vrgather_vx_f16m2_tumu(__VA_ARGS__) |
| #define vrgather_vx_f16m4 | ( | ... | ) | __riscv_vrgather_vx_f16m4(__VA_ARGS__) |
| #define vrgather_vx_f16m4_m | ( | ... | ) | __riscv_vrgather_vx_f16m4_tumu(__VA_ARGS__) |
| #define vrgather_vx_f16m8 | ( | ... | ) | __riscv_vrgather_vx_f16m8(__VA_ARGS__) |
| #define vrgather_vx_f16m8_m | ( | ... | ) | __riscv_vrgather_vx_f16m8_tumu(__VA_ARGS__) |
| #define vrgather_vx_f16mf2 | ( | ... | ) | __riscv_vrgather_vx_f16mf2(__VA_ARGS__) |
| #define vrgather_vx_f16mf2_m | ( | ... | ) | __riscv_vrgather_vx_f16mf2_tumu(__VA_ARGS__) |
| #define vrgather_vx_f16mf4 | ( | ... | ) | __riscv_vrgather_vx_f16mf4(__VA_ARGS__) |
| #define vrgather_vx_f16mf4_m | ( | ... | ) | __riscv_vrgather_vx_f16mf4_tumu(__VA_ARGS__) |
| #define vrgather_vx_f32m1 | ( | ... | ) | __riscv_vrgather_vx_f32m1(__VA_ARGS__) |
| #define vrgather_vx_f32m1_m | ( | ... | ) | __riscv_vrgather_vx_f32m1_tumu(__VA_ARGS__) |
| #define vrgather_vx_f32m2 | ( | ... | ) | __riscv_vrgather_vx_f32m2(__VA_ARGS__) |
| #define vrgather_vx_f32m2_m | ( | ... | ) | __riscv_vrgather_vx_f32m2_tumu(__VA_ARGS__) |
| #define vrgather_vx_f32m4 | ( | ... | ) | __riscv_vrgather_vx_f32m4(__VA_ARGS__) |
| #define vrgather_vx_f32m4_m | ( | ... | ) | __riscv_vrgather_vx_f32m4_tumu(__VA_ARGS__) |
| #define vrgather_vx_f32m8 | ( | ... | ) | __riscv_vrgather_vx_f32m8(__VA_ARGS__) |
| #define vrgather_vx_f32m8_m | ( | ... | ) | __riscv_vrgather_vx_f32m8_tumu(__VA_ARGS__) |
| #define vrgather_vx_f32mf2 | ( | ... | ) | __riscv_vrgather_vx_f32mf2(__VA_ARGS__) |
| #define vrgather_vx_f32mf2_m | ( | ... | ) | __riscv_vrgather_vx_f32mf2_tumu(__VA_ARGS__) |
| #define vrgather_vx_f64m1 | ( | ... | ) | __riscv_vrgather_vx_f64m1(__VA_ARGS__) |
| #define vrgather_vx_f64m1_m | ( | ... | ) | __riscv_vrgather_vx_f64m1_tumu(__VA_ARGS__) |
| #define vrgather_vx_f64m2 | ( | ... | ) | __riscv_vrgather_vx_f64m2(__VA_ARGS__) |
| #define vrgather_vx_f64m2_m | ( | ... | ) | __riscv_vrgather_vx_f64m2_tumu(__VA_ARGS__) |
| #define vrgather_vx_f64m4 | ( | ... | ) | __riscv_vrgather_vx_f64m4(__VA_ARGS__) |
| #define vrgather_vx_f64m4_m | ( | ... | ) | __riscv_vrgather_vx_f64m4_tumu(__VA_ARGS__) |
| #define vrgather_vx_f64m8 | ( | ... | ) | __riscv_vrgather_vx_f64m8(__VA_ARGS__) |
| #define vrgather_vx_f64m8_m | ( | ... | ) | __riscv_vrgather_vx_f64m8_tumu(__VA_ARGS__) |
| #define vrgather_vx_i16m1 | ( | ... | ) | __riscv_vrgather_vx_i16m1(__VA_ARGS__) |
| #define vrgather_vx_i16m1_m | ( | ... | ) | __riscv_vrgather_vx_i16m1_tumu(__VA_ARGS__) |
| #define vrgather_vx_i16m2 | ( | ... | ) | __riscv_vrgather_vx_i16m2(__VA_ARGS__) |
| #define vrgather_vx_i16m2_m | ( | ... | ) | __riscv_vrgather_vx_i16m2_tumu(__VA_ARGS__) |
| #define vrgather_vx_i16m4 | ( | ... | ) | __riscv_vrgather_vx_i16m4(__VA_ARGS__) |
| #define vrgather_vx_i16m4_m | ( | ... | ) | __riscv_vrgather_vx_i16m4_tumu(__VA_ARGS__) |
| #define vrgather_vx_i16m8 | ( | ... | ) | __riscv_vrgather_vx_i16m8(__VA_ARGS__) |
| #define vrgather_vx_i16m8_m | ( | ... | ) | __riscv_vrgather_vx_i16m8_tumu(__VA_ARGS__) |
| #define vrgather_vx_i16mf2 | ( | ... | ) | __riscv_vrgather_vx_i16mf2(__VA_ARGS__) |
| #define vrgather_vx_i16mf2_m | ( | ... | ) | __riscv_vrgather_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vrgather_vx_i16mf4 | ( | ... | ) | __riscv_vrgather_vx_i16mf4(__VA_ARGS__) |
| #define vrgather_vx_i16mf4_m | ( | ... | ) | __riscv_vrgather_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vrgather_vx_i32m1 | ( | ... | ) | __riscv_vrgather_vx_i32m1(__VA_ARGS__) |
| #define vrgather_vx_i32m1_m | ( | ... | ) | __riscv_vrgather_vx_i32m1_tumu(__VA_ARGS__) |
| #define vrgather_vx_i32m2 | ( | ... | ) | __riscv_vrgather_vx_i32m2(__VA_ARGS__) |
| #define vrgather_vx_i32m2_m | ( | ... | ) | __riscv_vrgather_vx_i32m2_tumu(__VA_ARGS__) |
| #define vrgather_vx_i32m4 | ( | ... | ) | __riscv_vrgather_vx_i32m4(__VA_ARGS__) |
| #define vrgather_vx_i32m4_m | ( | ... | ) | __riscv_vrgather_vx_i32m4_tumu(__VA_ARGS__) |
| #define vrgather_vx_i32m8 | ( | ... | ) | __riscv_vrgather_vx_i32m8(__VA_ARGS__) |
| #define vrgather_vx_i32m8_m | ( | ... | ) | __riscv_vrgather_vx_i32m8_tumu(__VA_ARGS__) |
| #define vrgather_vx_i32mf2 | ( | ... | ) | __riscv_vrgather_vx_i32mf2(__VA_ARGS__) |
| #define vrgather_vx_i32mf2_m | ( | ... | ) | __riscv_vrgather_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vrgather_vx_i64m1 | ( | ... | ) | __riscv_vrgather_vx_i64m1(__VA_ARGS__) |
| #define vrgather_vx_i64m1_m | ( | ... | ) | __riscv_vrgather_vx_i64m1_tumu(__VA_ARGS__) |
| #define vrgather_vx_i64m2 | ( | ... | ) | __riscv_vrgather_vx_i64m2(__VA_ARGS__) |
| #define vrgather_vx_i64m2_m | ( | ... | ) | __riscv_vrgather_vx_i64m2_tumu(__VA_ARGS__) |
| #define vrgather_vx_i64m4 | ( | ... | ) | __riscv_vrgather_vx_i64m4(__VA_ARGS__) |
| #define vrgather_vx_i64m4_m | ( | ... | ) | __riscv_vrgather_vx_i64m4_tumu(__VA_ARGS__) |
| #define vrgather_vx_i64m8 | ( | ... | ) | __riscv_vrgather_vx_i64m8(__VA_ARGS__) |
| #define vrgather_vx_i64m8_m | ( | ... | ) | __riscv_vrgather_vx_i64m8_tumu(__VA_ARGS__) |
| #define vrgather_vx_i8m1 | ( | ... | ) | __riscv_vrgather_vx_i8m1(__VA_ARGS__) |
| #define vrgather_vx_i8m1_m | ( | ... | ) | __riscv_vrgather_vx_i8m1_tumu(__VA_ARGS__) |
| #define vrgather_vx_i8m2 | ( | ... | ) | __riscv_vrgather_vx_i8m2(__VA_ARGS__) |
| #define vrgather_vx_i8m2_m | ( | ... | ) | __riscv_vrgather_vx_i8m2_tumu(__VA_ARGS__) |
| #define vrgather_vx_i8m4 | ( | ... | ) | __riscv_vrgather_vx_i8m4(__VA_ARGS__) |
| #define vrgather_vx_i8m4_m | ( | ... | ) | __riscv_vrgather_vx_i8m4_tumu(__VA_ARGS__) |
| #define vrgather_vx_i8m8 | ( | ... | ) | __riscv_vrgather_vx_i8m8(__VA_ARGS__) |
| #define vrgather_vx_i8m8_m | ( | ... | ) | __riscv_vrgather_vx_i8m8_tumu(__VA_ARGS__) |
| #define vrgather_vx_i8mf2 | ( | ... | ) | __riscv_vrgather_vx_i8mf2(__VA_ARGS__) |
| #define vrgather_vx_i8mf2_m | ( | ... | ) | __riscv_vrgather_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vrgather_vx_i8mf4 | ( | ... | ) | __riscv_vrgather_vx_i8mf4(__VA_ARGS__) |
| #define vrgather_vx_i8mf4_m | ( | ... | ) | __riscv_vrgather_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vrgather_vx_i8mf8 | ( | ... | ) | __riscv_vrgather_vx_i8mf8(__VA_ARGS__) |
| #define vrgather_vx_i8mf8_m | ( | ... | ) | __riscv_vrgather_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vrgather_vx_u16m1 | ( | ... | ) | __riscv_vrgather_vx_u16m1(__VA_ARGS__) |
| #define vrgather_vx_u16m1_m | ( | ... | ) | __riscv_vrgather_vx_u16m1_tumu(__VA_ARGS__) |
| #define vrgather_vx_u16m2 | ( | ... | ) | __riscv_vrgather_vx_u16m2(__VA_ARGS__) |
| #define vrgather_vx_u16m2_m | ( | ... | ) | __riscv_vrgather_vx_u16m2_tumu(__VA_ARGS__) |
| #define vrgather_vx_u16m4 | ( | ... | ) | __riscv_vrgather_vx_u16m4(__VA_ARGS__) |
| #define vrgather_vx_u16m4_m | ( | ... | ) | __riscv_vrgather_vx_u16m4_tumu(__VA_ARGS__) |
| #define vrgather_vx_u16m8 | ( | ... | ) | __riscv_vrgather_vx_u16m8(__VA_ARGS__) |
| #define vrgather_vx_u16m8_m | ( | ... | ) | __riscv_vrgather_vx_u16m8_tumu(__VA_ARGS__) |
| #define vrgather_vx_u16mf2 | ( | ... | ) | __riscv_vrgather_vx_u16mf2(__VA_ARGS__) |
| #define vrgather_vx_u16mf2_m | ( | ... | ) | __riscv_vrgather_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vrgather_vx_u16mf4 | ( | ... | ) | __riscv_vrgather_vx_u16mf4(__VA_ARGS__) |
| #define vrgather_vx_u16mf4_m | ( | ... | ) | __riscv_vrgather_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vrgather_vx_u32m1 | ( | ... | ) | __riscv_vrgather_vx_u32m1(__VA_ARGS__) |
| #define vrgather_vx_u32m1_m | ( | ... | ) | __riscv_vrgather_vx_u32m1_tumu(__VA_ARGS__) |
| #define vrgather_vx_u32m2 | ( | ... | ) | __riscv_vrgather_vx_u32m2(__VA_ARGS__) |
| #define vrgather_vx_u32m2_m | ( | ... | ) | __riscv_vrgather_vx_u32m2_tumu(__VA_ARGS__) |
| #define vrgather_vx_u32m4 | ( | ... | ) | __riscv_vrgather_vx_u32m4(__VA_ARGS__) |
| #define vrgather_vx_u32m4_m | ( | ... | ) | __riscv_vrgather_vx_u32m4_tumu(__VA_ARGS__) |
| #define vrgather_vx_u32m8 | ( | ... | ) | __riscv_vrgather_vx_u32m8(__VA_ARGS__) |
| #define vrgather_vx_u32m8_m | ( | ... | ) | __riscv_vrgather_vx_u32m8_tumu(__VA_ARGS__) |
| #define vrgather_vx_u32mf2 | ( | ... | ) | __riscv_vrgather_vx_u32mf2(__VA_ARGS__) |
| #define vrgather_vx_u32mf2_m | ( | ... | ) | __riscv_vrgather_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vrgather_vx_u64m1 | ( | ... | ) | __riscv_vrgather_vx_u64m1(__VA_ARGS__) |
| #define vrgather_vx_u64m1_m | ( | ... | ) | __riscv_vrgather_vx_u64m1_tumu(__VA_ARGS__) |
| #define vrgather_vx_u64m2 | ( | ... | ) | __riscv_vrgather_vx_u64m2(__VA_ARGS__) |
| #define vrgather_vx_u64m2_m | ( | ... | ) | __riscv_vrgather_vx_u64m2_tumu(__VA_ARGS__) |
| #define vrgather_vx_u64m4 | ( | ... | ) | __riscv_vrgather_vx_u64m4(__VA_ARGS__) |
| #define vrgather_vx_u64m4_m | ( | ... | ) | __riscv_vrgather_vx_u64m4_tumu(__VA_ARGS__) |
| #define vrgather_vx_u64m8 | ( | ... | ) | __riscv_vrgather_vx_u64m8(__VA_ARGS__) |
| #define vrgather_vx_u64m8_m | ( | ... | ) | __riscv_vrgather_vx_u64m8_tumu(__VA_ARGS__) |
| #define vrgather_vx_u8m1 | ( | ... | ) | __riscv_vrgather_vx_u8m1(__VA_ARGS__) |
| #define vrgather_vx_u8m1_m | ( | ... | ) | __riscv_vrgather_vx_u8m1_tumu(__VA_ARGS__) |
| #define vrgather_vx_u8m2 | ( | ... | ) | __riscv_vrgather_vx_u8m2(__VA_ARGS__) |
| #define vrgather_vx_u8m2_m | ( | ... | ) | __riscv_vrgather_vx_u8m2_tumu(__VA_ARGS__) |
| #define vrgather_vx_u8m4 | ( | ... | ) | __riscv_vrgather_vx_u8m4(__VA_ARGS__) |
| #define vrgather_vx_u8m4_m | ( | ... | ) | __riscv_vrgather_vx_u8m4_tumu(__VA_ARGS__) |
| #define vrgather_vx_u8m8 | ( | ... | ) | __riscv_vrgather_vx_u8m8(__VA_ARGS__) |
| #define vrgather_vx_u8m8_m | ( | ... | ) | __riscv_vrgather_vx_u8m8_tumu(__VA_ARGS__) |
| #define vrgather_vx_u8mf2 | ( | ... | ) | __riscv_vrgather_vx_u8mf2(__VA_ARGS__) |
| #define vrgather_vx_u8mf2_m | ( | ... | ) | __riscv_vrgather_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vrgather_vx_u8mf4 | ( | ... | ) | __riscv_vrgather_vx_u8mf4(__VA_ARGS__) |
| #define vrgather_vx_u8mf4_m | ( | ... | ) | __riscv_vrgather_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vrgather_vx_u8mf8 | ( | ... | ) | __riscv_vrgather_vx_u8mf8(__VA_ARGS__) |
| #define vrgather_vx_u8mf8_m | ( | ... | ) | __riscv_vrgather_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_f16m1 | ( | ... | ) | __riscv_vrgatherei16_vv_f16m1(__VA_ARGS__) |
| #define vrgatherei16_vv_f16m1_m | ( | ... | ) | __riscv_vrgatherei16_vv_f16m1_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_f16m2 | ( | ... | ) | __riscv_vrgatherei16_vv_f16m2(__VA_ARGS__) |
| #define vrgatherei16_vv_f16m2_m | ( | ... | ) | __riscv_vrgatherei16_vv_f16m2_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_f16m4 | ( | ... | ) | __riscv_vrgatherei16_vv_f16m4(__VA_ARGS__) |
| #define vrgatherei16_vv_f16m4_m | ( | ... | ) | __riscv_vrgatherei16_vv_f16m4_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_f16m8 | ( | ... | ) | __riscv_vrgatherei16_vv_f16m8(__VA_ARGS__) |
| #define vrgatherei16_vv_f16m8_m | ( | ... | ) | __riscv_vrgatherei16_vv_f16m8_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_f16mf2 | ( | ... | ) | __riscv_vrgatherei16_vv_f16mf2(__VA_ARGS__) |
| #define vrgatherei16_vv_f16mf2_m | ( | ... | ) | __riscv_vrgatherei16_vv_f16mf2_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_f16mf4 | ( | ... | ) | __riscv_vrgatherei16_vv_f16mf4(__VA_ARGS__) |
| #define vrgatherei16_vv_f16mf4_m | ( | ... | ) | __riscv_vrgatherei16_vv_f16mf4_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_f32m1 | ( | ... | ) | __riscv_vrgatherei16_vv_f32m1(__VA_ARGS__) |
| #define vrgatherei16_vv_f32m1_m | ( | ... | ) | __riscv_vrgatherei16_vv_f32m1_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_f32m2 | ( | ... | ) | __riscv_vrgatherei16_vv_f32m2(__VA_ARGS__) |
| #define vrgatherei16_vv_f32m2_m | ( | ... | ) | __riscv_vrgatherei16_vv_f32m2_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_f32m4 | ( | ... | ) | __riscv_vrgatherei16_vv_f32m4(__VA_ARGS__) |
| #define vrgatherei16_vv_f32m4_m | ( | ... | ) | __riscv_vrgatherei16_vv_f32m4_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_f32m8 | ( | ... | ) | __riscv_vrgatherei16_vv_f32m8(__VA_ARGS__) |
| #define vrgatherei16_vv_f32m8_m | ( | ... | ) | __riscv_vrgatherei16_vv_f32m8_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_f32mf2 | ( | ... | ) | __riscv_vrgatherei16_vv_f32mf2(__VA_ARGS__) |
| #define vrgatherei16_vv_f32mf2_m | ( | ... | ) | __riscv_vrgatherei16_vv_f32mf2_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_f64m1 | ( | ... | ) | __riscv_vrgatherei16_vv_f64m1(__VA_ARGS__) |
| #define vrgatherei16_vv_f64m1_m | ( | ... | ) | __riscv_vrgatherei16_vv_f64m1_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_f64m2 | ( | ... | ) | __riscv_vrgatherei16_vv_f64m2(__VA_ARGS__) |
| #define vrgatherei16_vv_f64m2_m | ( | ... | ) | __riscv_vrgatherei16_vv_f64m2_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_f64m4 | ( | ... | ) | __riscv_vrgatherei16_vv_f64m4(__VA_ARGS__) |
| #define vrgatherei16_vv_f64m4_m | ( | ... | ) | __riscv_vrgatherei16_vv_f64m4_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_f64m8 | ( | ... | ) | __riscv_vrgatherei16_vv_f64m8(__VA_ARGS__) |
| #define vrgatherei16_vv_f64m8_m | ( | ... | ) | __riscv_vrgatherei16_vv_f64m8_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_i16m1 | ( | ... | ) | __riscv_vrgatherei16_vv_i16m1(__VA_ARGS__) |
| #define vrgatherei16_vv_i16m1_m | ( | ... | ) | __riscv_vrgatherei16_vv_i16m1_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_i16m2 | ( | ... | ) | __riscv_vrgatherei16_vv_i16m2(__VA_ARGS__) |
| #define vrgatherei16_vv_i16m2_m | ( | ... | ) | __riscv_vrgatherei16_vv_i16m2_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_i16m4 | ( | ... | ) | __riscv_vrgatherei16_vv_i16m4(__VA_ARGS__) |
| #define vrgatherei16_vv_i16m4_m | ( | ... | ) | __riscv_vrgatherei16_vv_i16m4_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_i16m8 | ( | ... | ) | __riscv_vrgatherei16_vv_i16m8(__VA_ARGS__) |
| #define vrgatherei16_vv_i16m8_m | ( | ... | ) | __riscv_vrgatherei16_vv_i16m8_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_i16mf2 | ( | ... | ) | __riscv_vrgatherei16_vv_i16mf2(__VA_ARGS__) |
| #define vrgatherei16_vv_i16mf2_m | ( | ... | ) | __riscv_vrgatherei16_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_i16mf4 | ( | ... | ) | __riscv_vrgatherei16_vv_i16mf4(__VA_ARGS__) |
| #define vrgatherei16_vv_i16mf4_m | ( | ... | ) | __riscv_vrgatherei16_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_i32m1 | ( | ... | ) | __riscv_vrgatherei16_vv_i32m1(__VA_ARGS__) |
| #define vrgatherei16_vv_i32m1_m | ( | ... | ) | __riscv_vrgatherei16_vv_i32m1_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_i32m2 | ( | ... | ) | __riscv_vrgatherei16_vv_i32m2(__VA_ARGS__) |
| #define vrgatherei16_vv_i32m2_m | ( | ... | ) | __riscv_vrgatherei16_vv_i32m2_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_i32m4 | ( | ... | ) | __riscv_vrgatherei16_vv_i32m4(__VA_ARGS__) |
| #define vrgatherei16_vv_i32m4_m | ( | ... | ) | __riscv_vrgatherei16_vv_i32m4_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_i32m8 | ( | ... | ) | __riscv_vrgatherei16_vv_i32m8(__VA_ARGS__) |
| #define vrgatherei16_vv_i32m8_m | ( | ... | ) | __riscv_vrgatherei16_vv_i32m8_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_i32mf2 | ( | ... | ) | __riscv_vrgatherei16_vv_i32mf2(__VA_ARGS__) |
| #define vrgatherei16_vv_i32mf2_m | ( | ... | ) | __riscv_vrgatherei16_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_i64m1 | ( | ... | ) | __riscv_vrgatherei16_vv_i64m1(__VA_ARGS__) |
| #define vrgatherei16_vv_i64m1_m | ( | ... | ) | __riscv_vrgatherei16_vv_i64m1_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_i64m2 | ( | ... | ) | __riscv_vrgatherei16_vv_i64m2(__VA_ARGS__) |
| #define vrgatherei16_vv_i64m2_m | ( | ... | ) | __riscv_vrgatherei16_vv_i64m2_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_i64m4 | ( | ... | ) | __riscv_vrgatherei16_vv_i64m4(__VA_ARGS__) |
| #define vrgatherei16_vv_i64m4_m | ( | ... | ) | __riscv_vrgatherei16_vv_i64m4_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_i64m8 | ( | ... | ) | __riscv_vrgatherei16_vv_i64m8(__VA_ARGS__) |
| #define vrgatherei16_vv_i64m8_m | ( | ... | ) | __riscv_vrgatherei16_vv_i64m8_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_i8m1 | ( | ... | ) | __riscv_vrgatherei16_vv_i8m1(__VA_ARGS__) |
| #define vrgatherei16_vv_i8m1_m | ( | ... | ) | __riscv_vrgatherei16_vv_i8m1_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_i8m2 | ( | ... | ) | __riscv_vrgatherei16_vv_i8m2(__VA_ARGS__) |
| #define vrgatherei16_vv_i8m2_m | ( | ... | ) | __riscv_vrgatherei16_vv_i8m2_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_i8m4 | ( | ... | ) | __riscv_vrgatherei16_vv_i8m4(__VA_ARGS__) |
| #define vrgatherei16_vv_i8m4_m | ( | ... | ) | __riscv_vrgatherei16_vv_i8m4_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_i8mf2 | ( | ... | ) | __riscv_vrgatherei16_vv_i8mf2(__VA_ARGS__) |
| #define vrgatherei16_vv_i8mf2_m | ( | ... | ) | __riscv_vrgatherei16_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_i8mf4 | ( | ... | ) | __riscv_vrgatherei16_vv_i8mf4(__VA_ARGS__) |
| #define vrgatherei16_vv_i8mf4_m | ( | ... | ) | __riscv_vrgatherei16_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_i8mf8 | ( | ... | ) | __riscv_vrgatherei16_vv_i8mf8(__VA_ARGS__) |
| #define vrgatherei16_vv_i8mf8_m | ( | ... | ) | __riscv_vrgatherei16_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_u16m1 | ( | ... | ) | __riscv_vrgatherei16_vv_u16m1(__VA_ARGS__) |
| #define vrgatherei16_vv_u16m1_m | ( | ... | ) | __riscv_vrgatherei16_vv_u16m1_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_u16m2 | ( | ... | ) | __riscv_vrgatherei16_vv_u16m2(__VA_ARGS__) |
| #define vrgatherei16_vv_u16m2_m | ( | ... | ) | __riscv_vrgatherei16_vv_u16m2_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_u16m4 | ( | ... | ) | __riscv_vrgatherei16_vv_u16m4(__VA_ARGS__) |
| #define vrgatherei16_vv_u16m4_m | ( | ... | ) | __riscv_vrgatherei16_vv_u16m4_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_u16m8 | ( | ... | ) | __riscv_vrgatherei16_vv_u16m8(__VA_ARGS__) |
| #define vrgatherei16_vv_u16m8_m | ( | ... | ) | __riscv_vrgatherei16_vv_u16m8_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_u16mf2 | ( | ... | ) | __riscv_vrgatherei16_vv_u16mf2(__VA_ARGS__) |
| #define vrgatherei16_vv_u16mf2_m | ( | ... | ) | __riscv_vrgatherei16_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_u16mf4 | ( | ... | ) | __riscv_vrgatherei16_vv_u16mf4(__VA_ARGS__) |
| #define vrgatherei16_vv_u16mf4_m | ( | ... | ) | __riscv_vrgatherei16_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_u32m1 | ( | ... | ) | __riscv_vrgatherei16_vv_u32m1(__VA_ARGS__) |
| #define vrgatherei16_vv_u32m1_m | ( | ... | ) | __riscv_vrgatherei16_vv_u32m1_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_u32m2 | ( | ... | ) | __riscv_vrgatherei16_vv_u32m2(__VA_ARGS__) |
| #define vrgatherei16_vv_u32m2_m | ( | ... | ) | __riscv_vrgatherei16_vv_u32m2_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_u32m4 | ( | ... | ) | __riscv_vrgatherei16_vv_u32m4(__VA_ARGS__) |
| #define vrgatherei16_vv_u32m4_m | ( | ... | ) | __riscv_vrgatherei16_vv_u32m4_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_u32m8 | ( | ... | ) | __riscv_vrgatherei16_vv_u32m8(__VA_ARGS__) |
| #define vrgatherei16_vv_u32m8_m | ( | ... | ) | __riscv_vrgatherei16_vv_u32m8_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_u32mf2 | ( | ... | ) | __riscv_vrgatherei16_vv_u32mf2(__VA_ARGS__) |
| #define vrgatherei16_vv_u32mf2_m | ( | ... | ) | __riscv_vrgatherei16_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_u64m1 | ( | ... | ) | __riscv_vrgatherei16_vv_u64m1(__VA_ARGS__) |
| #define vrgatherei16_vv_u64m1_m | ( | ... | ) | __riscv_vrgatherei16_vv_u64m1_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_u64m2 | ( | ... | ) | __riscv_vrgatherei16_vv_u64m2(__VA_ARGS__) |
| #define vrgatherei16_vv_u64m2_m | ( | ... | ) | __riscv_vrgatherei16_vv_u64m2_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_u64m4 | ( | ... | ) | __riscv_vrgatherei16_vv_u64m4(__VA_ARGS__) |
| #define vrgatherei16_vv_u64m4_m | ( | ... | ) | __riscv_vrgatherei16_vv_u64m4_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_u64m8 | ( | ... | ) | __riscv_vrgatherei16_vv_u64m8(__VA_ARGS__) |
| #define vrgatherei16_vv_u64m8_m | ( | ... | ) | __riscv_vrgatherei16_vv_u64m8_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_u8m1 | ( | ... | ) | __riscv_vrgatherei16_vv_u8m1(__VA_ARGS__) |
| #define vrgatherei16_vv_u8m1_m | ( | ... | ) | __riscv_vrgatherei16_vv_u8m1_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_u8m2 | ( | ... | ) | __riscv_vrgatherei16_vv_u8m2(__VA_ARGS__) |
| #define vrgatherei16_vv_u8m2_m | ( | ... | ) | __riscv_vrgatherei16_vv_u8m2_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_u8m4 | ( | ... | ) | __riscv_vrgatherei16_vv_u8m4(__VA_ARGS__) |
| #define vrgatherei16_vv_u8m4_m | ( | ... | ) | __riscv_vrgatherei16_vv_u8m4_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_u8mf2 | ( | ... | ) | __riscv_vrgatherei16_vv_u8mf2(__VA_ARGS__) |
| #define vrgatherei16_vv_u8mf2_m | ( | ... | ) | __riscv_vrgatherei16_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_u8mf4 | ( | ... | ) | __riscv_vrgatherei16_vv_u8mf4(__VA_ARGS__) |
| #define vrgatherei16_vv_u8mf4_m | ( | ... | ) | __riscv_vrgatherei16_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vrgatherei16_vv_u8mf8 | ( | ... | ) | __riscv_vrgatherei16_vv_u8mf8(__VA_ARGS__) |
| #define vrgatherei16_vv_u8mf8_m | ( | ... | ) | __riscv_vrgatherei16_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vrsub_vx_i16m1 | ( | ... | ) | __riscv_vrsub_vx_i16m1(__VA_ARGS__) |
| #define vrsub_vx_i16m1_m | ( | ... | ) | __riscv_vrsub_vx_i16m1_tumu(__VA_ARGS__) |
| #define vrsub_vx_i16m2 | ( | ... | ) | __riscv_vrsub_vx_i16m2(__VA_ARGS__) |
| #define vrsub_vx_i16m2_m | ( | ... | ) | __riscv_vrsub_vx_i16m2_tumu(__VA_ARGS__) |
| #define vrsub_vx_i16m4 | ( | ... | ) | __riscv_vrsub_vx_i16m4(__VA_ARGS__) |
| #define vrsub_vx_i16m4_m | ( | ... | ) | __riscv_vrsub_vx_i16m4_tumu(__VA_ARGS__) |
| #define vrsub_vx_i16m8 | ( | ... | ) | __riscv_vrsub_vx_i16m8(__VA_ARGS__) |
| #define vrsub_vx_i16m8_m | ( | ... | ) | __riscv_vrsub_vx_i16m8_tumu(__VA_ARGS__) |
| #define vrsub_vx_i16mf2 | ( | ... | ) | __riscv_vrsub_vx_i16mf2(__VA_ARGS__) |
| #define vrsub_vx_i16mf2_m | ( | ... | ) | __riscv_vrsub_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vrsub_vx_i16mf4 | ( | ... | ) | __riscv_vrsub_vx_i16mf4(__VA_ARGS__) |
| #define vrsub_vx_i16mf4_m | ( | ... | ) | __riscv_vrsub_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vrsub_vx_i32m1 | ( | ... | ) | __riscv_vrsub_vx_i32m1(__VA_ARGS__) |
| #define vrsub_vx_i32m1_m | ( | ... | ) | __riscv_vrsub_vx_i32m1_tumu(__VA_ARGS__) |
| #define vrsub_vx_i32m2 | ( | ... | ) | __riscv_vrsub_vx_i32m2(__VA_ARGS__) |
| #define vrsub_vx_i32m2_m | ( | ... | ) | __riscv_vrsub_vx_i32m2_tumu(__VA_ARGS__) |
| #define vrsub_vx_i32m4 | ( | ... | ) | __riscv_vrsub_vx_i32m4(__VA_ARGS__) |
| #define vrsub_vx_i32m4_m | ( | ... | ) | __riscv_vrsub_vx_i32m4_tumu(__VA_ARGS__) |
| #define vrsub_vx_i32m8 | ( | ... | ) | __riscv_vrsub_vx_i32m8(__VA_ARGS__) |
| #define vrsub_vx_i32m8_m | ( | ... | ) | __riscv_vrsub_vx_i32m8_tumu(__VA_ARGS__) |
| #define vrsub_vx_i32mf2 | ( | ... | ) | __riscv_vrsub_vx_i32mf2(__VA_ARGS__) |
| #define vrsub_vx_i32mf2_m | ( | ... | ) | __riscv_vrsub_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vrsub_vx_i64m1 | ( | ... | ) | __riscv_vrsub_vx_i64m1(__VA_ARGS__) |
| #define vrsub_vx_i64m1_m | ( | ... | ) | __riscv_vrsub_vx_i64m1_tumu(__VA_ARGS__) |
| #define vrsub_vx_i64m2 | ( | ... | ) | __riscv_vrsub_vx_i64m2(__VA_ARGS__) |
| #define vrsub_vx_i64m2_m | ( | ... | ) | __riscv_vrsub_vx_i64m2_tumu(__VA_ARGS__) |
| #define vrsub_vx_i64m4 | ( | ... | ) | __riscv_vrsub_vx_i64m4(__VA_ARGS__) |
| #define vrsub_vx_i64m4_m | ( | ... | ) | __riscv_vrsub_vx_i64m4_tumu(__VA_ARGS__) |
| #define vrsub_vx_i64m8 | ( | ... | ) | __riscv_vrsub_vx_i64m8(__VA_ARGS__) |
| #define vrsub_vx_i64m8_m | ( | ... | ) | __riscv_vrsub_vx_i64m8_tumu(__VA_ARGS__) |
| #define vrsub_vx_i8m1 | ( | ... | ) | __riscv_vrsub_vx_i8m1(__VA_ARGS__) |
| #define vrsub_vx_i8m1_m | ( | ... | ) | __riscv_vrsub_vx_i8m1_tumu(__VA_ARGS__) |
| #define vrsub_vx_i8m2 | ( | ... | ) | __riscv_vrsub_vx_i8m2(__VA_ARGS__) |
| #define vrsub_vx_i8m2_m | ( | ... | ) | __riscv_vrsub_vx_i8m2_tumu(__VA_ARGS__) |
| #define vrsub_vx_i8m4 | ( | ... | ) | __riscv_vrsub_vx_i8m4(__VA_ARGS__) |
| #define vrsub_vx_i8m4_m | ( | ... | ) | __riscv_vrsub_vx_i8m4_tumu(__VA_ARGS__) |
| #define vrsub_vx_i8m8 | ( | ... | ) | __riscv_vrsub_vx_i8m8(__VA_ARGS__) |
| #define vrsub_vx_i8m8_m | ( | ... | ) | __riscv_vrsub_vx_i8m8_tumu(__VA_ARGS__) |
| #define vrsub_vx_i8mf2 | ( | ... | ) | __riscv_vrsub_vx_i8mf2(__VA_ARGS__) |
| #define vrsub_vx_i8mf2_m | ( | ... | ) | __riscv_vrsub_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vrsub_vx_i8mf4 | ( | ... | ) | __riscv_vrsub_vx_i8mf4(__VA_ARGS__) |
| #define vrsub_vx_i8mf4_m | ( | ... | ) | __riscv_vrsub_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vrsub_vx_i8mf8 | ( | ... | ) | __riscv_vrsub_vx_i8mf8(__VA_ARGS__) |
| #define vrsub_vx_i8mf8_m | ( | ... | ) | __riscv_vrsub_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vrsub_vx_u16m1 | ( | ... | ) | __riscv_vrsub_vx_u16m1(__VA_ARGS__) |
| #define vrsub_vx_u16m1_m | ( | ... | ) | __riscv_vrsub_vx_u16m1_tumu(__VA_ARGS__) |
| #define vrsub_vx_u16m2 | ( | ... | ) | __riscv_vrsub_vx_u16m2(__VA_ARGS__) |
| #define vrsub_vx_u16m2_m | ( | ... | ) | __riscv_vrsub_vx_u16m2_tumu(__VA_ARGS__) |
| #define vrsub_vx_u16m4 | ( | ... | ) | __riscv_vrsub_vx_u16m4(__VA_ARGS__) |
| #define vrsub_vx_u16m4_m | ( | ... | ) | __riscv_vrsub_vx_u16m4_tumu(__VA_ARGS__) |
| #define vrsub_vx_u16m8 | ( | ... | ) | __riscv_vrsub_vx_u16m8(__VA_ARGS__) |
| #define vrsub_vx_u16m8_m | ( | ... | ) | __riscv_vrsub_vx_u16m8_tumu(__VA_ARGS__) |
| #define vrsub_vx_u16mf2 | ( | ... | ) | __riscv_vrsub_vx_u16mf2(__VA_ARGS__) |
| #define vrsub_vx_u16mf2_m | ( | ... | ) | __riscv_vrsub_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vrsub_vx_u16mf4 | ( | ... | ) | __riscv_vrsub_vx_u16mf4(__VA_ARGS__) |
| #define vrsub_vx_u16mf4_m | ( | ... | ) | __riscv_vrsub_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vrsub_vx_u32m1 | ( | ... | ) | __riscv_vrsub_vx_u32m1(__VA_ARGS__) |
| #define vrsub_vx_u32m1_m | ( | ... | ) | __riscv_vrsub_vx_u32m1_tumu(__VA_ARGS__) |
| #define vrsub_vx_u32m2 | ( | ... | ) | __riscv_vrsub_vx_u32m2(__VA_ARGS__) |
| #define vrsub_vx_u32m2_m | ( | ... | ) | __riscv_vrsub_vx_u32m2_tumu(__VA_ARGS__) |
| #define vrsub_vx_u32m4 | ( | ... | ) | __riscv_vrsub_vx_u32m4(__VA_ARGS__) |
| #define vrsub_vx_u32m4_m | ( | ... | ) | __riscv_vrsub_vx_u32m4_tumu(__VA_ARGS__) |
| #define vrsub_vx_u32m8 | ( | ... | ) | __riscv_vrsub_vx_u32m8(__VA_ARGS__) |
| #define vrsub_vx_u32m8_m | ( | ... | ) | __riscv_vrsub_vx_u32m8_tumu(__VA_ARGS__) |
| #define vrsub_vx_u32mf2 | ( | ... | ) | __riscv_vrsub_vx_u32mf2(__VA_ARGS__) |
| #define vrsub_vx_u32mf2_m | ( | ... | ) | __riscv_vrsub_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vrsub_vx_u64m1 | ( | ... | ) | __riscv_vrsub_vx_u64m1(__VA_ARGS__) |
| #define vrsub_vx_u64m1_m | ( | ... | ) | __riscv_vrsub_vx_u64m1_tumu(__VA_ARGS__) |
| #define vrsub_vx_u64m2 | ( | ... | ) | __riscv_vrsub_vx_u64m2(__VA_ARGS__) |
| #define vrsub_vx_u64m2_m | ( | ... | ) | __riscv_vrsub_vx_u64m2_tumu(__VA_ARGS__) |
| #define vrsub_vx_u64m4 | ( | ... | ) | __riscv_vrsub_vx_u64m4(__VA_ARGS__) |
| #define vrsub_vx_u64m4_m | ( | ... | ) | __riscv_vrsub_vx_u64m4_tumu(__VA_ARGS__) |
| #define vrsub_vx_u64m8 | ( | ... | ) | __riscv_vrsub_vx_u64m8(__VA_ARGS__) |
| #define vrsub_vx_u64m8_m | ( | ... | ) | __riscv_vrsub_vx_u64m8_tumu(__VA_ARGS__) |
| #define vrsub_vx_u8m1 | ( | ... | ) | __riscv_vrsub_vx_u8m1(__VA_ARGS__) |
| #define vrsub_vx_u8m1_m | ( | ... | ) | __riscv_vrsub_vx_u8m1_tumu(__VA_ARGS__) |
| #define vrsub_vx_u8m2 | ( | ... | ) | __riscv_vrsub_vx_u8m2(__VA_ARGS__) |
| #define vrsub_vx_u8m2_m | ( | ... | ) | __riscv_vrsub_vx_u8m2_tumu(__VA_ARGS__) |
| #define vrsub_vx_u8m4 | ( | ... | ) | __riscv_vrsub_vx_u8m4(__VA_ARGS__) |
| #define vrsub_vx_u8m4_m | ( | ... | ) | __riscv_vrsub_vx_u8m4_tumu(__VA_ARGS__) |
| #define vrsub_vx_u8m8 | ( | ... | ) | __riscv_vrsub_vx_u8m8(__VA_ARGS__) |
| #define vrsub_vx_u8m8_m | ( | ... | ) | __riscv_vrsub_vx_u8m8_tumu(__VA_ARGS__) |
| #define vrsub_vx_u8mf2 | ( | ... | ) | __riscv_vrsub_vx_u8mf2(__VA_ARGS__) |
| #define vrsub_vx_u8mf2_m | ( | ... | ) | __riscv_vrsub_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vrsub_vx_u8mf4 | ( | ... | ) | __riscv_vrsub_vx_u8mf4(__VA_ARGS__) |
| #define vrsub_vx_u8mf4_m | ( | ... | ) | __riscv_vrsub_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vrsub_vx_u8mf8 | ( | ... | ) | __riscv_vrsub_vx_u8mf8(__VA_ARGS__) |
| #define vrsub_vx_u8mf8_m | ( | ... | ) | __riscv_vrsub_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vsadd_vv_i16m1 | ( | ... | ) | __riscv_vsadd_vv_i16m1(__VA_ARGS__) |
| #define vsadd_vv_i16m1_m | ( | ... | ) | __riscv_vsadd_vv_i16m1_tumu(__VA_ARGS__) |
| #define vsadd_vv_i16m2 | ( | ... | ) | __riscv_vsadd_vv_i16m2(__VA_ARGS__) |
| #define vsadd_vv_i16m2_m | ( | ... | ) | __riscv_vsadd_vv_i16m2_tumu(__VA_ARGS__) |
| #define vsadd_vv_i16m4 | ( | ... | ) | __riscv_vsadd_vv_i16m4(__VA_ARGS__) |
| #define vsadd_vv_i16m4_m | ( | ... | ) | __riscv_vsadd_vv_i16m4_tumu(__VA_ARGS__) |
| #define vsadd_vv_i16m8 | ( | ... | ) | __riscv_vsadd_vv_i16m8(__VA_ARGS__) |
| #define vsadd_vv_i16m8_m | ( | ... | ) | __riscv_vsadd_vv_i16m8_tumu(__VA_ARGS__) |
| #define vsadd_vv_i16mf2 | ( | ... | ) | __riscv_vsadd_vv_i16mf2(__VA_ARGS__) |
| #define vsadd_vv_i16mf2_m | ( | ... | ) | __riscv_vsadd_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vsadd_vv_i16mf4 | ( | ... | ) | __riscv_vsadd_vv_i16mf4(__VA_ARGS__) |
| #define vsadd_vv_i16mf4_m | ( | ... | ) | __riscv_vsadd_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vsadd_vv_i32m1 | ( | ... | ) | __riscv_vsadd_vv_i32m1(__VA_ARGS__) |
| #define vsadd_vv_i32m1_m | ( | ... | ) | __riscv_vsadd_vv_i32m1_tumu(__VA_ARGS__) |
| #define vsadd_vv_i32m2 | ( | ... | ) | __riscv_vsadd_vv_i32m2(__VA_ARGS__) |
| #define vsadd_vv_i32m2_m | ( | ... | ) | __riscv_vsadd_vv_i32m2_tumu(__VA_ARGS__) |
| #define vsadd_vv_i32m4 | ( | ... | ) | __riscv_vsadd_vv_i32m4(__VA_ARGS__) |
| #define vsadd_vv_i32m4_m | ( | ... | ) | __riscv_vsadd_vv_i32m4_tumu(__VA_ARGS__) |
| #define vsadd_vv_i32m8 | ( | ... | ) | __riscv_vsadd_vv_i32m8(__VA_ARGS__) |
| #define vsadd_vv_i32m8_m | ( | ... | ) | __riscv_vsadd_vv_i32m8_tumu(__VA_ARGS__) |
| #define vsadd_vv_i32mf2 | ( | ... | ) | __riscv_vsadd_vv_i32mf2(__VA_ARGS__) |
| #define vsadd_vv_i32mf2_m | ( | ... | ) | __riscv_vsadd_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vsadd_vv_i64m1 | ( | ... | ) | __riscv_vsadd_vv_i64m1(__VA_ARGS__) |
| #define vsadd_vv_i64m1_m | ( | ... | ) | __riscv_vsadd_vv_i64m1_tumu(__VA_ARGS__) |
| #define vsadd_vv_i64m2 | ( | ... | ) | __riscv_vsadd_vv_i64m2(__VA_ARGS__) |
| #define vsadd_vv_i64m2_m | ( | ... | ) | __riscv_vsadd_vv_i64m2_tumu(__VA_ARGS__) |
| #define vsadd_vv_i64m4 | ( | ... | ) | __riscv_vsadd_vv_i64m4(__VA_ARGS__) |
| #define vsadd_vv_i64m4_m | ( | ... | ) | __riscv_vsadd_vv_i64m4_tumu(__VA_ARGS__) |
| #define vsadd_vv_i64m8 | ( | ... | ) | __riscv_vsadd_vv_i64m8(__VA_ARGS__) |
| #define vsadd_vv_i64m8_m | ( | ... | ) | __riscv_vsadd_vv_i64m8_tumu(__VA_ARGS__) |
| #define vsadd_vv_i8m1 | ( | ... | ) | __riscv_vsadd_vv_i8m1(__VA_ARGS__) |
| #define vsadd_vv_i8m1_m | ( | ... | ) | __riscv_vsadd_vv_i8m1_tumu(__VA_ARGS__) |
| #define vsadd_vv_i8m2 | ( | ... | ) | __riscv_vsadd_vv_i8m2(__VA_ARGS__) |
| #define vsadd_vv_i8m2_m | ( | ... | ) | __riscv_vsadd_vv_i8m2_tumu(__VA_ARGS__) |
| #define vsadd_vv_i8m4 | ( | ... | ) | __riscv_vsadd_vv_i8m4(__VA_ARGS__) |
| #define vsadd_vv_i8m4_m | ( | ... | ) | __riscv_vsadd_vv_i8m4_tumu(__VA_ARGS__) |
| #define vsadd_vv_i8m8 | ( | ... | ) | __riscv_vsadd_vv_i8m8(__VA_ARGS__) |
| #define vsadd_vv_i8m8_m | ( | ... | ) | __riscv_vsadd_vv_i8m8_tumu(__VA_ARGS__) |
| #define vsadd_vv_i8mf2 | ( | ... | ) | __riscv_vsadd_vv_i8mf2(__VA_ARGS__) |
| #define vsadd_vv_i8mf2_m | ( | ... | ) | __riscv_vsadd_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vsadd_vv_i8mf4 | ( | ... | ) | __riscv_vsadd_vv_i8mf4(__VA_ARGS__) |
| #define vsadd_vv_i8mf4_m | ( | ... | ) | __riscv_vsadd_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vsadd_vv_i8mf8 | ( | ... | ) | __riscv_vsadd_vv_i8mf8(__VA_ARGS__) |
| #define vsadd_vv_i8mf8_m | ( | ... | ) | __riscv_vsadd_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vsadd_vx_i16m1 | ( | ... | ) | __riscv_vsadd_vx_i16m1(__VA_ARGS__) |
| #define vsadd_vx_i16m1_m | ( | ... | ) | __riscv_vsadd_vx_i16m1_tumu(__VA_ARGS__) |
| #define vsadd_vx_i16m2 | ( | ... | ) | __riscv_vsadd_vx_i16m2(__VA_ARGS__) |
| #define vsadd_vx_i16m2_m | ( | ... | ) | __riscv_vsadd_vx_i16m2_tumu(__VA_ARGS__) |
| #define vsadd_vx_i16m4 | ( | ... | ) | __riscv_vsadd_vx_i16m4(__VA_ARGS__) |
| #define vsadd_vx_i16m4_m | ( | ... | ) | __riscv_vsadd_vx_i16m4_tumu(__VA_ARGS__) |
| #define vsadd_vx_i16m8 | ( | ... | ) | __riscv_vsadd_vx_i16m8(__VA_ARGS__) |
| #define vsadd_vx_i16m8_m | ( | ... | ) | __riscv_vsadd_vx_i16m8_tumu(__VA_ARGS__) |
| #define vsadd_vx_i16mf2 | ( | ... | ) | __riscv_vsadd_vx_i16mf2(__VA_ARGS__) |
| #define vsadd_vx_i16mf2_m | ( | ... | ) | __riscv_vsadd_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vsadd_vx_i16mf4 | ( | ... | ) | __riscv_vsadd_vx_i16mf4(__VA_ARGS__) |
| #define vsadd_vx_i16mf4_m | ( | ... | ) | __riscv_vsadd_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vsadd_vx_i32m1 | ( | ... | ) | __riscv_vsadd_vx_i32m1(__VA_ARGS__) |
| #define vsadd_vx_i32m1_m | ( | ... | ) | __riscv_vsadd_vx_i32m1_tumu(__VA_ARGS__) |
| #define vsadd_vx_i32m2 | ( | ... | ) | __riscv_vsadd_vx_i32m2(__VA_ARGS__) |
| #define vsadd_vx_i32m2_m | ( | ... | ) | __riscv_vsadd_vx_i32m2_tumu(__VA_ARGS__) |
| #define vsadd_vx_i32m4 | ( | ... | ) | __riscv_vsadd_vx_i32m4(__VA_ARGS__) |
| #define vsadd_vx_i32m4_m | ( | ... | ) | __riscv_vsadd_vx_i32m4_tumu(__VA_ARGS__) |
| #define vsadd_vx_i32m8 | ( | ... | ) | __riscv_vsadd_vx_i32m8(__VA_ARGS__) |
| #define vsadd_vx_i32m8_m | ( | ... | ) | __riscv_vsadd_vx_i32m8_tumu(__VA_ARGS__) |
| #define vsadd_vx_i32mf2 | ( | ... | ) | __riscv_vsadd_vx_i32mf2(__VA_ARGS__) |
| #define vsadd_vx_i32mf2_m | ( | ... | ) | __riscv_vsadd_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vsadd_vx_i64m1 | ( | ... | ) | __riscv_vsadd_vx_i64m1(__VA_ARGS__) |
| #define vsadd_vx_i64m1_m | ( | ... | ) | __riscv_vsadd_vx_i64m1_tumu(__VA_ARGS__) |
| #define vsadd_vx_i64m2 | ( | ... | ) | __riscv_vsadd_vx_i64m2(__VA_ARGS__) |
| #define vsadd_vx_i64m2_m | ( | ... | ) | __riscv_vsadd_vx_i64m2_tumu(__VA_ARGS__) |
| #define vsadd_vx_i64m4 | ( | ... | ) | __riscv_vsadd_vx_i64m4(__VA_ARGS__) |
| #define vsadd_vx_i64m4_m | ( | ... | ) | __riscv_vsadd_vx_i64m4_tumu(__VA_ARGS__) |
| #define vsadd_vx_i64m8 | ( | ... | ) | __riscv_vsadd_vx_i64m8(__VA_ARGS__) |
| #define vsadd_vx_i64m8_m | ( | ... | ) | __riscv_vsadd_vx_i64m8_tumu(__VA_ARGS__) |
| #define vsadd_vx_i8m1 | ( | ... | ) | __riscv_vsadd_vx_i8m1(__VA_ARGS__) |
| #define vsadd_vx_i8m1_m | ( | ... | ) | __riscv_vsadd_vx_i8m1_tumu(__VA_ARGS__) |
| #define vsadd_vx_i8m2 | ( | ... | ) | __riscv_vsadd_vx_i8m2(__VA_ARGS__) |
| #define vsadd_vx_i8m2_m | ( | ... | ) | __riscv_vsadd_vx_i8m2_tumu(__VA_ARGS__) |
| #define vsadd_vx_i8m4 | ( | ... | ) | __riscv_vsadd_vx_i8m4(__VA_ARGS__) |
| #define vsadd_vx_i8m4_m | ( | ... | ) | __riscv_vsadd_vx_i8m4_tumu(__VA_ARGS__) |
| #define vsadd_vx_i8m8 | ( | ... | ) | __riscv_vsadd_vx_i8m8(__VA_ARGS__) |
| #define vsadd_vx_i8m8_m | ( | ... | ) | __riscv_vsadd_vx_i8m8_tumu(__VA_ARGS__) |
| #define vsadd_vx_i8mf2 | ( | ... | ) | __riscv_vsadd_vx_i8mf2(__VA_ARGS__) |
| #define vsadd_vx_i8mf2_m | ( | ... | ) | __riscv_vsadd_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vsadd_vx_i8mf4 | ( | ... | ) | __riscv_vsadd_vx_i8mf4(__VA_ARGS__) |
| #define vsadd_vx_i8mf4_m | ( | ... | ) | __riscv_vsadd_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vsadd_vx_i8mf8 | ( | ... | ) | __riscv_vsadd_vx_i8mf8(__VA_ARGS__) |
| #define vsadd_vx_i8mf8_m | ( | ... | ) | __riscv_vsadd_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u16m1 | ( | ... | ) | __riscv_vsaddu_vv_u16m1(__VA_ARGS__) |
| #define vsaddu_vv_u16m1_m | ( | ... | ) | __riscv_vsaddu_vv_u16m1_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u16m2 | ( | ... | ) | __riscv_vsaddu_vv_u16m2(__VA_ARGS__) |
| #define vsaddu_vv_u16m2_m | ( | ... | ) | __riscv_vsaddu_vv_u16m2_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u16m4 | ( | ... | ) | __riscv_vsaddu_vv_u16m4(__VA_ARGS__) |
| #define vsaddu_vv_u16m4_m | ( | ... | ) | __riscv_vsaddu_vv_u16m4_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u16m8 | ( | ... | ) | __riscv_vsaddu_vv_u16m8(__VA_ARGS__) |
| #define vsaddu_vv_u16m8_m | ( | ... | ) | __riscv_vsaddu_vv_u16m8_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u16mf2 | ( | ... | ) | __riscv_vsaddu_vv_u16mf2(__VA_ARGS__) |
| #define vsaddu_vv_u16mf2_m | ( | ... | ) | __riscv_vsaddu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u16mf4 | ( | ... | ) | __riscv_vsaddu_vv_u16mf4(__VA_ARGS__) |
| #define vsaddu_vv_u16mf4_m | ( | ... | ) | __riscv_vsaddu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u32m1 | ( | ... | ) | __riscv_vsaddu_vv_u32m1(__VA_ARGS__) |
| #define vsaddu_vv_u32m1_m | ( | ... | ) | __riscv_vsaddu_vv_u32m1_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u32m2 | ( | ... | ) | __riscv_vsaddu_vv_u32m2(__VA_ARGS__) |
| #define vsaddu_vv_u32m2_m | ( | ... | ) | __riscv_vsaddu_vv_u32m2_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u32m4 | ( | ... | ) | __riscv_vsaddu_vv_u32m4(__VA_ARGS__) |
| #define vsaddu_vv_u32m4_m | ( | ... | ) | __riscv_vsaddu_vv_u32m4_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u32m8 | ( | ... | ) | __riscv_vsaddu_vv_u32m8(__VA_ARGS__) |
| #define vsaddu_vv_u32m8_m | ( | ... | ) | __riscv_vsaddu_vv_u32m8_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u32mf2 | ( | ... | ) | __riscv_vsaddu_vv_u32mf2(__VA_ARGS__) |
| #define vsaddu_vv_u32mf2_m | ( | ... | ) | __riscv_vsaddu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u64m1 | ( | ... | ) | __riscv_vsaddu_vv_u64m1(__VA_ARGS__) |
| #define vsaddu_vv_u64m1_m | ( | ... | ) | __riscv_vsaddu_vv_u64m1_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u64m2 | ( | ... | ) | __riscv_vsaddu_vv_u64m2(__VA_ARGS__) |
| #define vsaddu_vv_u64m2_m | ( | ... | ) | __riscv_vsaddu_vv_u64m2_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u64m4 | ( | ... | ) | __riscv_vsaddu_vv_u64m4(__VA_ARGS__) |
| #define vsaddu_vv_u64m4_m | ( | ... | ) | __riscv_vsaddu_vv_u64m4_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u64m8 | ( | ... | ) | __riscv_vsaddu_vv_u64m8(__VA_ARGS__) |
| #define vsaddu_vv_u64m8_m | ( | ... | ) | __riscv_vsaddu_vv_u64m8_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u8m1 | ( | ... | ) | __riscv_vsaddu_vv_u8m1(__VA_ARGS__) |
| #define vsaddu_vv_u8m1_m | ( | ... | ) | __riscv_vsaddu_vv_u8m1_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u8m2 | ( | ... | ) | __riscv_vsaddu_vv_u8m2(__VA_ARGS__) |
| #define vsaddu_vv_u8m2_m | ( | ... | ) | __riscv_vsaddu_vv_u8m2_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u8m4 | ( | ... | ) | __riscv_vsaddu_vv_u8m4(__VA_ARGS__) |
| #define vsaddu_vv_u8m4_m | ( | ... | ) | __riscv_vsaddu_vv_u8m4_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u8m8 | ( | ... | ) | __riscv_vsaddu_vv_u8m8(__VA_ARGS__) |
| #define vsaddu_vv_u8m8_m | ( | ... | ) | __riscv_vsaddu_vv_u8m8_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u8mf2 | ( | ... | ) | __riscv_vsaddu_vv_u8mf2(__VA_ARGS__) |
| #define vsaddu_vv_u8mf2_m | ( | ... | ) | __riscv_vsaddu_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u8mf4 | ( | ... | ) | __riscv_vsaddu_vv_u8mf4(__VA_ARGS__) |
| #define vsaddu_vv_u8mf4_m | ( | ... | ) | __riscv_vsaddu_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vsaddu_vv_u8mf8 | ( | ... | ) | __riscv_vsaddu_vv_u8mf8(__VA_ARGS__) |
| #define vsaddu_vv_u8mf8_m | ( | ... | ) | __riscv_vsaddu_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u16m1 | ( | ... | ) | __riscv_vsaddu_vx_u16m1(__VA_ARGS__) |
| #define vsaddu_vx_u16m1_m | ( | ... | ) | __riscv_vsaddu_vx_u16m1_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u16m2 | ( | ... | ) | __riscv_vsaddu_vx_u16m2(__VA_ARGS__) |
| #define vsaddu_vx_u16m2_m | ( | ... | ) | __riscv_vsaddu_vx_u16m2_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u16m4 | ( | ... | ) | __riscv_vsaddu_vx_u16m4(__VA_ARGS__) |
| #define vsaddu_vx_u16m4_m | ( | ... | ) | __riscv_vsaddu_vx_u16m4_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u16m8 | ( | ... | ) | __riscv_vsaddu_vx_u16m8(__VA_ARGS__) |
| #define vsaddu_vx_u16m8_m | ( | ... | ) | __riscv_vsaddu_vx_u16m8_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u16mf2 | ( | ... | ) | __riscv_vsaddu_vx_u16mf2(__VA_ARGS__) |
| #define vsaddu_vx_u16mf2_m | ( | ... | ) | __riscv_vsaddu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u16mf4 | ( | ... | ) | __riscv_vsaddu_vx_u16mf4(__VA_ARGS__) |
| #define vsaddu_vx_u16mf4_m | ( | ... | ) | __riscv_vsaddu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u32m1 | ( | ... | ) | __riscv_vsaddu_vx_u32m1(__VA_ARGS__) |
| #define vsaddu_vx_u32m1_m | ( | ... | ) | __riscv_vsaddu_vx_u32m1_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u32m2 | ( | ... | ) | __riscv_vsaddu_vx_u32m2(__VA_ARGS__) |
| #define vsaddu_vx_u32m2_m | ( | ... | ) | __riscv_vsaddu_vx_u32m2_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u32m4 | ( | ... | ) | __riscv_vsaddu_vx_u32m4(__VA_ARGS__) |
| #define vsaddu_vx_u32m4_m | ( | ... | ) | __riscv_vsaddu_vx_u32m4_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u32m8 | ( | ... | ) | __riscv_vsaddu_vx_u32m8(__VA_ARGS__) |
| #define vsaddu_vx_u32m8_m | ( | ... | ) | __riscv_vsaddu_vx_u32m8_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u32mf2 | ( | ... | ) | __riscv_vsaddu_vx_u32mf2(__VA_ARGS__) |
| #define vsaddu_vx_u32mf2_m | ( | ... | ) | __riscv_vsaddu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u64m1 | ( | ... | ) | __riscv_vsaddu_vx_u64m1(__VA_ARGS__) |
| #define vsaddu_vx_u64m1_m | ( | ... | ) | __riscv_vsaddu_vx_u64m1_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u64m2 | ( | ... | ) | __riscv_vsaddu_vx_u64m2(__VA_ARGS__) |
| #define vsaddu_vx_u64m2_m | ( | ... | ) | __riscv_vsaddu_vx_u64m2_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u64m4 | ( | ... | ) | __riscv_vsaddu_vx_u64m4(__VA_ARGS__) |
| #define vsaddu_vx_u64m4_m | ( | ... | ) | __riscv_vsaddu_vx_u64m4_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u64m8 | ( | ... | ) | __riscv_vsaddu_vx_u64m8(__VA_ARGS__) |
| #define vsaddu_vx_u64m8_m | ( | ... | ) | __riscv_vsaddu_vx_u64m8_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u8m1 | ( | ... | ) | __riscv_vsaddu_vx_u8m1(__VA_ARGS__) |
| #define vsaddu_vx_u8m1_m | ( | ... | ) | __riscv_vsaddu_vx_u8m1_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u8m2 | ( | ... | ) | __riscv_vsaddu_vx_u8m2(__VA_ARGS__) |
| #define vsaddu_vx_u8m2_m | ( | ... | ) | __riscv_vsaddu_vx_u8m2_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u8m4 | ( | ... | ) | __riscv_vsaddu_vx_u8m4(__VA_ARGS__) |
| #define vsaddu_vx_u8m4_m | ( | ... | ) | __riscv_vsaddu_vx_u8m4_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u8m8 | ( | ... | ) | __riscv_vsaddu_vx_u8m8(__VA_ARGS__) |
| #define vsaddu_vx_u8m8_m | ( | ... | ) | __riscv_vsaddu_vx_u8m8_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u8mf2 | ( | ... | ) | __riscv_vsaddu_vx_u8mf2(__VA_ARGS__) |
| #define vsaddu_vx_u8mf2_m | ( | ... | ) | __riscv_vsaddu_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u8mf4 | ( | ... | ) | __riscv_vsaddu_vx_u8mf4(__VA_ARGS__) |
| #define vsaddu_vx_u8mf4_m | ( | ... | ) | __riscv_vsaddu_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vsaddu_vx_u8mf8 | ( | ... | ) | __riscv_vsaddu_vx_u8mf8(__VA_ARGS__) |
| #define vsaddu_vx_u8mf8_m | ( | ... | ) | __riscv_vsaddu_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vsbc_vvm_i16m1 | ( | ... | ) | __riscv_vsbc_vvm_i16m1(__VA_ARGS__) |
| #define vsbc_vvm_i16m2 | ( | ... | ) | __riscv_vsbc_vvm_i16m2(__VA_ARGS__) |
| #define vsbc_vvm_i16m4 | ( | ... | ) | __riscv_vsbc_vvm_i16m4(__VA_ARGS__) |
| #define vsbc_vvm_i16m8 | ( | ... | ) | __riscv_vsbc_vvm_i16m8(__VA_ARGS__) |
| #define vsbc_vvm_i16mf2 | ( | ... | ) | __riscv_vsbc_vvm_i16mf2(__VA_ARGS__) |
| #define vsbc_vvm_i16mf4 | ( | ... | ) | __riscv_vsbc_vvm_i16mf4(__VA_ARGS__) |
| #define vsbc_vvm_i32m1 | ( | ... | ) | __riscv_vsbc_vvm_i32m1(__VA_ARGS__) |
| #define vsbc_vvm_i32m2 | ( | ... | ) | __riscv_vsbc_vvm_i32m2(__VA_ARGS__) |
| #define vsbc_vvm_i32m4 | ( | ... | ) | __riscv_vsbc_vvm_i32m4(__VA_ARGS__) |
| #define vsbc_vvm_i32m8 | ( | ... | ) | __riscv_vsbc_vvm_i32m8(__VA_ARGS__) |
| #define vsbc_vvm_i32mf2 | ( | ... | ) | __riscv_vsbc_vvm_i32mf2(__VA_ARGS__) |
| #define vsbc_vvm_i64m1 | ( | ... | ) | __riscv_vsbc_vvm_i64m1(__VA_ARGS__) |
| #define vsbc_vvm_i64m2 | ( | ... | ) | __riscv_vsbc_vvm_i64m2(__VA_ARGS__) |
| #define vsbc_vvm_i64m4 | ( | ... | ) | __riscv_vsbc_vvm_i64m4(__VA_ARGS__) |
| #define vsbc_vvm_i64m8 | ( | ... | ) | __riscv_vsbc_vvm_i64m8(__VA_ARGS__) |
| #define vsbc_vvm_i8m1 | ( | ... | ) | __riscv_vsbc_vvm_i8m1(__VA_ARGS__) |
| #define vsbc_vvm_i8m2 | ( | ... | ) | __riscv_vsbc_vvm_i8m2(__VA_ARGS__) |
| #define vsbc_vvm_i8m4 | ( | ... | ) | __riscv_vsbc_vvm_i8m4(__VA_ARGS__) |
| #define vsbc_vvm_i8m8 | ( | ... | ) | __riscv_vsbc_vvm_i8m8(__VA_ARGS__) |
| #define vsbc_vvm_i8mf2 | ( | ... | ) | __riscv_vsbc_vvm_i8mf2(__VA_ARGS__) |
| #define vsbc_vvm_i8mf4 | ( | ... | ) | __riscv_vsbc_vvm_i8mf4(__VA_ARGS__) |
| #define vsbc_vvm_i8mf8 | ( | ... | ) | __riscv_vsbc_vvm_i8mf8(__VA_ARGS__) |
| #define vsbc_vvm_u16m1 | ( | ... | ) | __riscv_vsbc_vvm_u16m1(__VA_ARGS__) |
| #define vsbc_vvm_u16m2 | ( | ... | ) | __riscv_vsbc_vvm_u16m2(__VA_ARGS__) |
| #define vsbc_vvm_u16m4 | ( | ... | ) | __riscv_vsbc_vvm_u16m4(__VA_ARGS__) |
| #define vsbc_vvm_u16m8 | ( | ... | ) | __riscv_vsbc_vvm_u16m8(__VA_ARGS__) |
| #define vsbc_vvm_u16mf2 | ( | ... | ) | __riscv_vsbc_vvm_u16mf2(__VA_ARGS__) |
| #define vsbc_vvm_u16mf4 | ( | ... | ) | __riscv_vsbc_vvm_u16mf4(__VA_ARGS__) |
| #define vsbc_vvm_u32m1 | ( | ... | ) | __riscv_vsbc_vvm_u32m1(__VA_ARGS__) |
| #define vsbc_vvm_u32m2 | ( | ... | ) | __riscv_vsbc_vvm_u32m2(__VA_ARGS__) |
| #define vsbc_vvm_u32m4 | ( | ... | ) | __riscv_vsbc_vvm_u32m4(__VA_ARGS__) |
| #define vsbc_vvm_u32m8 | ( | ... | ) | __riscv_vsbc_vvm_u32m8(__VA_ARGS__) |
| #define vsbc_vvm_u32mf2 | ( | ... | ) | __riscv_vsbc_vvm_u32mf2(__VA_ARGS__) |
| #define vsbc_vvm_u64m1 | ( | ... | ) | __riscv_vsbc_vvm_u64m1(__VA_ARGS__) |
| #define vsbc_vvm_u64m2 | ( | ... | ) | __riscv_vsbc_vvm_u64m2(__VA_ARGS__) |
| #define vsbc_vvm_u64m4 | ( | ... | ) | __riscv_vsbc_vvm_u64m4(__VA_ARGS__) |
| #define vsbc_vvm_u64m8 | ( | ... | ) | __riscv_vsbc_vvm_u64m8(__VA_ARGS__) |
| #define vsbc_vvm_u8m1 | ( | ... | ) | __riscv_vsbc_vvm_u8m1(__VA_ARGS__) |
| #define vsbc_vvm_u8m2 | ( | ... | ) | __riscv_vsbc_vvm_u8m2(__VA_ARGS__) |
| #define vsbc_vvm_u8m4 | ( | ... | ) | __riscv_vsbc_vvm_u8m4(__VA_ARGS__) |
| #define vsbc_vvm_u8m8 | ( | ... | ) | __riscv_vsbc_vvm_u8m8(__VA_ARGS__) |
| #define vsbc_vvm_u8mf2 | ( | ... | ) | __riscv_vsbc_vvm_u8mf2(__VA_ARGS__) |
| #define vsbc_vvm_u8mf4 | ( | ... | ) | __riscv_vsbc_vvm_u8mf4(__VA_ARGS__) |
| #define vsbc_vvm_u8mf8 | ( | ... | ) | __riscv_vsbc_vvm_u8mf8(__VA_ARGS__) |
| #define vsbc_vxm_i16m1 | ( | ... | ) | __riscv_vsbc_vxm_i16m1(__VA_ARGS__) |
| #define vsbc_vxm_i16m2 | ( | ... | ) | __riscv_vsbc_vxm_i16m2(__VA_ARGS__) |
| #define vsbc_vxm_i16m4 | ( | ... | ) | __riscv_vsbc_vxm_i16m4(__VA_ARGS__) |
| #define vsbc_vxm_i16m8 | ( | ... | ) | __riscv_vsbc_vxm_i16m8(__VA_ARGS__) |
| #define vsbc_vxm_i16mf2 | ( | ... | ) | __riscv_vsbc_vxm_i16mf2(__VA_ARGS__) |
| #define vsbc_vxm_i16mf4 | ( | ... | ) | __riscv_vsbc_vxm_i16mf4(__VA_ARGS__) |
| #define vsbc_vxm_i32m1 | ( | ... | ) | __riscv_vsbc_vxm_i32m1(__VA_ARGS__) |
| #define vsbc_vxm_i32m2 | ( | ... | ) | __riscv_vsbc_vxm_i32m2(__VA_ARGS__) |
| #define vsbc_vxm_i32m4 | ( | ... | ) | __riscv_vsbc_vxm_i32m4(__VA_ARGS__) |
| #define vsbc_vxm_i32m8 | ( | ... | ) | __riscv_vsbc_vxm_i32m8(__VA_ARGS__) |
| #define vsbc_vxm_i32mf2 | ( | ... | ) | __riscv_vsbc_vxm_i32mf2(__VA_ARGS__) |
| #define vsbc_vxm_i64m1 | ( | ... | ) | __riscv_vsbc_vxm_i64m1(__VA_ARGS__) |
| #define vsbc_vxm_i64m2 | ( | ... | ) | __riscv_vsbc_vxm_i64m2(__VA_ARGS__) |
| #define vsbc_vxm_i64m4 | ( | ... | ) | __riscv_vsbc_vxm_i64m4(__VA_ARGS__) |
| #define vsbc_vxm_i64m8 | ( | ... | ) | __riscv_vsbc_vxm_i64m8(__VA_ARGS__) |
| #define vsbc_vxm_i8m1 | ( | ... | ) | __riscv_vsbc_vxm_i8m1(__VA_ARGS__) |
| #define vsbc_vxm_i8m2 | ( | ... | ) | __riscv_vsbc_vxm_i8m2(__VA_ARGS__) |
| #define vsbc_vxm_i8m4 | ( | ... | ) | __riscv_vsbc_vxm_i8m4(__VA_ARGS__) |
| #define vsbc_vxm_i8m8 | ( | ... | ) | __riscv_vsbc_vxm_i8m8(__VA_ARGS__) |
| #define vsbc_vxm_i8mf2 | ( | ... | ) | __riscv_vsbc_vxm_i8mf2(__VA_ARGS__) |
| #define vsbc_vxm_i8mf4 | ( | ... | ) | __riscv_vsbc_vxm_i8mf4(__VA_ARGS__) |
| #define vsbc_vxm_i8mf8 | ( | ... | ) | __riscv_vsbc_vxm_i8mf8(__VA_ARGS__) |
| #define vsbc_vxm_u16m1 | ( | ... | ) | __riscv_vsbc_vxm_u16m1(__VA_ARGS__) |
| #define vsbc_vxm_u16m2 | ( | ... | ) | __riscv_vsbc_vxm_u16m2(__VA_ARGS__) |
| #define vsbc_vxm_u16m4 | ( | ... | ) | __riscv_vsbc_vxm_u16m4(__VA_ARGS__) |
| #define vsbc_vxm_u16m8 | ( | ... | ) | __riscv_vsbc_vxm_u16m8(__VA_ARGS__) |
| #define vsbc_vxm_u16mf2 | ( | ... | ) | __riscv_vsbc_vxm_u16mf2(__VA_ARGS__) |
| #define vsbc_vxm_u16mf4 | ( | ... | ) | __riscv_vsbc_vxm_u16mf4(__VA_ARGS__) |
| #define vsbc_vxm_u32m1 | ( | ... | ) | __riscv_vsbc_vxm_u32m1(__VA_ARGS__) |
| #define vsbc_vxm_u32m2 | ( | ... | ) | __riscv_vsbc_vxm_u32m2(__VA_ARGS__) |
| #define vsbc_vxm_u32m4 | ( | ... | ) | __riscv_vsbc_vxm_u32m4(__VA_ARGS__) |
| #define vsbc_vxm_u32m8 | ( | ... | ) | __riscv_vsbc_vxm_u32m8(__VA_ARGS__) |
| #define vsbc_vxm_u32mf2 | ( | ... | ) | __riscv_vsbc_vxm_u32mf2(__VA_ARGS__) |
| #define vsbc_vxm_u64m1 | ( | ... | ) | __riscv_vsbc_vxm_u64m1(__VA_ARGS__) |
| #define vsbc_vxm_u64m2 | ( | ... | ) | __riscv_vsbc_vxm_u64m2(__VA_ARGS__) |
| #define vsbc_vxm_u64m4 | ( | ... | ) | __riscv_vsbc_vxm_u64m4(__VA_ARGS__) |
| #define vsbc_vxm_u64m8 | ( | ... | ) | __riscv_vsbc_vxm_u64m8(__VA_ARGS__) |
| #define vsbc_vxm_u8m1 | ( | ... | ) | __riscv_vsbc_vxm_u8m1(__VA_ARGS__) |
| #define vsbc_vxm_u8m2 | ( | ... | ) | __riscv_vsbc_vxm_u8m2(__VA_ARGS__) |
| #define vsbc_vxm_u8m4 | ( | ... | ) | __riscv_vsbc_vxm_u8m4(__VA_ARGS__) |
| #define vsbc_vxm_u8m8 | ( | ... | ) | __riscv_vsbc_vxm_u8m8(__VA_ARGS__) |
| #define vsbc_vxm_u8mf2 | ( | ... | ) | __riscv_vsbc_vxm_u8mf2(__VA_ARGS__) |
| #define vsbc_vxm_u8mf4 | ( | ... | ) | __riscv_vsbc_vxm_u8mf4(__VA_ARGS__) |
| #define vsbc_vxm_u8mf8 | ( | ... | ) | __riscv_vsbc_vxm_u8mf8(__VA_ARGS__) |
| #define vse16_v_f16m1 | ( | ... | ) | __riscv_vse16_v_f16m1(__VA_ARGS__) |
| #define vse16_v_f16m1_m | ( | ... | ) | __riscv_vse16_v_f16m1_m(__VA_ARGS__) |
| #define vse16_v_f16m2 | ( | ... | ) | __riscv_vse16_v_f16m2(__VA_ARGS__) |
| #define vse16_v_f16m2_m | ( | ... | ) | __riscv_vse16_v_f16m2_m(__VA_ARGS__) |
| #define vse16_v_f16m4 | ( | ... | ) | __riscv_vse16_v_f16m4(__VA_ARGS__) |
| #define vse16_v_f16m4_m | ( | ... | ) | __riscv_vse16_v_f16m4_m(__VA_ARGS__) |
| #define vse16_v_f16m8 | ( | ... | ) | __riscv_vse16_v_f16m8(__VA_ARGS__) |
| #define vse16_v_f16m8_m | ( | ... | ) | __riscv_vse16_v_f16m8_m(__VA_ARGS__) |
| #define vse16_v_f16mf2 | ( | ... | ) | __riscv_vse16_v_f16mf2(__VA_ARGS__) |
| #define vse16_v_f16mf2_m | ( | ... | ) | __riscv_vse16_v_f16mf2_m(__VA_ARGS__) |
| #define vse16_v_f16mf4 | ( | ... | ) | __riscv_vse16_v_f16mf4(__VA_ARGS__) |
| #define vse16_v_f16mf4_m | ( | ... | ) | __riscv_vse16_v_f16mf4_m(__VA_ARGS__) |
| #define vse16_v_i16m1 | ( | ... | ) | __riscv_vse16_v_i16m1(__VA_ARGS__) |
| #define vse16_v_i16m1_m | ( | ... | ) | __riscv_vse16_v_i16m1_m(__VA_ARGS__) |
| #define vse16_v_i16m2 | ( | ... | ) | __riscv_vse16_v_i16m2(__VA_ARGS__) |
| #define vse16_v_i16m2_m | ( | ... | ) | __riscv_vse16_v_i16m2_m(__VA_ARGS__) |
| #define vse16_v_i16m4 | ( | ... | ) | __riscv_vse16_v_i16m4(__VA_ARGS__) |
| #define vse16_v_i16m4_m | ( | ... | ) | __riscv_vse16_v_i16m4_m(__VA_ARGS__) |
| #define vse16_v_i16m8 | ( | ... | ) | __riscv_vse16_v_i16m8(__VA_ARGS__) |
| #define vse16_v_i16m8_m | ( | ... | ) | __riscv_vse16_v_i16m8_m(__VA_ARGS__) |
| #define vse16_v_i16mf2 | ( | ... | ) | __riscv_vse16_v_i16mf2(__VA_ARGS__) |
| #define vse16_v_i16mf2_m | ( | ... | ) | __riscv_vse16_v_i16mf2_m(__VA_ARGS__) |
| #define vse16_v_i16mf4 | ( | ... | ) | __riscv_vse16_v_i16mf4(__VA_ARGS__) |
| #define vse16_v_i16mf4_m | ( | ... | ) | __riscv_vse16_v_i16mf4_m(__VA_ARGS__) |
| #define vse16_v_u16m1 | ( | ... | ) | __riscv_vse16_v_u16m1(__VA_ARGS__) |
| #define vse16_v_u16m1_m | ( | ... | ) | __riscv_vse16_v_u16m1_m(__VA_ARGS__) |
| #define vse16_v_u16m2 | ( | ... | ) | __riscv_vse16_v_u16m2(__VA_ARGS__) |
| #define vse16_v_u16m2_m | ( | ... | ) | __riscv_vse16_v_u16m2_m(__VA_ARGS__) |
| #define vse16_v_u16m4 | ( | ... | ) | __riscv_vse16_v_u16m4(__VA_ARGS__) |
| #define vse16_v_u16m4_m | ( | ... | ) | __riscv_vse16_v_u16m4_m(__VA_ARGS__) |
| #define vse16_v_u16m8 | ( | ... | ) | __riscv_vse16_v_u16m8(__VA_ARGS__) |
| #define vse16_v_u16m8_m | ( | ... | ) | __riscv_vse16_v_u16m8_m(__VA_ARGS__) |
| #define vse16_v_u16mf2 | ( | ... | ) | __riscv_vse16_v_u16mf2(__VA_ARGS__) |
| #define vse16_v_u16mf2_m | ( | ... | ) | __riscv_vse16_v_u16mf2_m(__VA_ARGS__) |
| #define vse16_v_u16mf4 | ( | ... | ) | __riscv_vse16_v_u16mf4(__VA_ARGS__) |
| #define vse16_v_u16mf4_m | ( | ... | ) | __riscv_vse16_v_u16mf4_m(__VA_ARGS__) |
| #define vse32_v_f32m1 | ( | ... | ) | __riscv_vse32_v_f32m1(__VA_ARGS__) |
| #define vse32_v_f32m1_m | ( | ... | ) | __riscv_vse32_v_f32m1_m(__VA_ARGS__) |
| #define vse32_v_f32m2 | ( | ... | ) | __riscv_vse32_v_f32m2(__VA_ARGS__) |
| #define vse32_v_f32m2_m | ( | ... | ) | __riscv_vse32_v_f32m2_m(__VA_ARGS__) |
| #define vse32_v_f32m4 | ( | ... | ) | __riscv_vse32_v_f32m4(__VA_ARGS__) |
| #define vse32_v_f32m4_m | ( | ... | ) | __riscv_vse32_v_f32m4_m(__VA_ARGS__) |
| #define vse32_v_f32m8 | ( | ... | ) | __riscv_vse32_v_f32m8(__VA_ARGS__) |
| #define vse32_v_f32m8_m | ( | ... | ) | __riscv_vse32_v_f32m8_m(__VA_ARGS__) |
| #define vse32_v_f32mf2 | ( | ... | ) | __riscv_vse32_v_f32mf2(__VA_ARGS__) |
| #define vse32_v_f32mf2_m | ( | ... | ) | __riscv_vse32_v_f32mf2_m(__VA_ARGS__) |
| #define vse32_v_i32m1 | ( | ... | ) | __riscv_vse32_v_i32m1(__VA_ARGS__) |
| #define vse32_v_i32m1_m | ( | ... | ) | __riscv_vse32_v_i32m1_m(__VA_ARGS__) |
| #define vse32_v_i32m2 | ( | ... | ) | __riscv_vse32_v_i32m2(__VA_ARGS__) |
| #define vse32_v_i32m2_m | ( | ... | ) | __riscv_vse32_v_i32m2_m(__VA_ARGS__) |
| #define vse32_v_i32m4 | ( | ... | ) | __riscv_vse32_v_i32m4(__VA_ARGS__) |
| #define vse32_v_i32m4_m | ( | ... | ) | __riscv_vse32_v_i32m4_m(__VA_ARGS__) |
| #define vse32_v_i32m8 | ( | ... | ) | __riscv_vse32_v_i32m8(__VA_ARGS__) |
| #define vse32_v_i32m8_m | ( | ... | ) | __riscv_vse32_v_i32m8_m(__VA_ARGS__) |
| #define vse32_v_i32mf2 | ( | ... | ) | __riscv_vse32_v_i32mf2(__VA_ARGS__) |
| #define vse32_v_i32mf2_m | ( | ... | ) | __riscv_vse32_v_i32mf2_m(__VA_ARGS__) |
| #define vse32_v_u32m1 | ( | ... | ) | __riscv_vse32_v_u32m1(__VA_ARGS__) |
| #define vse32_v_u32m1_m | ( | ... | ) | __riscv_vse32_v_u32m1_m(__VA_ARGS__) |
| #define vse32_v_u32m2 | ( | ... | ) | __riscv_vse32_v_u32m2(__VA_ARGS__) |
| #define vse32_v_u32m2_m | ( | ... | ) | __riscv_vse32_v_u32m2_m(__VA_ARGS__) |
| #define vse32_v_u32m4 | ( | ... | ) | __riscv_vse32_v_u32m4(__VA_ARGS__) |
| #define vse32_v_u32m4_m | ( | ... | ) | __riscv_vse32_v_u32m4_m(__VA_ARGS__) |
| #define vse32_v_u32m8 | ( | ... | ) | __riscv_vse32_v_u32m8(__VA_ARGS__) |
| #define vse32_v_u32m8_m | ( | ... | ) | __riscv_vse32_v_u32m8_m(__VA_ARGS__) |
| #define vse32_v_u32mf2 | ( | ... | ) | __riscv_vse32_v_u32mf2(__VA_ARGS__) |
| #define vse32_v_u32mf2_m | ( | ... | ) | __riscv_vse32_v_u32mf2_m(__VA_ARGS__) |
| #define vse64_v_f64m1 | ( | ... | ) | __riscv_vse64_v_f64m1(__VA_ARGS__) |
| #define vse64_v_f64m1_m | ( | ... | ) | __riscv_vse64_v_f64m1_m(__VA_ARGS__) |
| #define vse64_v_f64m2 | ( | ... | ) | __riscv_vse64_v_f64m2(__VA_ARGS__) |
| #define vse64_v_f64m2_m | ( | ... | ) | __riscv_vse64_v_f64m2_m(__VA_ARGS__) |
| #define vse64_v_f64m4 | ( | ... | ) | __riscv_vse64_v_f64m4(__VA_ARGS__) |
| #define vse64_v_f64m4_m | ( | ... | ) | __riscv_vse64_v_f64m4_m(__VA_ARGS__) |
| #define vse64_v_f64m8 | ( | ... | ) | __riscv_vse64_v_f64m8(__VA_ARGS__) |
| #define vse64_v_f64m8_m | ( | ... | ) | __riscv_vse64_v_f64m8_m(__VA_ARGS__) |
| #define vse64_v_i64m1 | ( | ... | ) | __riscv_vse64_v_i64m1(__VA_ARGS__) |
| #define vse64_v_i64m1_m | ( | ... | ) | __riscv_vse64_v_i64m1_m(__VA_ARGS__) |
| #define vse64_v_i64m2 | ( | ... | ) | __riscv_vse64_v_i64m2(__VA_ARGS__) |
| #define vse64_v_i64m2_m | ( | ... | ) | __riscv_vse64_v_i64m2_m(__VA_ARGS__) |
| #define vse64_v_i64m4 | ( | ... | ) | __riscv_vse64_v_i64m4(__VA_ARGS__) |
| #define vse64_v_i64m4_m | ( | ... | ) | __riscv_vse64_v_i64m4_m(__VA_ARGS__) |
| #define vse64_v_i64m8 | ( | ... | ) | __riscv_vse64_v_i64m8(__VA_ARGS__) |
| #define vse64_v_i64m8_m | ( | ... | ) | __riscv_vse64_v_i64m8_m(__VA_ARGS__) |
| #define vse64_v_u64m1 | ( | ... | ) | __riscv_vse64_v_u64m1(__VA_ARGS__) |
| #define vse64_v_u64m1_m | ( | ... | ) | __riscv_vse64_v_u64m1_m(__VA_ARGS__) |
| #define vse64_v_u64m2 | ( | ... | ) | __riscv_vse64_v_u64m2(__VA_ARGS__) |
| #define vse64_v_u64m2_m | ( | ... | ) | __riscv_vse64_v_u64m2_m(__VA_ARGS__) |
| #define vse64_v_u64m4 | ( | ... | ) | __riscv_vse64_v_u64m4(__VA_ARGS__) |
| #define vse64_v_u64m4_m | ( | ... | ) | __riscv_vse64_v_u64m4_m(__VA_ARGS__) |
| #define vse64_v_u64m8 | ( | ... | ) | __riscv_vse64_v_u64m8(__VA_ARGS__) |
| #define vse64_v_u64m8_m | ( | ... | ) | __riscv_vse64_v_u64m8_m(__VA_ARGS__) |
| #define vse8_v_i8m1 | ( | ... | ) | __riscv_vse8_v_i8m1(__VA_ARGS__) |
| #define vse8_v_i8m1_m | ( | ... | ) | __riscv_vse8_v_i8m1_m(__VA_ARGS__) |
| #define vse8_v_i8m2 | ( | ... | ) | __riscv_vse8_v_i8m2(__VA_ARGS__) |
| #define vse8_v_i8m2_m | ( | ... | ) | __riscv_vse8_v_i8m2_m(__VA_ARGS__) |
| #define vse8_v_i8m4 | ( | ... | ) | __riscv_vse8_v_i8m4(__VA_ARGS__) |
| #define vse8_v_i8m4_m | ( | ... | ) | __riscv_vse8_v_i8m4_m(__VA_ARGS__) |
| #define vse8_v_i8m8 | ( | ... | ) | __riscv_vse8_v_i8m8(__VA_ARGS__) |
| #define vse8_v_i8m8_m | ( | ... | ) | __riscv_vse8_v_i8m8_m(__VA_ARGS__) |
| #define vse8_v_i8mf2 | ( | ... | ) | __riscv_vse8_v_i8mf2(__VA_ARGS__) |
| #define vse8_v_i8mf2_m | ( | ... | ) | __riscv_vse8_v_i8mf2_m(__VA_ARGS__) |
| #define vse8_v_i8mf4 | ( | ... | ) | __riscv_vse8_v_i8mf4(__VA_ARGS__) |
| #define vse8_v_i8mf4_m | ( | ... | ) | __riscv_vse8_v_i8mf4_m(__VA_ARGS__) |
| #define vse8_v_i8mf8 | ( | ... | ) | __riscv_vse8_v_i8mf8(__VA_ARGS__) |
| #define vse8_v_i8mf8_m | ( | ... | ) | __riscv_vse8_v_i8mf8_m(__VA_ARGS__) |
| #define vse8_v_u8m1 | ( | ... | ) | __riscv_vse8_v_u8m1(__VA_ARGS__) |
| #define vse8_v_u8m1_m | ( | ... | ) | __riscv_vse8_v_u8m1_m(__VA_ARGS__) |
| #define vse8_v_u8m2 | ( | ... | ) | __riscv_vse8_v_u8m2(__VA_ARGS__) |
| #define vse8_v_u8m2_m | ( | ... | ) | __riscv_vse8_v_u8m2_m(__VA_ARGS__) |
| #define vse8_v_u8m4 | ( | ... | ) | __riscv_vse8_v_u8m4(__VA_ARGS__) |
| #define vse8_v_u8m4_m | ( | ... | ) | __riscv_vse8_v_u8m4_m(__VA_ARGS__) |
| #define vse8_v_u8m8 | ( | ... | ) | __riscv_vse8_v_u8m8(__VA_ARGS__) |
| #define vse8_v_u8m8_m | ( | ... | ) | __riscv_vse8_v_u8m8_m(__VA_ARGS__) |
| #define vse8_v_u8mf2 | ( | ... | ) | __riscv_vse8_v_u8mf2(__VA_ARGS__) |
| #define vse8_v_u8mf2_m | ( | ... | ) | __riscv_vse8_v_u8mf2_m(__VA_ARGS__) |
| #define vse8_v_u8mf4 | ( | ... | ) | __riscv_vse8_v_u8mf4(__VA_ARGS__) |
| #define vse8_v_u8mf4_m | ( | ... | ) | __riscv_vse8_v_u8mf4_m(__VA_ARGS__) |
| #define vse8_v_u8mf8 | ( | ... | ) | __riscv_vse8_v_u8mf8(__VA_ARGS__) |
| #define vse8_v_u8mf8_m | ( | ... | ) | __riscv_vse8_v_u8mf8_m(__VA_ARGS__) |
| #define vset_v_f16m1_f16m2 | ( | ... | ) | __riscv_vset_v_f16m1_f16m2(__VA_ARGS__) |
| #define vset_v_f16m1_f16m4 | ( | ... | ) | __riscv_vset_v_f16m1_f16m4(__VA_ARGS__) |
| #define vset_v_f16m1_f16m8 | ( | ... | ) | __riscv_vset_v_f16m1_f16m8(__VA_ARGS__) |
| #define vset_v_f16m2_f16m4 | ( | ... | ) | __riscv_vset_v_f16m2_f16m4(__VA_ARGS__) |
| #define vset_v_f16m2_f16m8 | ( | ... | ) | __riscv_vset_v_f16m2_f16m8(__VA_ARGS__) |
| #define vset_v_f16m4_f16m8 | ( | ... | ) | __riscv_vset_v_f16m4_f16m8(__VA_ARGS__) |
| #define vset_v_f32m1_f32m2 | ( | ... | ) | __riscv_vset_v_f32m1_f32m2(__VA_ARGS__) |
| #define vset_v_f32m1_f32m4 | ( | ... | ) | __riscv_vset_v_f32m1_f32m4(__VA_ARGS__) |
| #define vset_v_f32m1_f32m8 | ( | ... | ) | __riscv_vset_v_f32m1_f32m8(__VA_ARGS__) |
| #define vset_v_f32m2_f32m4 | ( | ... | ) | __riscv_vset_v_f32m2_f32m4(__VA_ARGS__) |
| #define vset_v_f32m2_f32m8 | ( | ... | ) | __riscv_vset_v_f32m2_f32m8(__VA_ARGS__) |
| #define vset_v_f32m4_f32m8 | ( | ... | ) | __riscv_vset_v_f32m4_f32m8(__VA_ARGS__) |
| #define vset_v_f64m1_f64m2 | ( | ... | ) | __riscv_vset_v_f64m1_f64m2(__VA_ARGS__) |
| #define vset_v_f64m1_f64m4 | ( | ... | ) | __riscv_vset_v_f64m1_f64m4(__VA_ARGS__) |
| #define vset_v_f64m1_f64m8 | ( | ... | ) | __riscv_vset_v_f64m1_f64m8(__VA_ARGS__) |
| #define vset_v_f64m2_f64m4 | ( | ... | ) | __riscv_vset_v_f64m2_f64m4(__VA_ARGS__) |
| #define vset_v_f64m2_f64m8 | ( | ... | ) | __riscv_vset_v_f64m2_f64m8(__VA_ARGS__) |
| #define vset_v_f64m4_f64m8 | ( | ... | ) | __riscv_vset_v_f64m4_f64m8(__VA_ARGS__) |
| #define vset_v_i16m1_i16m2 | ( | ... | ) | __riscv_vset_v_i16m1_i16m2(__VA_ARGS__) |
| #define vset_v_i16m1_i16m4 | ( | ... | ) | __riscv_vset_v_i16m1_i16m4(__VA_ARGS__) |
| #define vset_v_i16m1_i16m8 | ( | ... | ) | __riscv_vset_v_i16m1_i16m8(__VA_ARGS__) |
| #define vset_v_i16m2_i16m4 | ( | ... | ) | __riscv_vset_v_i16m2_i16m4(__VA_ARGS__) |
| #define vset_v_i16m2_i16m8 | ( | ... | ) | __riscv_vset_v_i16m2_i16m8(__VA_ARGS__) |
| #define vset_v_i16m4_i16m8 | ( | ... | ) | __riscv_vset_v_i16m4_i16m8(__VA_ARGS__) |
| #define vset_v_i32m1_i32m2 | ( | ... | ) | __riscv_vset_v_i32m1_i32m2(__VA_ARGS__) |
| #define vset_v_i32m1_i32m4 | ( | ... | ) | __riscv_vset_v_i32m1_i32m4(__VA_ARGS__) |
| #define vset_v_i32m1_i32m8 | ( | ... | ) | __riscv_vset_v_i32m1_i32m8(__VA_ARGS__) |
| #define vset_v_i32m2_i32m4 | ( | ... | ) | __riscv_vset_v_i32m2_i32m4(__VA_ARGS__) |
| #define vset_v_i32m2_i32m8 | ( | ... | ) | __riscv_vset_v_i32m2_i32m8(__VA_ARGS__) |
| #define vset_v_i32m4_i32m8 | ( | ... | ) | __riscv_vset_v_i32m4_i32m8(__VA_ARGS__) |
| #define vset_v_i64m1_i64m2 | ( | ... | ) | __riscv_vset_v_i64m1_i64m2(__VA_ARGS__) |
| #define vset_v_i64m1_i64m4 | ( | ... | ) | __riscv_vset_v_i64m1_i64m4(__VA_ARGS__) |
| #define vset_v_i64m1_i64m8 | ( | ... | ) | __riscv_vset_v_i64m1_i64m8(__VA_ARGS__) |
| #define vset_v_i64m2_i64m4 | ( | ... | ) | __riscv_vset_v_i64m2_i64m4(__VA_ARGS__) |
| #define vset_v_i64m2_i64m8 | ( | ... | ) | __riscv_vset_v_i64m2_i64m8(__VA_ARGS__) |
| #define vset_v_i64m4_i64m8 | ( | ... | ) | __riscv_vset_v_i64m4_i64m8(__VA_ARGS__) |
| #define vset_v_i8m1_i8m2 | ( | ... | ) | __riscv_vset_v_i8m1_i8m2(__VA_ARGS__) |
| #define vset_v_i8m1_i8m4 | ( | ... | ) | __riscv_vset_v_i8m1_i8m4(__VA_ARGS__) |
| #define vset_v_i8m1_i8m8 | ( | ... | ) | __riscv_vset_v_i8m1_i8m8(__VA_ARGS__) |
| #define vset_v_i8m2_i8m4 | ( | ... | ) | __riscv_vset_v_i8m2_i8m4(__VA_ARGS__) |
| #define vset_v_i8m2_i8m8 | ( | ... | ) | __riscv_vset_v_i8m2_i8m8(__VA_ARGS__) |
| #define vset_v_i8m4_i8m8 | ( | ... | ) | __riscv_vset_v_i8m4_i8m8(__VA_ARGS__) |
| #define vset_v_u16m1_u16m2 | ( | ... | ) | __riscv_vset_v_u16m1_u16m2(__VA_ARGS__) |
| #define vset_v_u16m1_u16m4 | ( | ... | ) | __riscv_vset_v_u16m1_u16m4(__VA_ARGS__) |
| #define vset_v_u16m1_u16m8 | ( | ... | ) | __riscv_vset_v_u16m1_u16m8(__VA_ARGS__) |
| #define vset_v_u16m2_u16m4 | ( | ... | ) | __riscv_vset_v_u16m2_u16m4(__VA_ARGS__) |
| #define vset_v_u16m2_u16m8 | ( | ... | ) | __riscv_vset_v_u16m2_u16m8(__VA_ARGS__) |
| #define vset_v_u16m4_u16m8 | ( | ... | ) | __riscv_vset_v_u16m4_u16m8(__VA_ARGS__) |
| #define vset_v_u32m1_u32m2 | ( | ... | ) | __riscv_vset_v_u32m1_u32m2(__VA_ARGS__) |
| #define vset_v_u32m1_u32m4 | ( | ... | ) | __riscv_vset_v_u32m1_u32m4(__VA_ARGS__) |
| #define vset_v_u32m1_u32m8 | ( | ... | ) | __riscv_vset_v_u32m1_u32m8(__VA_ARGS__) |
| #define vset_v_u32m2_u32m4 | ( | ... | ) | __riscv_vset_v_u32m2_u32m4(__VA_ARGS__) |
| #define vset_v_u32m2_u32m8 | ( | ... | ) | __riscv_vset_v_u32m2_u32m8(__VA_ARGS__) |
| #define vset_v_u32m4_u32m8 | ( | ... | ) | __riscv_vset_v_u32m4_u32m8(__VA_ARGS__) |
| #define vset_v_u64m1_u64m2 | ( | ... | ) | __riscv_vset_v_u64m1_u64m2(__VA_ARGS__) |
| #define vset_v_u64m1_u64m4 | ( | ... | ) | __riscv_vset_v_u64m1_u64m4(__VA_ARGS__) |
| #define vset_v_u64m1_u64m8 | ( | ... | ) | __riscv_vset_v_u64m1_u64m8(__VA_ARGS__) |
| #define vset_v_u64m2_u64m4 | ( | ... | ) | __riscv_vset_v_u64m2_u64m4(__VA_ARGS__) |
| #define vset_v_u64m2_u64m8 | ( | ... | ) | __riscv_vset_v_u64m2_u64m8(__VA_ARGS__) |
| #define vset_v_u64m4_u64m8 | ( | ... | ) | __riscv_vset_v_u64m4_u64m8(__VA_ARGS__) |
| #define vset_v_u8m1_u8m2 | ( | ... | ) | __riscv_vset_v_u8m1_u8m2(__VA_ARGS__) |
| #define vset_v_u8m1_u8m4 | ( | ... | ) | __riscv_vset_v_u8m1_u8m4(__VA_ARGS__) |
| #define vset_v_u8m1_u8m8 | ( | ... | ) | __riscv_vset_v_u8m1_u8m8(__VA_ARGS__) |
| #define vset_v_u8m2_u8m4 | ( | ... | ) | __riscv_vset_v_u8m2_u8m4(__VA_ARGS__) |
| #define vset_v_u8m2_u8m8 | ( | ... | ) | __riscv_vset_v_u8m2_u8m8(__VA_ARGS__) |
| #define vset_v_u8m4_u8m8 | ( | ... | ) | __riscv_vset_v_u8m4_u8m8(__VA_ARGS__) |
| #define vsetvl_e16m1 | ( | ... | ) | __riscv_vsetvl_e16m1(__VA_ARGS__) |
| #define vsetvl_e16m2 | ( | ... | ) | __riscv_vsetvl_e16m2(__VA_ARGS__) |
| #define vsetvl_e16m4 | ( | ... | ) | __riscv_vsetvl_e16m4(__VA_ARGS__) |
| #define vsetvl_e16m8 | ( | ... | ) | __riscv_vsetvl_e16m8(__VA_ARGS__) |
| #define vsetvl_e16mf2 | ( | ... | ) | __riscv_vsetvl_e16mf2(__VA_ARGS__) |
| #define vsetvl_e16mf4 | ( | ... | ) | __riscv_vsetvl_e16mf4(__VA_ARGS__) |
| #define vsetvl_e32m1 | ( | ... | ) | __riscv_vsetvl_e32m1(__VA_ARGS__) |
| #define vsetvl_e32m2 | ( | ... | ) | __riscv_vsetvl_e32m2(__VA_ARGS__) |
| #define vsetvl_e32m4 | ( | ... | ) | __riscv_vsetvl_e32m4(__VA_ARGS__) |
| #define vsetvl_e32m8 | ( | ... | ) | __riscv_vsetvl_e32m8(__VA_ARGS__) |
| #define vsetvl_e32mf2 | ( | ... | ) | __riscv_vsetvl_e32mf2(__VA_ARGS__) |
| #define vsetvl_e64m1 | ( | ... | ) | __riscv_vsetvl_e64m1(__VA_ARGS__) |
| #define vsetvl_e64m2 | ( | ... | ) | __riscv_vsetvl_e64m2(__VA_ARGS__) |
| #define vsetvl_e64m4 | ( | ... | ) | __riscv_vsetvl_e64m4(__VA_ARGS__) |
| #define vsetvl_e64m8 | ( | ... | ) | __riscv_vsetvl_e64m8(__VA_ARGS__) |
| #define vsetvl_e8m1 | ( | ... | ) | __riscv_vsetvl_e8m1(__VA_ARGS__) |
| #define vsetvl_e8m2 | ( | ... | ) | __riscv_vsetvl_e8m2(__VA_ARGS__) |
| #define vsetvl_e8m4 | ( | ... | ) | __riscv_vsetvl_e8m4(__VA_ARGS__) |
| #define vsetvl_e8m8 | ( | ... | ) | __riscv_vsetvl_e8m8(__VA_ARGS__) |
| #define vsetvl_e8mf2 | ( | ... | ) | __riscv_vsetvl_e8mf2(__VA_ARGS__) |
| #define vsetvl_e8mf4 | ( | ... | ) | __riscv_vsetvl_e8mf4(__VA_ARGS__) |
| #define vsetvl_e8mf8 | ( | ... | ) | __riscv_vsetvl_e8mf8(__VA_ARGS__) |
| #define vsetvlmax_e16m1 | ( | ... | ) | __riscv_vsetvlmax_e16m1(__VA_ARGS__) |
| #define vsetvlmax_e16m2 | ( | ... | ) | __riscv_vsetvlmax_e16m2(__VA_ARGS__) |
| #define vsetvlmax_e16m4 | ( | ... | ) | __riscv_vsetvlmax_e16m4(__VA_ARGS__) |
| #define vsetvlmax_e16m8 | ( | ... | ) | __riscv_vsetvlmax_e16m8(__VA_ARGS__) |
| #define vsetvlmax_e16mf2 | ( | ... | ) | __riscv_vsetvlmax_e16mf2(__VA_ARGS__) |
| #define vsetvlmax_e16mf4 | ( | ... | ) | __riscv_vsetvlmax_e16mf4(__VA_ARGS__) |
| #define vsetvlmax_e32m1 | ( | ... | ) | __riscv_vsetvlmax_e32m1(__VA_ARGS__) |
| #define vsetvlmax_e32m2 | ( | ... | ) | __riscv_vsetvlmax_e32m2(__VA_ARGS__) |
| #define vsetvlmax_e32m4 | ( | ... | ) | __riscv_vsetvlmax_e32m4(__VA_ARGS__) |
| #define vsetvlmax_e32m8 | ( | ... | ) | __riscv_vsetvlmax_e32m8(__VA_ARGS__) |
| #define vsetvlmax_e32mf2 | ( | ... | ) | __riscv_vsetvlmax_e32mf2(__VA_ARGS__) |
| #define vsetvlmax_e64m1 | ( | ... | ) | __riscv_vsetvlmax_e64m1(__VA_ARGS__) |
| #define vsetvlmax_e64m2 | ( | ... | ) | __riscv_vsetvlmax_e64m2(__VA_ARGS__) |
| #define vsetvlmax_e64m4 | ( | ... | ) | __riscv_vsetvlmax_e64m4(__VA_ARGS__) |
| #define vsetvlmax_e64m8 | ( | ... | ) | __riscv_vsetvlmax_e64m8(__VA_ARGS__) |
| #define vsetvlmax_e8m1 | ( | ... | ) | __riscv_vsetvlmax_e8m1(__VA_ARGS__) |
| #define vsetvlmax_e8m2 | ( | ... | ) | __riscv_vsetvlmax_e8m2(__VA_ARGS__) |
| #define vsetvlmax_e8m4 | ( | ... | ) | __riscv_vsetvlmax_e8m4(__VA_ARGS__) |
| #define vsetvlmax_e8m8 | ( | ... | ) | __riscv_vsetvlmax_e8m8(__VA_ARGS__) |
| #define vsetvlmax_e8mf2 | ( | ... | ) | __riscv_vsetvlmax_e8mf2(__VA_ARGS__) |
| #define vsetvlmax_e8mf4 | ( | ... | ) | __riscv_vsetvlmax_e8mf4(__VA_ARGS__) |
| #define vsetvlmax_e8mf8 | ( | ... | ) | __riscv_vsetvlmax_e8mf8(__VA_ARGS__) |
| #define vsext_vf2_i16m1 | ( | ... | ) | __riscv_vsext_vf2_i16m1(__VA_ARGS__) |
| #define vsext_vf2_i16m1_m | ( | ... | ) | __riscv_vsext_vf2_i16m1_tumu(__VA_ARGS__) |
| #define vsext_vf2_i16m2 | ( | ... | ) | __riscv_vsext_vf2_i16m2(__VA_ARGS__) |
| #define vsext_vf2_i16m2_m | ( | ... | ) | __riscv_vsext_vf2_i16m2_tumu(__VA_ARGS__) |
| #define vsext_vf2_i16m4 | ( | ... | ) | __riscv_vsext_vf2_i16m4(__VA_ARGS__) |
| #define vsext_vf2_i16m4_m | ( | ... | ) | __riscv_vsext_vf2_i16m4_tumu(__VA_ARGS__) |
| #define vsext_vf2_i16m8 | ( | ... | ) | __riscv_vsext_vf2_i16m8(__VA_ARGS__) |
| #define vsext_vf2_i16m8_m | ( | ... | ) | __riscv_vsext_vf2_i16m8_tumu(__VA_ARGS__) |
| #define vsext_vf2_i16mf2 | ( | ... | ) | __riscv_vsext_vf2_i16mf2(__VA_ARGS__) |
| #define vsext_vf2_i16mf2_m | ( | ... | ) | __riscv_vsext_vf2_i16mf2_tumu(__VA_ARGS__) |
| #define vsext_vf2_i16mf4 | ( | ... | ) | __riscv_vsext_vf2_i16mf4(__VA_ARGS__) |
| #define vsext_vf2_i16mf4_m | ( | ... | ) | __riscv_vsext_vf2_i16mf4_tumu(__VA_ARGS__) |
| #define vsext_vf2_i32m1 | ( | ... | ) | __riscv_vsext_vf2_i32m1(__VA_ARGS__) |
| #define vsext_vf2_i32m1_m | ( | ... | ) | __riscv_vsext_vf2_i32m1_tumu(__VA_ARGS__) |
| #define vsext_vf2_i32m2 | ( | ... | ) | __riscv_vsext_vf2_i32m2(__VA_ARGS__) |
| #define vsext_vf2_i32m2_m | ( | ... | ) | __riscv_vsext_vf2_i32m2_tumu(__VA_ARGS__) |
| #define vsext_vf2_i32m4 | ( | ... | ) | __riscv_vsext_vf2_i32m4(__VA_ARGS__) |
| #define vsext_vf2_i32m4_m | ( | ... | ) | __riscv_vsext_vf2_i32m4_tumu(__VA_ARGS__) |
| #define vsext_vf2_i32m8 | ( | ... | ) | __riscv_vsext_vf2_i32m8(__VA_ARGS__) |
| #define vsext_vf2_i32m8_m | ( | ... | ) | __riscv_vsext_vf2_i32m8_tumu(__VA_ARGS__) |
| #define vsext_vf2_i32mf2 | ( | ... | ) | __riscv_vsext_vf2_i32mf2(__VA_ARGS__) |
| #define vsext_vf2_i32mf2_m | ( | ... | ) | __riscv_vsext_vf2_i32mf2_tumu(__VA_ARGS__) |
| #define vsext_vf2_i64m1 | ( | ... | ) | __riscv_vsext_vf2_i64m1(__VA_ARGS__) |
| #define vsext_vf2_i64m1_m | ( | ... | ) | __riscv_vsext_vf2_i64m1_tumu(__VA_ARGS__) |
| #define vsext_vf2_i64m2 | ( | ... | ) | __riscv_vsext_vf2_i64m2(__VA_ARGS__) |
| #define vsext_vf2_i64m2_m | ( | ... | ) | __riscv_vsext_vf2_i64m2_tumu(__VA_ARGS__) |
| #define vsext_vf2_i64m4 | ( | ... | ) | __riscv_vsext_vf2_i64m4(__VA_ARGS__) |
| #define vsext_vf2_i64m4_m | ( | ... | ) | __riscv_vsext_vf2_i64m4_tumu(__VA_ARGS__) |
| #define vsext_vf2_i64m8 | ( | ... | ) | __riscv_vsext_vf2_i64m8(__VA_ARGS__) |
| #define vsext_vf2_i64m8_m | ( | ... | ) | __riscv_vsext_vf2_i64m8_tumu(__VA_ARGS__) |
| #define vsext_vf4_i32m1 | ( | ... | ) | __riscv_vsext_vf4_i32m1(__VA_ARGS__) |
| #define vsext_vf4_i32m1_m | ( | ... | ) | __riscv_vsext_vf4_i32m1_tumu(__VA_ARGS__) |
| #define vsext_vf4_i32m2 | ( | ... | ) | __riscv_vsext_vf4_i32m2(__VA_ARGS__) |
| #define vsext_vf4_i32m2_m | ( | ... | ) | __riscv_vsext_vf4_i32m2_tumu(__VA_ARGS__) |
| #define vsext_vf4_i32m4 | ( | ... | ) | __riscv_vsext_vf4_i32m4(__VA_ARGS__) |
| #define vsext_vf4_i32m4_m | ( | ... | ) | __riscv_vsext_vf4_i32m4_tumu(__VA_ARGS__) |
| #define vsext_vf4_i32m8 | ( | ... | ) | __riscv_vsext_vf4_i32m8(__VA_ARGS__) |
| #define vsext_vf4_i32m8_m | ( | ... | ) | __riscv_vsext_vf4_i32m8_tumu(__VA_ARGS__) |
| #define vsext_vf4_i32mf2 | ( | ... | ) | __riscv_vsext_vf4_i32mf2(__VA_ARGS__) |
| #define vsext_vf4_i32mf2_m | ( | ... | ) | __riscv_vsext_vf4_i32mf2_tumu(__VA_ARGS__) |
| #define vsext_vf4_i64m1 | ( | ... | ) | __riscv_vsext_vf4_i64m1(__VA_ARGS__) |
| #define vsext_vf4_i64m1_m | ( | ... | ) | __riscv_vsext_vf4_i64m1_tumu(__VA_ARGS__) |
| #define vsext_vf4_i64m2 | ( | ... | ) | __riscv_vsext_vf4_i64m2(__VA_ARGS__) |
| #define vsext_vf4_i64m2_m | ( | ... | ) | __riscv_vsext_vf4_i64m2_tumu(__VA_ARGS__) |
| #define vsext_vf4_i64m4 | ( | ... | ) | __riscv_vsext_vf4_i64m4(__VA_ARGS__) |
| #define vsext_vf4_i64m4_m | ( | ... | ) | __riscv_vsext_vf4_i64m4_tumu(__VA_ARGS__) |
| #define vsext_vf4_i64m8 | ( | ... | ) | __riscv_vsext_vf4_i64m8(__VA_ARGS__) |
| #define vsext_vf4_i64m8_m | ( | ... | ) | __riscv_vsext_vf4_i64m8_tumu(__VA_ARGS__) |
| #define vsext_vf8_i64m1 | ( | ... | ) | __riscv_vsext_vf8_i64m1(__VA_ARGS__) |
| #define vsext_vf8_i64m1_m | ( | ... | ) | __riscv_vsext_vf8_i64m1_tumu(__VA_ARGS__) |
| #define vsext_vf8_i64m2 | ( | ... | ) | __riscv_vsext_vf8_i64m2(__VA_ARGS__) |
| #define vsext_vf8_i64m2_m | ( | ... | ) | __riscv_vsext_vf8_i64m2_tumu(__VA_ARGS__) |
| #define vsext_vf8_i64m4 | ( | ... | ) | __riscv_vsext_vf8_i64m4(__VA_ARGS__) |
| #define vsext_vf8_i64m4_m | ( | ... | ) | __riscv_vsext_vf8_i64m4_tumu(__VA_ARGS__) |
| #define vsext_vf8_i64m8 | ( | ... | ) | __riscv_vsext_vf8_i64m8(__VA_ARGS__) |
| #define vsext_vf8_i64m8_m | ( | ... | ) | __riscv_vsext_vf8_i64m8_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i16m1 | ( | ... | ) | __riscv_vslide1down_vx_i16m1(__VA_ARGS__) |
| #define vslide1down_vx_i16m1_m | ( | ... | ) | __riscv_vslide1down_vx_i16m1_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i16m2 | ( | ... | ) | __riscv_vslide1down_vx_i16m2(__VA_ARGS__) |
| #define vslide1down_vx_i16m2_m | ( | ... | ) | __riscv_vslide1down_vx_i16m2_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i16m4 | ( | ... | ) | __riscv_vslide1down_vx_i16m4(__VA_ARGS__) |
| #define vslide1down_vx_i16m4_m | ( | ... | ) | __riscv_vslide1down_vx_i16m4_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i16m8 | ( | ... | ) | __riscv_vslide1down_vx_i16m8(__VA_ARGS__) |
| #define vslide1down_vx_i16m8_m | ( | ... | ) | __riscv_vslide1down_vx_i16m8_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i16mf2 | ( | ... | ) | __riscv_vslide1down_vx_i16mf2(__VA_ARGS__) |
| #define vslide1down_vx_i16mf2_m | ( | ... | ) | __riscv_vslide1down_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i16mf4 | ( | ... | ) | __riscv_vslide1down_vx_i16mf4(__VA_ARGS__) |
| #define vslide1down_vx_i16mf4_m | ( | ... | ) | __riscv_vslide1down_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i32m1 | ( | ... | ) | __riscv_vslide1down_vx_i32m1(__VA_ARGS__) |
| #define vslide1down_vx_i32m1_m | ( | ... | ) | __riscv_vslide1down_vx_i32m1_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i32m2 | ( | ... | ) | __riscv_vslide1down_vx_i32m2(__VA_ARGS__) |
| #define vslide1down_vx_i32m2_m | ( | ... | ) | __riscv_vslide1down_vx_i32m2_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i32m4 | ( | ... | ) | __riscv_vslide1down_vx_i32m4(__VA_ARGS__) |
| #define vslide1down_vx_i32m4_m | ( | ... | ) | __riscv_vslide1down_vx_i32m4_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i32m8 | ( | ... | ) | __riscv_vslide1down_vx_i32m8(__VA_ARGS__) |
| #define vslide1down_vx_i32m8_m | ( | ... | ) | __riscv_vslide1down_vx_i32m8_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i32mf2 | ( | ... | ) | __riscv_vslide1down_vx_i32mf2(__VA_ARGS__) |
| #define vslide1down_vx_i32mf2_m | ( | ... | ) | __riscv_vslide1down_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i64m1 | ( | ... | ) | __riscv_vslide1down_vx_i64m1(__VA_ARGS__) |
| #define vslide1down_vx_i64m1_m | ( | ... | ) | __riscv_vslide1down_vx_i64m1_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i64m2 | ( | ... | ) | __riscv_vslide1down_vx_i64m2(__VA_ARGS__) |
| #define vslide1down_vx_i64m2_m | ( | ... | ) | __riscv_vslide1down_vx_i64m2_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i64m4 | ( | ... | ) | __riscv_vslide1down_vx_i64m4(__VA_ARGS__) |
| #define vslide1down_vx_i64m4_m | ( | ... | ) | __riscv_vslide1down_vx_i64m4_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i64m8 | ( | ... | ) | __riscv_vslide1down_vx_i64m8(__VA_ARGS__) |
| #define vslide1down_vx_i64m8_m | ( | ... | ) | __riscv_vslide1down_vx_i64m8_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i8m1 | ( | ... | ) | __riscv_vslide1down_vx_i8m1(__VA_ARGS__) |
| #define vslide1down_vx_i8m1_m | ( | ... | ) | __riscv_vslide1down_vx_i8m1_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i8m2 | ( | ... | ) | __riscv_vslide1down_vx_i8m2(__VA_ARGS__) |
| #define vslide1down_vx_i8m2_m | ( | ... | ) | __riscv_vslide1down_vx_i8m2_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i8m4 | ( | ... | ) | __riscv_vslide1down_vx_i8m4(__VA_ARGS__) |
| #define vslide1down_vx_i8m4_m | ( | ... | ) | __riscv_vslide1down_vx_i8m4_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i8m8 | ( | ... | ) | __riscv_vslide1down_vx_i8m8(__VA_ARGS__) |
| #define vslide1down_vx_i8m8_m | ( | ... | ) | __riscv_vslide1down_vx_i8m8_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i8mf2 | ( | ... | ) | __riscv_vslide1down_vx_i8mf2(__VA_ARGS__) |
| #define vslide1down_vx_i8mf2_m | ( | ... | ) | __riscv_vslide1down_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i8mf4 | ( | ... | ) | __riscv_vslide1down_vx_i8mf4(__VA_ARGS__) |
| #define vslide1down_vx_i8mf4_m | ( | ... | ) | __riscv_vslide1down_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vslide1down_vx_i8mf8 | ( | ... | ) | __riscv_vslide1down_vx_i8mf8(__VA_ARGS__) |
| #define vslide1down_vx_i8mf8_m | ( | ... | ) | __riscv_vslide1down_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u16m1 | ( | ... | ) | __riscv_vslide1down_vx_u16m1(__VA_ARGS__) |
| #define vslide1down_vx_u16m1_m | ( | ... | ) | __riscv_vslide1down_vx_u16m1_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u16m2 | ( | ... | ) | __riscv_vslide1down_vx_u16m2(__VA_ARGS__) |
| #define vslide1down_vx_u16m2_m | ( | ... | ) | __riscv_vslide1down_vx_u16m2_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u16m4 | ( | ... | ) | __riscv_vslide1down_vx_u16m4(__VA_ARGS__) |
| #define vslide1down_vx_u16m4_m | ( | ... | ) | __riscv_vslide1down_vx_u16m4_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u16m8 | ( | ... | ) | __riscv_vslide1down_vx_u16m8(__VA_ARGS__) |
| #define vslide1down_vx_u16m8_m | ( | ... | ) | __riscv_vslide1down_vx_u16m8_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u16mf2 | ( | ... | ) | __riscv_vslide1down_vx_u16mf2(__VA_ARGS__) |
| #define vslide1down_vx_u16mf2_m | ( | ... | ) | __riscv_vslide1down_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u16mf4 | ( | ... | ) | __riscv_vslide1down_vx_u16mf4(__VA_ARGS__) |
| #define vslide1down_vx_u16mf4_m | ( | ... | ) | __riscv_vslide1down_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u32m1 | ( | ... | ) | __riscv_vslide1down_vx_u32m1(__VA_ARGS__) |
| #define vslide1down_vx_u32m1_m | ( | ... | ) | __riscv_vslide1down_vx_u32m1_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u32m2 | ( | ... | ) | __riscv_vslide1down_vx_u32m2(__VA_ARGS__) |
| #define vslide1down_vx_u32m2_m | ( | ... | ) | __riscv_vslide1down_vx_u32m2_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u32m4 | ( | ... | ) | __riscv_vslide1down_vx_u32m4(__VA_ARGS__) |
| #define vslide1down_vx_u32m4_m | ( | ... | ) | __riscv_vslide1down_vx_u32m4_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u32m8 | ( | ... | ) | __riscv_vslide1down_vx_u32m8(__VA_ARGS__) |
| #define vslide1down_vx_u32m8_m | ( | ... | ) | __riscv_vslide1down_vx_u32m8_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u32mf2 | ( | ... | ) | __riscv_vslide1down_vx_u32mf2(__VA_ARGS__) |
| #define vslide1down_vx_u32mf2_m | ( | ... | ) | __riscv_vslide1down_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u64m1 | ( | ... | ) | __riscv_vslide1down_vx_u64m1(__VA_ARGS__) |
| #define vslide1down_vx_u64m1_m | ( | ... | ) | __riscv_vslide1down_vx_u64m1_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u64m2 | ( | ... | ) | __riscv_vslide1down_vx_u64m2(__VA_ARGS__) |
| #define vslide1down_vx_u64m2_m | ( | ... | ) | __riscv_vslide1down_vx_u64m2_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u64m4 | ( | ... | ) | __riscv_vslide1down_vx_u64m4(__VA_ARGS__) |
| #define vslide1down_vx_u64m4_m | ( | ... | ) | __riscv_vslide1down_vx_u64m4_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u64m8 | ( | ... | ) | __riscv_vslide1down_vx_u64m8(__VA_ARGS__) |
| #define vslide1down_vx_u64m8_m | ( | ... | ) | __riscv_vslide1down_vx_u64m8_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u8m1 | ( | ... | ) | __riscv_vslide1down_vx_u8m1(__VA_ARGS__) |
| #define vslide1down_vx_u8m1_m | ( | ... | ) | __riscv_vslide1down_vx_u8m1_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u8m2 | ( | ... | ) | __riscv_vslide1down_vx_u8m2(__VA_ARGS__) |
| #define vslide1down_vx_u8m2_m | ( | ... | ) | __riscv_vslide1down_vx_u8m2_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u8m4 | ( | ... | ) | __riscv_vslide1down_vx_u8m4(__VA_ARGS__) |
| #define vslide1down_vx_u8m4_m | ( | ... | ) | __riscv_vslide1down_vx_u8m4_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u8m8 | ( | ... | ) | __riscv_vslide1down_vx_u8m8(__VA_ARGS__) |
| #define vslide1down_vx_u8m8_m | ( | ... | ) | __riscv_vslide1down_vx_u8m8_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u8mf2 | ( | ... | ) | __riscv_vslide1down_vx_u8mf2(__VA_ARGS__) |
| #define vslide1down_vx_u8mf2_m | ( | ... | ) | __riscv_vslide1down_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u8mf4 | ( | ... | ) | __riscv_vslide1down_vx_u8mf4(__VA_ARGS__) |
| #define vslide1down_vx_u8mf4_m | ( | ... | ) | __riscv_vslide1down_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vslide1down_vx_u8mf8 | ( | ... | ) | __riscv_vslide1down_vx_u8mf8(__VA_ARGS__) |
| #define vslide1down_vx_u8mf8_m | ( | ... | ) | __riscv_vslide1down_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i16m1 | ( | ... | ) | __riscv_vslide1up_vx_i16m1(__VA_ARGS__) |
| #define vslide1up_vx_i16m1_m | ( | ... | ) | __riscv_vslide1up_vx_i16m1_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i16m2 | ( | ... | ) | __riscv_vslide1up_vx_i16m2(__VA_ARGS__) |
| #define vslide1up_vx_i16m2_m | ( | ... | ) | __riscv_vslide1up_vx_i16m2_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i16m4 | ( | ... | ) | __riscv_vslide1up_vx_i16m4(__VA_ARGS__) |
| #define vslide1up_vx_i16m4_m | ( | ... | ) | __riscv_vslide1up_vx_i16m4_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i16m8 | ( | ... | ) | __riscv_vslide1up_vx_i16m8(__VA_ARGS__) |
| #define vslide1up_vx_i16m8_m | ( | ... | ) | __riscv_vslide1up_vx_i16m8_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i16mf2 | ( | ... | ) | __riscv_vslide1up_vx_i16mf2(__VA_ARGS__) |
| #define vslide1up_vx_i16mf2_m | ( | ... | ) | __riscv_vslide1up_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i16mf4 | ( | ... | ) | __riscv_vslide1up_vx_i16mf4(__VA_ARGS__) |
| #define vslide1up_vx_i16mf4_m | ( | ... | ) | __riscv_vslide1up_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i32m1 | ( | ... | ) | __riscv_vslide1up_vx_i32m1(__VA_ARGS__) |
| #define vslide1up_vx_i32m1_m | ( | ... | ) | __riscv_vslide1up_vx_i32m1_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i32m2 | ( | ... | ) | __riscv_vslide1up_vx_i32m2(__VA_ARGS__) |
| #define vslide1up_vx_i32m2_m | ( | ... | ) | __riscv_vslide1up_vx_i32m2_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i32m4 | ( | ... | ) | __riscv_vslide1up_vx_i32m4(__VA_ARGS__) |
| #define vslide1up_vx_i32m4_m | ( | ... | ) | __riscv_vslide1up_vx_i32m4_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i32m8 | ( | ... | ) | __riscv_vslide1up_vx_i32m8(__VA_ARGS__) |
| #define vslide1up_vx_i32m8_m | ( | ... | ) | __riscv_vslide1up_vx_i32m8_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i32mf2 | ( | ... | ) | __riscv_vslide1up_vx_i32mf2(__VA_ARGS__) |
| #define vslide1up_vx_i32mf2_m | ( | ... | ) | __riscv_vslide1up_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i64m1 | ( | ... | ) | __riscv_vslide1up_vx_i64m1(__VA_ARGS__) |
| #define vslide1up_vx_i64m1_m | ( | ... | ) | __riscv_vslide1up_vx_i64m1_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i64m2 | ( | ... | ) | __riscv_vslide1up_vx_i64m2(__VA_ARGS__) |
| #define vslide1up_vx_i64m2_m | ( | ... | ) | __riscv_vslide1up_vx_i64m2_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i64m4 | ( | ... | ) | __riscv_vslide1up_vx_i64m4(__VA_ARGS__) |
| #define vslide1up_vx_i64m4_m | ( | ... | ) | __riscv_vslide1up_vx_i64m4_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i64m8 | ( | ... | ) | __riscv_vslide1up_vx_i64m8(__VA_ARGS__) |
| #define vslide1up_vx_i64m8_m | ( | ... | ) | __riscv_vslide1up_vx_i64m8_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i8m1 | ( | ... | ) | __riscv_vslide1up_vx_i8m1(__VA_ARGS__) |
| #define vslide1up_vx_i8m1_m | ( | ... | ) | __riscv_vslide1up_vx_i8m1_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i8m2 | ( | ... | ) | __riscv_vslide1up_vx_i8m2(__VA_ARGS__) |
| #define vslide1up_vx_i8m2_m | ( | ... | ) | __riscv_vslide1up_vx_i8m2_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i8m4 | ( | ... | ) | __riscv_vslide1up_vx_i8m4(__VA_ARGS__) |
| #define vslide1up_vx_i8m4_m | ( | ... | ) | __riscv_vslide1up_vx_i8m4_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i8m8 | ( | ... | ) | __riscv_vslide1up_vx_i8m8(__VA_ARGS__) |
| #define vslide1up_vx_i8m8_m | ( | ... | ) | __riscv_vslide1up_vx_i8m8_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i8mf2 | ( | ... | ) | __riscv_vslide1up_vx_i8mf2(__VA_ARGS__) |
| #define vslide1up_vx_i8mf2_m | ( | ... | ) | __riscv_vslide1up_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i8mf4 | ( | ... | ) | __riscv_vslide1up_vx_i8mf4(__VA_ARGS__) |
| #define vslide1up_vx_i8mf4_m | ( | ... | ) | __riscv_vslide1up_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vslide1up_vx_i8mf8 | ( | ... | ) | __riscv_vslide1up_vx_i8mf8(__VA_ARGS__) |
| #define vslide1up_vx_i8mf8_m | ( | ... | ) | __riscv_vslide1up_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u16m1 | ( | ... | ) | __riscv_vslide1up_vx_u16m1(__VA_ARGS__) |
| #define vslide1up_vx_u16m1_m | ( | ... | ) | __riscv_vslide1up_vx_u16m1_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u16m2 | ( | ... | ) | __riscv_vslide1up_vx_u16m2(__VA_ARGS__) |
| #define vslide1up_vx_u16m2_m | ( | ... | ) | __riscv_vslide1up_vx_u16m2_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u16m4 | ( | ... | ) | __riscv_vslide1up_vx_u16m4(__VA_ARGS__) |
| #define vslide1up_vx_u16m4_m | ( | ... | ) | __riscv_vslide1up_vx_u16m4_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u16m8 | ( | ... | ) | __riscv_vslide1up_vx_u16m8(__VA_ARGS__) |
| #define vslide1up_vx_u16m8_m | ( | ... | ) | __riscv_vslide1up_vx_u16m8_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u16mf2 | ( | ... | ) | __riscv_vslide1up_vx_u16mf2(__VA_ARGS__) |
| #define vslide1up_vx_u16mf2_m | ( | ... | ) | __riscv_vslide1up_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u16mf4 | ( | ... | ) | __riscv_vslide1up_vx_u16mf4(__VA_ARGS__) |
| #define vslide1up_vx_u16mf4_m | ( | ... | ) | __riscv_vslide1up_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u32m1 | ( | ... | ) | __riscv_vslide1up_vx_u32m1(__VA_ARGS__) |
| #define vslide1up_vx_u32m1_m | ( | ... | ) | __riscv_vslide1up_vx_u32m1_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u32m2 | ( | ... | ) | __riscv_vslide1up_vx_u32m2(__VA_ARGS__) |
| #define vslide1up_vx_u32m2_m | ( | ... | ) | __riscv_vslide1up_vx_u32m2_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u32m4 | ( | ... | ) | __riscv_vslide1up_vx_u32m4(__VA_ARGS__) |
| #define vslide1up_vx_u32m4_m | ( | ... | ) | __riscv_vslide1up_vx_u32m4_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u32m8 | ( | ... | ) | __riscv_vslide1up_vx_u32m8(__VA_ARGS__) |
| #define vslide1up_vx_u32m8_m | ( | ... | ) | __riscv_vslide1up_vx_u32m8_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u32mf2 | ( | ... | ) | __riscv_vslide1up_vx_u32mf2(__VA_ARGS__) |
| #define vslide1up_vx_u32mf2_m | ( | ... | ) | __riscv_vslide1up_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u64m1 | ( | ... | ) | __riscv_vslide1up_vx_u64m1(__VA_ARGS__) |
| #define vslide1up_vx_u64m1_m | ( | ... | ) | __riscv_vslide1up_vx_u64m1_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u64m2 | ( | ... | ) | __riscv_vslide1up_vx_u64m2(__VA_ARGS__) |
| #define vslide1up_vx_u64m2_m | ( | ... | ) | __riscv_vslide1up_vx_u64m2_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u64m4 | ( | ... | ) | __riscv_vslide1up_vx_u64m4(__VA_ARGS__) |
| #define vslide1up_vx_u64m4_m | ( | ... | ) | __riscv_vslide1up_vx_u64m4_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u64m8 | ( | ... | ) | __riscv_vslide1up_vx_u64m8(__VA_ARGS__) |
| #define vslide1up_vx_u64m8_m | ( | ... | ) | __riscv_vslide1up_vx_u64m8_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u8m1 | ( | ... | ) | __riscv_vslide1up_vx_u8m1(__VA_ARGS__) |
| #define vslide1up_vx_u8m1_m | ( | ... | ) | __riscv_vslide1up_vx_u8m1_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u8m2 | ( | ... | ) | __riscv_vslide1up_vx_u8m2(__VA_ARGS__) |
| #define vslide1up_vx_u8m2_m | ( | ... | ) | __riscv_vslide1up_vx_u8m2_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u8m4 | ( | ... | ) | __riscv_vslide1up_vx_u8m4(__VA_ARGS__) |
| #define vslide1up_vx_u8m4_m | ( | ... | ) | __riscv_vslide1up_vx_u8m4_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u8m8 | ( | ... | ) | __riscv_vslide1up_vx_u8m8(__VA_ARGS__) |
| #define vslide1up_vx_u8m8_m | ( | ... | ) | __riscv_vslide1up_vx_u8m8_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u8mf2 | ( | ... | ) | __riscv_vslide1up_vx_u8mf2(__VA_ARGS__) |
| #define vslide1up_vx_u8mf2_m | ( | ... | ) | __riscv_vslide1up_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u8mf4 | ( | ... | ) | __riscv_vslide1up_vx_u8mf4(__VA_ARGS__) |
| #define vslide1up_vx_u8mf4_m | ( | ... | ) | __riscv_vslide1up_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vslide1up_vx_u8mf8 | ( | ... | ) | __riscv_vslide1up_vx_u8mf8(__VA_ARGS__) |
| #define vslide1up_vx_u8mf8_m | ( | ... | ) | __riscv_vslide1up_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vslidedown_vx_f16m1 | ( | ... | ) | __riscv_vslidedown_vx_f16m1_tu(__VA_ARGS__) |
| #define vslidedown_vx_f16m1_m | ( | ... | ) | __riscv_vslidedown_vx_f16m1_tumu(__VA_ARGS__) |
| #define vslidedown_vx_f16m2 | ( | ... | ) | __riscv_vslidedown_vx_f16m2_tu(__VA_ARGS__) |
| #define vslidedown_vx_f16m2_m | ( | ... | ) | __riscv_vslidedown_vx_f16m2_tumu(__VA_ARGS__) |
| #define vslidedown_vx_f16m4 | ( | ... | ) | __riscv_vslidedown_vx_f16m4_tu(__VA_ARGS__) |
| #define vslidedown_vx_f16m4_m | ( | ... | ) | __riscv_vslidedown_vx_f16m4_tumu(__VA_ARGS__) |
| #define vslidedown_vx_f16m8 | ( | ... | ) | __riscv_vslidedown_vx_f16m8_tu(__VA_ARGS__) |
| #define vslidedown_vx_f16m8_m | ( | ... | ) | __riscv_vslidedown_vx_f16m8_tumu(__VA_ARGS__) |
| #define vslidedown_vx_f16mf2 | ( | ... | ) | __riscv_vslidedown_vx_f16mf2_tu(__VA_ARGS__) |
| #define vslidedown_vx_f16mf2_m | ( | ... | ) | __riscv_vslidedown_vx_f16mf2_tumu(__VA_ARGS__) |
| #define vslidedown_vx_f16mf4 | ( | ... | ) | __riscv_vslidedown_vx_f16mf4_tu(__VA_ARGS__) |
| #define vslidedown_vx_f16mf4_m | ( | ... | ) | __riscv_vslidedown_vx_f16mf4_tumu(__VA_ARGS__) |
| #define vslidedown_vx_f32m1 | ( | ... | ) | __riscv_vslidedown_vx_f32m1_tu(__VA_ARGS__) |
| #define vslidedown_vx_f32m1_m | ( | ... | ) | __riscv_vslidedown_vx_f32m1_tumu(__VA_ARGS__) |
| #define vslidedown_vx_f32m2 | ( | ... | ) | __riscv_vslidedown_vx_f32m2_tu(__VA_ARGS__) |
| #define vslidedown_vx_f32m2_m | ( | ... | ) | __riscv_vslidedown_vx_f32m2_tumu(__VA_ARGS__) |
| #define vslidedown_vx_f32m4 | ( | ... | ) | __riscv_vslidedown_vx_f32m4_tu(__VA_ARGS__) |
| #define vslidedown_vx_f32m4_m | ( | ... | ) | __riscv_vslidedown_vx_f32m4_tumu(__VA_ARGS__) |
| #define vslidedown_vx_f32m8 | ( | ... | ) | __riscv_vslidedown_vx_f32m8_tu(__VA_ARGS__) |
| #define vslidedown_vx_f32m8_m | ( | ... | ) | __riscv_vslidedown_vx_f32m8_tumu(__VA_ARGS__) |
| #define vslidedown_vx_f32mf2 | ( | ... | ) | __riscv_vslidedown_vx_f32mf2_tu(__VA_ARGS__) |
| #define vslidedown_vx_f32mf2_m | ( | ... | ) | __riscv_vslidedown_vx_f32mf2_tumu(__VA_ARGS__) |
| #define vslidedown_vx_f64m1 | ( | ... | ) | __riscv_vslidedown_vx_f64m1_tu(__VA_ARGS__) |
| #define vslidedown_vx_f64m1_m | ( | ... | ) | __riscv_vslidedown_vx_f64m1_tumu(__VA_ARGS__) |
| #define vslidedown_vx_f64m2 | ( | ... | ) | __riscv_vslidedown_vx_f64m2_tu(__VA_ARGS__) |
| #define vslidedown_vx_f64m2_m | ( | ... | ) | __riscv_vslidedown_vx_f64m2_tumu(__VA_ARGS__) |
| #define vslidedown_vx_f64m4 | ( | ... | ) | __riscv_vslidedown_vx_f64m4_tu(__VA_ARGS__) |
| #define vslidedown_vx_f64m4_m | ( | ... | ) | __riscv_vslidedown_vx_f64m4_tumu(__VA_ARGS__) |
| #define vslidedown_vx_f64m8 | ( | ... | ) | __riscv_vslidedown_vx_f64m8_tu(__VA_ARGS__) |
| #define vslidedown_vx_f64m8_m | ( | ... | ) | __riscv_vslidedown_vx_f64m8_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i16m1 | ( | ... | ) | __riscv_vslidedown_vx_i16m1_tu(__VA_ARGS__) |
| #define vslidedown_vx_i16m1_m | ( | ... | ) | __riscv_vslidedown_vx_i16m1_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i16m2 | ( | ... | ) | __riscv_vslidedown_vx_i16m2_tu(__VA_ARGS__) |
| #define vslidedown_vx_i16m2_m | ( | ... | ) | __riscv_vslidedown_vx_i16m2_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i16m4 | ( | ... | ) | __riscv_vslidedown_vx_i16m4_tu(__VA_ARGS__) |
| #define vslidedown_vx_i16m4_m | ( | ... | ) | __riscv_vslidedown_vx_i16m4_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i16m8 | ( | ... | ) | __riscv_vslidedown_vx_i16m8_tu(__VA_ARGS__) |
| #define vslidedown_vx_i16m8_m | ( | ... | ) | __riscv_vslidedown_vx_i16m8_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i16mf2 | ( | ... | ) | __riscv_vslidedown_vx_i16mf2_tu(__VA_ARGS__) |
| #define vslidedown_vx_i16mf2_m | ( | ... | ) | __riscv_vslidedown_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i16mf4 | ( | ... | ) | __riscv_vslidedown_vx_i16mf4_tu(__VA_ARGS__) |
| #define vslidedown_vx_i16mf4_m | ( | ... | ) | __riscv_vslidedown_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i32m1 | ( | ... | ) | __riscv_vslidedown_vx_i32m1_tu(__VA_ARGS__) |
| #define vslidedown_vx_i32m1_m | ( | ... | ) | __riscv_vslidedown_vx_i32m1_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i32m2 | ( | ... | ) | __riscv_vslidedown_vx_i32m2_tu(__VA_ARGS__) |
| #define vslidedown_vx_i32m2_m | ( | ... | ) | __riscv_vslidedown_vx_i32m2_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i32m4 | ( | ... | ) | __riscv_vslidedown_vx_i32m4_tu(__VA_ARGS__) |
| #define vslidedown_vx_i32m4_m | ( | ... | ) | __riscv_vslidedown_vx_i32m4_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i32m8 | ( | ... | ) | __riscv_vslidedown_vx_i32m8_tu(__VA_ARGS__) |
| #define vslidedown_vx_i32m8_m | ( | ... | ) | __riscv_vslidedown_vx_i32m8_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i32mf2 | ( | ... | ) | __riscv_vslidedown_vx_i32mf2_tu(__VA_ARGS__) |
| #define vslidedown_vx_i32mf2_m | ( | ... | ) | __riscv_vslidedown_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i64m1 | ( | ... | ) | __riscv_vslidedown_vx_i64m1_tu(__VA_ARGS__) |
| #define vslidedown_vx_i64m1_m | ( | ... | ) | __riscv_vslidedown_vx_i64m1_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i64m2 | ( | ... | ) | __riscv_vslidedown_vx_i64m2_tu(__VA_ARGS__) |
| #define vslidedown_vx_i64m2_m | ( | ... | ) | __riscv_vslidedown_vx_i64m2_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i64m4 | ( | ... | ) | __riscv_vslidedown_vx_i64m4_tu(__VA_ARGS__) |
| #define vslidedown_vx_i64m4_m | ( | ... | ) | __riscv_vslidedown_vx_i64m4_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i64m8 | ( | ... | ) | __riscv_vslidedown_vx_i64m8_tu(__VA_ARGS__) |
| #define vslidedown_vx_i64m8_m | ( | ... | ) | __riscv_vslidedown_vx_i64m8_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i8m1 | ( | ... | ) | __riscv_vslidedown_vx_i8m1_tu(__VA_ARGS__) |
| #define vslidedown_vx_i8m1_m | ( | ... | ) | __riscv_vslidedown_vx_i8m1_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i8m2 | ( | ... | ) | __riscv_vslidedown_vx_i8m2_tu(__VA_ARGS__) |
| #define vslidedown_vx_i8m2_m | ( | ... | ) | __riscv_vslidedown_vx_i8m2_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i8m4 | ( | ... | ) | __riscv_vslidedown_vx_i8m4_tu(__VA_ARGS__) |
| #define vslidedown_vx_i8m4_m | ( | ... | ) | __riscv_vslidedown_vx_i8m4_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i8m8 | ( | ... | ) | __riscv_vslidedown_vx_i8m8_tu(__VA_ARGS__) |
| #define vslidedown_vx_i8m8_m | ( | ... | ) | __riscv_vslidedown_vx_i8m8_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i8mf2 | ( | ... | ) | __riscv_vslidedown_vx_i8mf2_tu(__VA_ARGS__) |
| #define vslidedown_vx_i8mf2_m | ( | ... | ) | __riscv_vslidedown_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i8mf4 | ( | ... | ) | __riscv_vslidedown_vx_i8mf4_tu(__VA_ARGS__) |
| #define vslidedown_vx_i8mf4_m | ( | ... | ) | __riscv_vslidedown_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vslidedown_vx_i8mf8 | ( | ... | ) | __riscv_vslidedown_vx_i8mf8_tu(__VA_ARGS__) |
| #define vslidedown_vx_i8mf8_m | ( | ... | ) | __riscv_vslidedown_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u16m1 | ( | ... | ) | __riscv_vslidedown_vx_u16m1_tu(__VA_ARGS__) |
| #define vslidedown_vx_u16m1_m | ( | ... | ) | __riscv_vslidedown_vx_u16m1_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u16m2 | ( | ... | ) | __riscv_vslidedown_vx_u16m2_tu(__VA_ARGS__) |
| #define vslidedown_vx_u16m2_m | ( | ... | ) | __riscv_vslidedown_vx_u16m2_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u16m4 | ( | ... | ) | __riscv_vslidedown_vx_u16m4_tu(__VA_ARGS__) |
| #define vslidedown_vx_u16m4_m | ( | ... | ) | __riscv_vslidedown_vx_u16m4_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u16m8 | ( | ... | ) | __riscv_vslidedown_vx_u16m8_tu(__VA_ARGS__) |
| #define vslidedown_vx_u16m8_m | ( | ... | ) | __riscv_vslidedown_vx_u16m8_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u16mf2 | ( | ... | ) | __riscv_vslidedown_vx_u16mf2_tu(__VA_ARGS__) |
| #define vslidedown_vx_u16mf2_m | ( | ... | ) | __riscv_vslidedown_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u16mf4 | ( | ... | ) | __riscv_vslidedown_vx_u16mf4_tu(__VA_ARGS__) |
| #define vslidedown_vx_u16mf4_m | ( | ... | ) | __riscv_vslidedown_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u32m1 | ( | ... | ) | __riscv_vslidedown_vx_u32m1_tu(__VA_ARGS__) |
| #define vslidedown_vx_u32m1_m | ( | ... | ) | __riscv_vslidedown_vx_u32m1_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u32m2 | ( | ... | ) | __riscv_vslidedown_vx_u32m2_tu(__VA_ARGS__) |
| #define vslidedown_vx_u32m2_m | ( | ... | ) | __riscv_vslidedown_vx_u32m2_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u32m4 | ( | ... | ) | __riscv_vslidedown_vx_u32m4_tu(__VA_ARGS__) |
| #define vslidedown_vx_u32m4_m | ( | ... | ) | __riscv_vslidedown_vx_u32m4_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u32m8 | ( | ... | ) | __riscv_vslidedown_vx_u32m8_tu(__VA_ARGS__) |
| #define vslidedown_vx_u32m8_m | ( | ... | ) | __riscv_vslidedown_vx_u32m8_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u32mf2 | ( | ... | ) | __riscv_vslidedown_vx_u32mf2_tu(__VA_ARGS__) |
| #define vslidedown_vx_u32mf2_m | ( | ... | ) | __riscv_vslidedown_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u64m1 | ( | ... | ) | __riscv_vslidedown_vx_u64m1_tu(__VA_ARGS__) |
| #define vslidedown_vx_u64m1_m | ( | ... | ) | __riscv_vslidedown_vx_u64m1_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u64m2 | ( | ... | ) | __riscv_vslidedown_vx_u64m2_tu(__VA_ARGS__) |
| #define vslidedown_vx_u64m2_m | ( | ... | ) | __riscv_vslidedown_vx_u64m2_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u64m4 | ( | ... | ) | __riscv_vslidedown_vx_u64m4_tu(__VA_ARGS__) |
| #define vslidedown_vx_u64m4_m | ( | ... | ) | __riscv_vslidedown_vx_u64m4_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u64m8 | ( | ... | ) | __riscv_vslidedown_vx_u64m8_tu(__VA_ARGS__) |
| #define vslidedown_vx_u64m8_m | ( | ... | ) | __riscv_vslidedown_vx_u64m8_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u8m1 | ( | ... | ) | __riscv_vslidedown_vx_u8m1_tu(__VA_ARGS__) |
| #define vslidedown_vx_u8m1_m | ( | ... | ) | __riscv_vslidedown_vx_u8m1_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u8m2 | ( | ... | ) | __riscv_vslidedown_vx_u8m2_tu(__VA_ARGS__) |
| #define vslidedown_vx_u8m2_m | ( | ... | ) | __riscv_vslidedown_vx_u8m2_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u8m4 | ( | ... | ) | __riscv_vslidedown_vx_u8m4_tu(__VA_ARGS__) |
| #define vslidedown_vx_u8m4_m | ( | ... | ) | __riscv_vslidedown_vx_u8m4_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u8m8 | ( | ... | ) | __riscv_vslidedown_vx_u8m8_tu(__VA_ARGS__) |
| #define vslidedown_vx_u8m8_m | ( | ... | ) | __riscv_vslidedown_vx_u8m8_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u8mf2 | ( | ... | ) | __riscv_vslidedown_vx_u8mf2_tu(__VA_ARGS__) |
| #define vslidedown_vx_u8mf2_m | ( | ... | ) | __riscv_vslidedown_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u8mf4 | ( | ... | ) | __riscv_vslidedown_vx_u8mf4_tu(__VA_ARGS__) |
| #define vslidedown_vx_u8mf4_m | ( | ... | ) | __riscv_vslidedown_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vslidedown_vx_u8mf8 | ( | ... | ) | __riscv_vslidedown_vx_u8mf8_tu(__VA_ARGS__) |
| #define vslidedown_vx_u8mf8_m | ( | ... | ) | __riscv_vslidedown_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vslideup_vx_f16m1 | ( | ... | ) | __riscv_vslideup_vx_f16m1_tu(__VA_ARGS__) |
| #define vslideup_vx_f16m1_m | ( | ... | ) | __riscv_vslideup_vx_f16m1_tumu(__VA_ARGS__) |
| #define vslideup_vx_f16m2 | ( | ... | ) | __riscv_vslideup_vx_f16m2_tu(__VA_ARGS__) |
| #define vslideup_vx_f16m2_m | ( | ... | ) | __riscv_vslideup_vx_f16m2_tumu(__VA_ARGS__) |
| #define vslideup_vx_f16m4 | ( | ... | ) | __riscv_vslideup_vx_f16m4_tu(__VA_ARGS__) |
| #define vslideup_vx_f16m4_m | ( | ... | ) | __riscv_vslideup_vx_f16m4_tumu(__VA_ARGS__) |
| #define vslideup_vx_f16m8 | ( | ... | ) | __riscv_vslideup_vx_f16m8_tu(__VA_ARGS__) |
| #define vslideup_vx_f16m8_m | ( | ... | ) | __riscv_vslideup_vx_f16m8_tumu(__VA_ARGS__) |
| #define vslideup_vx_f16mf2 | ( | ... | ) | __riscv_vslideup_vx_f16mf2_tu(__VA_ARGS__) |
| #define vslideup_vx_f16mf2_m | ( | ... | ) | __riscv_vslideup_vx_f16mf2_tumu(__VA_ARGS__) |
| #define vslideup_vx_f16mf4 | ( | ... | ) | __riscv_vslideup_vx_f16mf4_tu(__VA_ARGS__) |
| #define vslideup_vx_f16mf4_m | ( | ... | ) | __riscv_vslideup_vx_f16mf4_tumu(__VA_ARGS__) |
| #define vslideup_vx_f32m1 | ( | ... | ) | __riscv_vslideup_vx_f32m1_tu(__VA_ARGS__) |
| #define vslideup_vx_f32m1_m | ( | ... | ) | __riscv_vslideup_vx_f32m1_tumu(__VA_ARGS__) |
| #define vslideup_vx_f32m2 | ( | ... | ) | __riscv_vslideup_vx_f32m2_tu(__VA_ARGS__) |
| #define vslideup_vx_f32m2_m | ( | ... | ) | __riscv_vslideup_vx_f32m2_tumu(__VA_ARGS__) |
| #define vslideup_vx_f32m4 | ( | ... | ) | __riscv_vslideup_vx_f32m4_tu(__VA_ARGS__) |
| #define vslideup_vx_f32m4_m | ( | ... | ) | __riscv_vslideup_vx_f32m4_tumu(__VA_ARGS__) |
| #define vslideup_vx_f32m8 | ( | ... | ) | __riscv_vslideup_vx_f32m8_tu(__VA_ARGS__) |
| #define vslideup_vx_f32m8_m | ( | ... | ) | __riscv_vslideup_vx_f32m8_tumu(__VA_ARGS__) |
| #define vslideup_vx_f32mf2 | ( | ... | ) | __riscv_vslideup_vx_f32mf2_tu(__VA_ARGS__) |
| #define vslideup_vx_f32mf2_m | ( | ... | ) | __riscv_vslideup_vx_f32mf2_tumu(__VA_ARGS__) |
| #define vslideup_vx_f64m1 | ( | ... | ) | __riscv_vslideup_vx_f64m1_tu(__VA_ARGS__) |
| #define vslideup_vx_f64m1_m | ( | ... | ) | __riscv_vslideup_vx_f64m1_tumu(__VA_ARGS__) |
| #define vslideup_vx_f64m2 | ( | ... | ) | __riscv_vslideup_vx_f64m2_tu(__VA_ARGS__) |
| #define vslideup_vx_f64m2_m | ( | ... | ) | __riscv_vslideup_vx_f64m2_tumu(__VA_ARGS__) |
| #define vslideup_vx_f64m4 | ( | ... | ) | __riscv_vslideup_vx_f64m4_tu(__VA_ARGS__) |
| #define vslideup_vx_f64m4_m | ( | ... | ) | __riscv_vslideup_vx_f64m4_tumu(__VA_ARGS__) |
| #define vslideup_vx_f64m8 | ( | ... | ) | __riscv_vslideup_vx_f64m8_tu(__VA_ARGS__) |
| #define vslideup_vx_f64m8_m | ( | ... | ) | __riscv_vslideup_vx_f64m8_tumu(__VA_ARGS__) |
| #define vslideup_vx_i16m1 | ( | ... | ) | __riscv_vslideup_vx_i16m1_tu(__VA_ARGS__) |
| #define vslideup_vx_i16m1_m | ( | ... | ) | __riscv_vslideup_vx_i16m1_tumu(__VA_ARGS__) |
| #define vslideup_vx_i16m2 | ( | ... | ) | __riscv_vslideup_vx_i16m2_tu(__VA_ARGS__) |
| #define vslideup_vx_i16m2_m | ( | ... | ) | __riscv_vslideup_vx_i16m2_tumu(__VA_ARGS__) |
| #define vslideup_vx_i16m4 | ( | ... | ) | __riscv_vslideup_vx_i16m4_tu(__VA_ARGS__) |
| #define vslideup_vx_i16m4_m | ( | ... | ) | __riscv_vslideup_vx_i16m4_tumu(__VA_ARGS__) |
| #define vslideup_vx_i16m8 | ( | ... | ) | __riscv_vslideup_vx_i16m8_tu(__VA_ARGS__) |
| #define vslideup_vx_i16m8_m | ( | ... | ) | __riscv_vslideup_vx_i16m8_tumu(__VA_ARGS__) |
| #define vslideup_vx_i16mf2 | ( | ... | ) | __riscv_vslideup_vx_i16mf2_tu(__VA_ARGS__) |
| #define vslideup_vx_i16mf2_m | ( | ... | ) | __riscv_vslideup_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vslideup_vx_i16mf4 | ( | ... | ) | __riscv_vslideup_vx_i16mf4_tu(__VA_ARGS__) |
| #define vslideup_vx_i16mf4_m | ( | ... | ) | __riscv_vslideup_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vslideup_vx_i32m1 | ( | ... | ) | __riscv_vslideup_vx_i32m1_tu(__VA_ARGS__) |
| #define vslideup_vx_i32m1_m | ( | ... | ) | __riscv_vslideup_vx_i32m1_tumu(__VA_ARGS__) |
| #define vslideup_vx_i32m2 | ( | ... | ) | __riscv_vslideup_vx_i32m2_tu(__VA_ARGS__) |
| #define vslideup_vx_i32m2_m | ( | ... | ) | __riscv_vslideup_vx_i32m2_tumu(__VA_ARGS__) |
| #define vslideup_vx_i32m4 | ( | ... | ) | __riscv_vslideup_vx_i32m4_tu(__VA_ARGS__) |
| #define vslideup_vx_i32m4_m | ( | ... | ) | __riscv_vslideup_vx_i32m4_tumu(__VA_ARGS__) |
| #define vslideup_vx_i32m8 | ( | ... | ) | __riscv_vslideup_vx_i32m8_tu(__VA_ARGS__) |
| #define vslideup_vx_i32m8_m | ( | ... | ) | __riscv_vslideup_vx_i32m8_tumu(__VA_ARGS__) |
| #define vslideup_vx_i32mf2 | ( | ... | ) | __riscv_vslideup_vx_i32mf2_tu(__VA_ARGS__) |
| #define vslideup_vx_i32mf2_m | ( | ... | ) | __riscv_vslideup_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vslideup_vx_i64m1 | ( | ... | ) | __riscv_vslideup_vx_i64m1_tu(__VA_ARGS__) |
| #define vslideup_vx_i64m1_m | ( | ... | ) | __riscv_vslideup_vx_i64m1_tumu(__VA_ARGS__) |
| #define vslideup_vx_i64m2 | ( | ... | ) | __riscv_vslideup_vx_i64m2_tu(__VA_ARGS__) |
| #define vslideup_vx_i64m2_m | ( | ... | ) | __riscv_vslideup_vx_i64m2_tumu(__VA_ARGS__) |
| #define vslideup_vx_i64m4 | ( | ... | ) | __riscv_vslideup_vx_i64m4_tu(__VA_ARGS__) |
| #define vslideup_vx_i64m4_m | ( | ... | ) | __riscv_vslideup_vx_i64m4_tumu(__VA_ARGS__) |
| #define vslideup_vx_i64m8 | ( | ... | ) | __riscv_vslideup_vx_i64m8_tu(__VA_ARGS__) |
| #define vslideup_vx_i64m8_m | ( | ... | ) | __riscv_vslideup_vx_i64m8_tumu(__VA_ARGS__) |
| #define vslideup_vx_i8m1 | ( | ... | ) | __riscv_vslideup_vx_i8m1_tu(__VA_ARGS__) |
| #define vslideup_vx_i8m1_m | ( | ... | ) | __riscv_vslideup_vx_i8m1_tumu(__VA_ARGS__) |
| #define vslideup_vx_i8m2 | ( | ... | ) | __riscv_vslideup_vx_i8m2_tu(__VA_ARGS__) |
| #define vslideup_vx_i8m2_m | ( | ... | ) | __riscv_vslideup_vx_i8m2_tumu(__VA_ARGS__) |
| #define vslideup_vx_i8m4 | ( | ... | ) | __riscv_vslideup_vx_i8m4_tu(__VA_ARGS__) |
| #define vslideup_vx_i8m4_m | ( | ... | ) | __riscv_vslideup_vx_i8m4_tumu(__VA_ARGS__) |
| #define vslideup_vx_i8m8 | ( | ... | ) | __riscv_vslideup_vx_i8m8_tu(__VA_ARGS__) |
| #define vslideup_vx_i8m8_m | ( | ... | ) | __riscv_vslideup_vx_i8m8_tumu(__VA_ARGS__) |
| #define vslideup_vx_i8mf2 | ( | ... | ) | __riscv_vslideup_vx_i8mf2_tu(__VA_ARGS__) |
| #define vslideup_vx_i8mf2_m | ( | ... | ) | __riscv_vslideup_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vslideup_vx_i8mf4 | ( | ... | ) | __riscv_vslideup_vx_i8mf4_tu(__VA_ARGS__) |
| #define vslideup_vx_i8mf4_m | ( | ... | ) | __riscv_vslideup_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vslideup_vx_i8mf8 | ( | ... | ) | __riscv_vslideup_vx_i8mf8_tu(__VA_ARGS__) |
| #define vslideup_vx_i8mf8_m | ( | ... | ) | __riscv_vslideup_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vslideup_vx_u16m1 | ( | ... | ) | __riscv_vslideup_vx_u16m1_tu(__VA_ARGS__) |
| #define vslideup_vx_u16m1_m | ( | ... | ) | __riscv_vslideup_vx_u16m1_tumu(__VA_ARGS__) |
| #define vslideup_vx_u16m2 | ( | ... | ) | __riscv_vslideup_vx_u16m2_tu(__VA_ARGS__) |
| #define vslideup_vx_u16m2_m | ( | ... | ) | __riscv_vslideup_vx_u16m2_tumu(__VA_ARGS__) |
| #define vslideup_vx_u16m4 | ( | ... | ) | __riscv_vslideup_vx_u16m4_tu(__VA_ARGS__) |
| #define vslideup_vx_u16m4_m | ( | ... | ) | __riscv_vslideup_vx_u16m4_tumu(__VA_ARGS__) |
| #define vslideup_vx_u16m8 | ( | ... | ) | __riscv_vslideup_vx_u16m8_tu(__VA_ARGS__) |
| #define vslideup_vx_u16m8_m | ( | ... | ) | __riscv_vslideup_vx_u16m8_tumu(__VA_ARGS__) |
| #define vslideup_vx_u16mf2 | ( | ... | ) | __riscv_vslideup_vx_u16mf2_tu(__VA_ARGS__) |
| #define vslideup_vx_u16mf2_m | ( | ... | ) | __riscv_vslideup_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vslideup_vx_u16mf4 | ( | ... | ) | __riscv_vslideup_vx_u16mf4_tu(__VA_ARGS__) |
| #define vslideup_vx_u16mf4_m | ( | ... | ) | __riscv_vslideup_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vslideup_vx_u32m1 | ( | ... | ) | __riscv_vslideup_vx_u32m1_tu(__VA_ARGS__) |
| #define vslideup_vx_u32m1_m | ( | ... | ) | __riscv_vslideup_vx_u32m1_tumu(__VA_ARGS__) |
| #define vslideup_vx_u32m2 | ( | ... | ) | __riscv_vslideup_vx_u32m2_tu(__VA_ARGS__) |
| #define vslideup_vx_u32m2_m | ( | ... | ) | __riscv_vslideup_vx_u32m2_tumu(__VA_ARGS__) |
| #define vslideup_vx_u32m4 | ( | ... | ) | __riscv_vslideup_vx_u32m4_tu(__VA_ARGS__) |
| #define vslideup_vx_u32m4_m | ( | ... | ) | __riscv_vslideup_vx_u32m4_tumu(__VA_ARGS__) |
| #define vslideup_vx_u32m8 | ( | ... | ) | __riscv_vslideup_vx_u32m8_tu(__VA_ARGS__) |
| #define vslideup_vx_u32m8_m | ( | ... | ) | __riscv_vslideup_vx_u32m8_tumu(__VA_ARGS__) |
| #define vslideup_vx_u32mf2 | ( | ... | ) | __riscv_vslideup_vx_u32mf2_tu(__VA_ARGS__) |
| #define vslideup_vx_u32mf2_m | ( | ... | ) | __riscv_vslideup_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vslideup_vx_u64m1 | ( | ... | ) | __riscv_vslideup_vx_u64m1_tu(__VA_ARGS__) |
| #define vslideup_vx_u64m1_m | ( | ... | ) | __riscv_vslideup_vx_u64m1_tumu(__VA_ARGS__) |
| #define vslideup_vx_u64m2 | ( | ... | ) | __riscv_vslideup_vx_u64m2_tu(__VA_ARGS__) |
| #define vslideup_vx_u64m2_m | ( | ... | ) | __riscv_vslideup_vx_u64m2_tumu(__VA_ARGS__) |
| #define vslideup_vx_u64m4 | ( | ... | ) | __riscv_vslideup_vx_u64m4_tu(__VA_ARGS__) |
| #define vslideup_vx_u64m4_m | ( | ... | ) | __riscv_vslideup_vx_u64m4_tumu(__VA_ARGS__) |
| #define vslideup_vx_u64m8 | ( | ... | ) | __riscv_vslideup_vx_u64m8_tu(__VA_ARGS__) |
| #define vslideup_vx_u64m8_m | ( | ... | ) | __riscv_vslideup_vx_u64m8_tumu(__VA_ARGS__) |
| #define vslideup_vx_u8m1 | ( | ... | ) | __riscv_vslideup_vx_u8m1_tu(__VA_ARGS__) |
| #define vslideup_vx_u8m1_m | ( | ... | ) | __riscv_vslideup_vx_u8m1_tumu(__VA_ARGS__) |
| #define vslideup_vx_u8m2 | ( | ... | ) | __riscv_vslideup_vx_u8m2_tu(__VA_ARGS__) |
| #define vslideup_vx_u8m2_m | ( | ... | ) | __riscv_vslideup_vx_u8m2_tumu(__VA_ARGS__) |
| #define vslideup_vx_u8m4 | ( | ... | ) | __riscv_vslideup_vx_u8m4_tu(__VA_ARGS__) |
| #define vslideup_vx_u8m4_m | ( | ... | ) | __riscv_vslideup_vx_u8m4_tumu(__VA_ARGS__) |
| #define vslideup_vx_u8m8 | ( | ... | ) | __riscv_vslideup_vx_u8m8_tu(__VA_ARGS__) |
| #define vslideup_vx_u8m8_m | ( | ... | ) | __riscv_vslideup_vx_u8m8_tumu(__VA_ARGS__) |
| #define vslideup_vx_u8mf2 | ( | ... | ) | __riscv_vslideup_vx_u8mf2_tu(__VA_ARGS__) |
| #define vslideup_vx_u8mf2_m | ( | ... | ) | __riscv_vslideup_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vslideup_vx_u8mf4 | ( | ... | ) | __riscv_vslideup_vx_u8mf4_tu(__VA_ARGS__) |
| #define vslideup_vx_u8mf4_m | ( | ... | ) | __riscv_vslideup_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vslideup_vx_u8mf8 | ( | ... | ) | __riscv_vslideup_vx_u8mf8_tu(__VA_ARGS__) |
| #define vslideup_vx_u8mf8_m | ( | ... | ) | __riscv_vslideup_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vsll_vv_i16m1 | ( | ... | ) | __riscv_vsll_vv_i16m1(__VA_ARGS__) |
| #define vsll_vv_i16m1_m | ( | ... | ) | __riscv_vsll_vv_i16m1_tumu(__VA_ARGS__) |
| #define vsll_vv_i16m2 | ( | ... | ) | __riscv_vsll_vv_i16m2(__VA_ARGS__) |
| #define vsll_vv_i16m2_m | ( | ... | ) | __riscv_vsll_vv_i16m2_tumu(__VA_ARGS__) |
| #define vsll_vv_i16m4 | ( | ... | ) | __riscv_vsll_vv_i16m4(__VA_ARGS__) |
| #define vsll_vv_i16m4_m | ( | ... | ) | __riscv_vsll_vv_i16m4_tumu(__VA_ARGS__) |
| #define vsll_vv_i16m8 | ( | ... | ) | __riscv_vsll_vv_i16m8(__VA_ARGS__) |
| #define vsll_vv_i16m8_m | ( | ... | ) | __riscv_vsll_vv_i16m8_tumu(__VA_ARGS__) |
| #define vsll_vv_i16mf2 | ( | ... | ) | __riscv_vsll_vv_i16mf2(__VA_ARGS__) |
| #define vsll_vv_i16mf2_m | ( | ... | ) | __riscv_vsll_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vsll_vv_i16mf4 | ( | ... | ) | __riscv_vsll_vv_i16mf4(__VA_ARGS__) |
| #define vsll_vv_i16mf4_m | ( | ... | ) | __riscv_vsll_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vsll_vv_i32m1 | ( | ... | ) | __riscv_vsll_vv_i32m1(__VA_ARGS__) |
| #define vsll_vv_i32m1_m | ( | ... | ) | __riscv_vsll_vv_i32m1_tumu(__VA_ARGS__) |
| #define vsll_vv_i32m2 | ( | ... | ) | __riscv_vsll_vv_i32m2(__VA_ARGS__) |
| #define vsll_vv_i32m2_m | ( | ... | ) | __riscv_vsll_vv_i32m2_tumu(__VA_ARGS__) |
| #define vsll_vv_i32m4 | ( | ... | ) | __riscv_vsll_vv_i32m4(__VA_ARGS__) |
| #define vsll_vv_i32m4_m | ( | ... | ) | __riscv_vsll_vv_i32m4_tumu(__VA_ARGS__) |
| #define vsll_vv_i32m8 | ( | ... | ) | __riscv_vsll_vv_i32m8(__VA_ARGS__) |
| #define vsll_vv_i32m8_m | ( | ... | ) | __riscv_vsll_vv_i32m8_tumu(__VA_ARGS__) |
| #define vsll_vv_i32mf2 | ( | ... | ) | __riscv_vsll_vv_i32mf2(__VA_ARGS__) |
| #define vsll_vv_i32mf2_m | ( | ... | ) | __riscv_vsll_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vsll_vv_i64m1 | ( | ... | ) | __riscv_vsll_vv_i64m1(__VA_ARGS__) |
| #define vsll_vv_i64m1_m | ( | ... | ) | __riscv_vsll_vv_i64m1_tumu(__VA_ARGS__) |
| #define vsll_vv_i64m2 | ( | ... | ) | __riscv_vsll_vv_i64m2(__VA_ARGS__) |
| #define vsll_vv_i64m2_m | ( | ... | ) | __riscv_vsll_vv_i64m2_tumu(__VA_ARGS__) |
| #define vsll_vv_i64m4 | ( | ... | ) | __riscv_vsll_vv_i64m4(__VA_ARGS__) |
| #define vsll_vv_i64m4_m | ( | ... | ) | __riscv_vsll_vv_i64m4_tumu(__VA_ARGS__) |
| #define vsll_vv_i64m8 | ( | ... | ) | __riscv_vsll_vv_i64m8(__VA_ARGS__) |
| #define vsll_vv_i64m8_m | ( | ... | ) | __riscv_vsll_vv_i64m8_tumu(__VA_ARGS__) |
| #define vsll_vv_i8m1 | ( | ... | ) | __riscv_vsll_vv_i8m1(__VA_ARGS__) |
| #define vsll_vv_i8m1_m | ( | ... | ) | __riscv_vsll_vv_i8m1_tumu(__VA_ARGS__) |
| #define vsll_vv_i8m2 | ( | ... | ) | __riscv_vsll_vv_i8m2(__VA_ARGS__) |
| #define vsll_vv_i8m2_m | ( | ... | ) | __riscv_vsll_vv_i8m2_tumu(__VA_ARGS__) |
| #define vsll_vv_i8m4 | ( | ... | ) | __riscv_vsll_vv_i8m4(__VA_ARGS__) |
| #define vsll_vv_i8m4_m | ( | ... | ) | __riscv_vsll_vv_i8m4_tumu(__VA_ARGS__) |
| #define vsll_vv_i8m8 | ( | ... | ) | __riscv_vsll_vv_i8m8(__VA_ARGS__) |
| #define vsll_vv_i8m8_m | ( | ... | ) | __riscv_vsll_vv_i8m8_tumu(__VA_ARGS__) |
| #define vsll_vv_i8mf2 | ( | ... | ) | __riscv_vsll_vv_i8mf2(__VA_ARGS__) |
| #define vsll_vv_i8mf2_m | ( | ... | ) | __riscv_vsll_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vsll_vv_i8mf4 | ( | ... | ) | __riscv_vsll_vv_i8mf4(__VA_ARGS__) |
| #define vsll_vv_i8mf4_m | ( | ... | ) | __riscv_vsll_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vsll_vv_i8mf8 | ( | ... | ) | __riscv_vsll_vv_i8mf8(__VA_ARGS__) |
| #define vsll_vv_i8mf8_m | ( | ... | ) | __riscv_vsll_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vsll_vv_u16m1 | ( | ... | ) | __riscv_vsll_vv_u16m1(__VA_ARGS__) |
| #define vsll_vv_u16m1_m | ( | ... | ) | __riscv_vsll_vv_u16m1_tumu(__VA_ARGS__) |
| #define vsll_vv_u16m2 | ( | ... | ) | __riscv_vsll_vv_u16m2(__VA_ARGS__) |
| #define vsll_vv_u16m2_m | ( | ... | ) | __riscv_vsll_vv_u16m2_tumu(__VA_ARGS__) |
| #define vsll_vv_u16m4 | ( | ... | ) | __riscv_vsll_vv_u16m4(__VA_ARGS__) |
| #define vsll_vv_u16m4_m | ( | ... | ) | __riscv_vsll_vv_u16m4_tumu(__VA_ARGS__) |
| #define vsll_vv_u16m8 | ( | ... | ) | __riscv_vsll_vv_u16m8(__VA_ARGS__) |
| #define vsll_vv_u16m8_m | ( | ... | ) | __riscv_vsll_vv_u16m8_tumu(__VA_ARGS__) |
| #define vsll_vv_u16mf2 | ( | ... | ) | __riscv_vsll_vv_u16mf2(__VA_ARGS__) |
| #define vsll_vv_u16mf2_m | ( | ... | ) | __riscv_vsll_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vsll_vv_u16mf4 | ( | ... | ) | __riscv_vsll_vv_u16mf4(__VA_ARGS__) |
| #define vsll_vv_u16mf4_m | ( | ... | ) | __riscv_vsll_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vsll_vv_u32m1 | ( | ... | ) | __riscv_vsll_vv_u32m1(__VA_ARGS__) |
| #define vsll_vv_u32m1_m | ( | ... | ) | __riscv_vsll_vv_u32m1_tumu(__VA_ARGS__) |
| #define vsll_vv_u32m2 | ( | ... | ) | __riscv_vsll_vv_u32m2(__VA_ARGS__) |
| #define vsll_vv_u32m2_m | ( | ... | ) | __riscv_vsll_vv_u32m2_tumu(__VA_ARGS__) |
| #define vsll_vv_u32m4 | ( | ... | ) | __riscv_vsll_vv_u32m4(__VA_ARGS__) |
| #define vsll_vv_u32m4_m | ( | ... | ) | __riscv_vsll_vv_u32m4_tumu(__VA_ARGS__) |
| #define vsll_vv_u32m8 | ( | ... | ) | __riscv_vsll_vv_u32m8(__VA_ARGS__) |
| #define vsll_vv_u32m8_m | ( | ... | ) | __riscv_vsll_vv_u32m8_tumu(__VA_ARGS__) |
| #define vsll_vv_u32mf2 | ( | ... | ) | __riscv_vsll_vv_u32mf2(__VA_ARGS__) |
| #define vsll_vv_u32mf2_m | ( | ... | ) | __riscv_vsll_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vsll_vv_u64m1 | ( | ... | ) | __riscv_vsll_vv_u64m1(__VA_ARGS__) |
| #define vsll_vv_u64m1_m | ( | ... | ) | __riscv_vsll_vv_u64m1_tumu(__VA_ARGS__) |
| #define vsll_vv_u64m2 | ( | ... | ) | __riscv_vsll_vv_u64m2(__VA_ARGS__) |
| #define vsll_vv_u64m2_m | ( | ... | ) | __riscv_vsll_vv_u64m2_tumu(__VA_ARGS__) |
| #define vsll_vv_u64m4 | ( | ... | ) | __riscv_vsll_vv_u64m4(__VA_ARGS__) |
| #define vsll_vv_u64m4_m | ( | ... | ) | __riscv_vsll_vv_u64m4_tumu(__VA_ARGS__) |
| #define vsll_vv_u64m8 | ( | ... | ) | __riscv_vsll_vv_u64m8(__VA_ARGS__) |
| #define vsll_vv_u64m8_m | ( | ... | ) | __riscv_vsll_vv_u64m8_tumu(__VA_ARGS__) |
| #define vsll_vv_u8m1 | ( | ... | ) | __riscv_vsll_vv_u8m1(__VA_ARGS__) |
| #define vsll_vv_u8m1_m | ( | ... | ) | __riscv_vsll_vv_u8m1_tumu(__VA_ARGS__) |
| #define vsll_vv_u8m2 | ( | ... | ) | __riscv_vsll_vv_u8m2(__VA_ARGS__) |
| #define vsll_vv_u8m2_m | ( | ... | ) | __riscv_vsll_vv_u8m2_tumu(__VA_ARGS__) |
| #define vsll_vv_u8m4 | ( | ... | ) | __riscv_vsll_vv_u8m4(__VA_ARGS__) |
| #define vsll_vv_u8m4_m | ( | ... | ) | __riscv_vsll_vv_u8m4_tumu(__VA_ARGS__) |
| #define vsll_vv_u8m8 | ( | ... | ) | __riscv_vsll_vv_u8m8(__VA_ARGS__) |
| #define vsll_vv_u8m8_m | ( | ... | ) | __riscv_vsll_vv_u8m8_tumu(__VA_ARGS__) |
| #define vsll_vv_u8mf2 | ( | ... | ) | __riscv_vsll_vv_u8mf2(__VA_ARGS__) |
| #define vsll_vv_u8mf2_m | ( | ... | ) | __riscv_vsll_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vsll_vv_u8mf4 | ( | ... | ) | __riscv_vsll_vv_u8mf4(__VA_ARGS__) |
| #define vsll_vv_u8mf4_m | ( | ... | ) | __riscv_vsll_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vsll_vv_u8mf8 | ( | ... | ) | __riscv_vsll_vv_u8mf8(__VA_ARGS__) |
| #define vsll_vv_u8mf8_m | ( | ... | ) | __riscv_vsll_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vsll_vx_i16m1 | ( | ... | ) | __riscv_vsll_vx_i16m1(__VA_ARGS__) |
| #define vsll_vx_i16m1_m | ( | ... | ) | __riscv_vsll_vx_i16m1_tumu(__VA_ARGS__) |
| #define vsll_vx_i16m2 | ( | ... | ) | __riscv_vsll_vx_i16m2(__VA_ARGS__) |
| #define vsll_vx_i16m2_m | ( | ... | ) | __riscv_vsll_vx_i16m2_tumu(__VA_ARGS__) |
| #define vsll_vx_i16m4 | ( | ... | ) | __riscv_vsll_vx_i16m4(__VA_ARGS__) |
| #define vsll_vx_i16m4_m | ( | ... | ) | __riscv_vsll_vx_i16m4_tumu(__VA_ARGS__) |
| #define vsll_vx_i16m8 | ( | ... | ) | __riscv_vsll_vx_i16m8(__VA_ARGS__) |
| #define vsll_vx_i16m8_m | ( | ... | ) | __riscv_vsll_vx_i16m8_tumu(__VA_ARGS__) |
| #define vsll_vx_i16mf2 | ( | ... | ) | __riscv_vsll_vx_i16mf2(__VA_ARGS__) |
| #define vsll_vx_i16mf2_m | ( | ... | ) | __riscv_vsll_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vsll_vx_i16mf4 | ( | ... | ) | __riscv_vsll_vx_i16mf4(__VA_ARGS__) |
| #define vsll_vx_i16mf4_m | ( | ... | ) | __riscv_vsll_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vsll_vx_i32m1 | ( | ... | ) | __riscv_vsll_vx_i32m1(__VA_ARGS__) |
| #define vsll_vx_i32m1_m | ( | ... | ) | __riscv_vsll_vx_i32m1_tumu(__VA_ARGS__) |
| #define vsll_vx_i32m2 | ( | ... | ) | __riscv_vsll_vx_i32m2(__VA_ARGS__) |
| #define vsll_vx_i32m2_m | ( | ... | ) | __riscv_vsll_vx_i32m2_tumu(__VA_ARGS__) |
| #define vsll_vx_i32m4 | ( | ... | ) | __riscv_vsll_vx_i32m4(__VA_ARGS__) |
| #define vsll_vx_i32m4_m | ( | ... | ) | __riscv_vsll_vx_i32m4_tumu(__VA_ARGS__) |
| #define vsll_vx_i32m8 | ( | ... | ) | __riscv_vsll_vx_i32m8(__VA_ARGS__) |
| #define vsll_vx_i32m8_m | ( | ... | ) | __riscv_vsll_vx_i32m8_tumu(__VA_ARGS__) |
| #define vsll_vx_i32mf2 | ( | ... | ) | __riscv_vsll_vx_i32mf2(__VA_ARGS__) |
| #define vsll_vx_i32mf2_m | ( | ... | ) | __riscv_vsll_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vsll_vx_i64m1 | ( | ... | ) | __riscv_vsll_vx_i64m1(__VA_ARGS__) |
| #define vsll_vx_i64m1_m | ( | ... | ) | __riscv_vsll_vx_i64m1_tumu(__VA_ARGS__) |
| #define vsll_vx_i64m2 | ( | ... | ) | __riscv_vsll_vx_i64m2(__VA_ARGS__) |
| #define vsll_vx_i64m2_m | ( | ... | ) | __riscv_vsll_vx_i64m2_tumu(__VA_ARGS__) |
| #define vsll_vx_i64m4 | ( | ... | ) | __riscv_vsll_vx_i64m4(__VA_ARGS__) |
| #define vsll_vx_i64m4_m | ( | ... | ) | __riscv_vsll_vx_i64m4_tumu(__VA_ARGS__) |
| #define vsll_vx_i64m8 | ( | ... | ) | __riscv_vsll_vx_i64m8(__VA_ARGS__) |
| #define vsll_vx_i64m8_m | ( | ... | ) | __riscv_vsll_vx_i64m8_tumu(__VA_ARGS__) |
| #define vsll_vx_i8m1 | ( | ... | ) | __riscv_vsll_vx_i8m1(__VA_ARGS__) |
| #define vsll_vx_i8m1_m | ( | ... | ) | __riscv_vsll_vx_i8m1_tumu(__VA_ARGS__) |
| #define vsll_vx_i8m2 | ( | ... | ) | __riscv_vsll_vx_i8m2(__VA_ARGS__) |
| #define vsll_vx_i8m2_m | ( | ... | ) | __riscv_vsll_vx_i8m2_tumu(__VA_ARGS__) |
| #define vsll_vx_i8m4 | ( | ... | ) | __riscv_vsll_vx_i8m4(__VA_ARGS__) |
| #define vsll_vx_i8m4_m | ( | ... | ) | __riscv_vsll_vx_i8m4_tumu(__VA_ARGS__) |
| #define vsll_vx_i8m8 | ( | ... | ) | __riscv_vsll_vx_i8m8(__VA_ARGS__) |
| #define vsll_vx_i8m8_m | ( | ... | ) | __riscv_vsll_vx_i8m8_tumu(__VA_ARGS__) |
| #define vsll_vx_i8mf2 | ( | ... | ) | __riscv_vsll_vx_i8mf2(__VA_ARGS__) |
| #define vsll_vx_i8mf2_m | ( | ... | ) | __riscv_vsll_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vsll_vx_i8mf4 | ( | ... | ) | __riscv_vsll_vx_i8mf4(__VA_ARGS__) |
| #define vsll_vx_i8mf4_m | ( | ... | ) | __riscv_vsll_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vsll_vx_i8mf8 | ( | ... | ) | __riscv_vsll_vx_i8mf8(__VA_ARGS__) |
| #define vsll_vx_i8mf8_m | ( | ... | ) | __riscv_vsll_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vsll_vx_u16m1 | ( | ... | ) | __riscv_vsll_vx_u16m1(__VA_ARGS__) |
| #define vsll_vx_u16m1_m | ( | ... | ) | __riscv_vsll_vx_u16m1_tumu(__VA_ARGS__) |
| #define vsll_vx_u16m2 | ( | ... | ) | __riscv_vsll_vx_u16m2(__VA_ARGS__) |
| #define vsll_vx_u16m2_m | ( | ... | ) | __riscv_vsll_vx_u16m2_tumu(__VA_ARGS__) |
| #define vsll_vx_u16m4 | ( | ... | ) | __riscv_vsll_vx_u16m4(__VA_ARGS__) |
| #define vsll_vx_u16m4_m | ( | ... | ) | __riscv_vsll_vx_u16m4_tumu(__VA_ARGS__) |
| #define vsll_vx_u16m8 | ( | ... | ) | __riscv_vsll_vx_u16m8(__VA_ARGS__) |
| #define vsll_vx_u16m8_m | ( | ... | ) | __riscv_vsll_vx_u16m8_tumu(__VA_ARGS__) |
| #define vsll_vx_u16mf2 | ( | ... | ) | __riscv_vsll_vx_u16mf2(__VA_ARGS__) |
| #define vsll_vx_u16mf2_m | ( | ... | ) | __riscv_vsll_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vsll_vx_u16mf4 | ( | ... | ) | __riscv_vsll_vx_u16mf4(__VA_ARGS__) |
| #define vsll_vx_u16mf4_m | ( | ... | ) | __riscv_vsll_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vsll_vx_u32m1 | ( | ... | ) | __riscv_vsll_vx_u32m1(__VA_ARGS__) |
| #define vsll_vx_u32m1_m | ( | ... | ) | __riscv_vsll_vx_u32m1_tumu(__VA_ARGS__) |
| #define vsll_vx_u32m2 | ( | ... | ) | __riscv_vsll_vx_u32m2(__VA_ARGS__) |
| #define vsll_vx_u32m2_m | ( | ... | ) | __riscv_vsll_vx_u32m2_tumu(__VA_ARGS__) |
| #define vsll_vx_u32m4 | ( | ... | ) | __riscv_vsll_vx_u32m4(__VA_ARGS__) |
| #define vsll_vx_u32m4_m | ( | ... | ) | __riscv_vsll_vx_u32m4_tumu(__VA_ARGS__) |
| #define vsll_vx_u32m8 | ( | ... | ) | __riscv_vsll_vx_u32m8(__VA_ARGS__) |
| #define vsll_vx_u32m8_m | ( | ... | ) | __riscv_vsll_vx_u32m8_tumu(__VA_ARGS__) |
| #define vsll_vx_u32mf2 | ( | ... | ) | __riscv_vsll_vx_u32mf2(__VA_ARGS__) |
| #define vsll_vx_u32mf2_m | ( | ... | ) | __riscv_vsll_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vsll_vx_u64m1 | ( | ... | ) | __riscv_vsll_vx_u64m1(__VA_ARGS__) |
| #define vsll_vx_u64m1_m | ( | ... | ) | __riscv_vsll_vx_u64m1_tumu(__VA_ARGS__) |
| #define vsll_vx_u64m2 | ( | ... | ) | __riscv_vsll_vx_u64m2(__VA_ARGS__) |
| #define vsll_vx_u64m2_m | ( | ... | ) | __riscv_vsll_vx_u64m2_tumu(__VA_ARGS__) |
| #define vsll_vx_u64m4 | ( | ... | ) | __riscv_vsll_vx_u64m4(__VA_ARGS__) |
| #define vsll_vx_u64m4_m | ( | ... | ) | __riscv_vsll_vx_u64m4_tumu(__VA_ARGS__) |
| #define vsll_vx_u64m8 | ( | ... | ) | __riscv_vsll_vx_u64m8(__VA_ARGS__) |
| #define vsll_vx_u64m8_m | ( | ... | ) | __riscv_vsll_vx_u64m8_tumu(__VA_ARGS__) |
| #define vsll_vx_u8m1 | ( | ... | ) | __riscv_vsll_vx_u8m1(__VA_ARGS__) |
| #define vsll_vx_u8m1_m | ( | ... | ) | __riscv_vsll_vx_u8m1_tumu(__VA_ARGS__) |
| #define vsll_vx_u8m2 | ( | ... | ) | __riscv_vsll_vx_u8m2(__VA_ARGS__) |
| #define vsll_vx_u8m2_m | ( | ... | ) | __riscv_vsll_vx_u8m2_tumu(__VA_ARGS__) |
| #define vsll_vx_u8m4 | ( | ... | ) | __riscv_vsll_vx_u8m4(__VA_ARGS__) |
| #define vsll_vx_u8m4_m | ( | ... | ) | __riscv_vsll_vx_u8m4_tumu(__VA_ARGS__) |
| #define vsll_vx_u8m8 | ( | ... | ) | __riscv_vsll_vx_u8m8(__VA_ARGS__) |
| #define vsll_vx_u8m8_m | ( | ... | ) | __riscv_vsll_vx_u8m8_tumu(__VA_ARGS__) |
| #define vsll_vx_u8mf2 | ( | ... | ) | __riscv_vsll_vx_u8mf2(__VA_ARGS__) |
| #define vsll_vx_u8mf2_m | ( | ... | ) | __riscv_vsll_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vsll_vx_u8mf4 | ( | ... | ) | __riscv_vsll_vx_u8mf4(__VA_ARGS__) |
| #define vsll_vx_u8mf4_m | ( | ... | ) | __riscv_vsll_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vsll_vx_u8mf8 | ( | ... | ) | __riscv_vsll_vx_u8mf8(__VA_ARGS__) |
| #define vsll_vx_u8mf8_m | ( | ... | ) | __riscv_vsll_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vsm_v_b1 | ( | ... | ) | __riscv_vsm_v_b1(__VA_ARGS__) |
| #define vsm_v_b16 | ( | ... | ) | __riscv_vsm_v_b16(__VA_ARGS__) |
| #define vsm_v_b2 | ( | ... | ) | __riscv_vsm_v_b2(__VA_ARGS__) |
| #define vsm_v_b32 | ( | ... | ) | __riscv_vsm_v_b32(__VA_ARGS__) |
| #define vsm_v_b4 | ( | ... | ) | __riscv_vsm_v_b4(__VA_ARGS__) |
| #define vsm_v_b64 | ( | ... | ) | __riscv_vsm_v_b64(__VA_ARGS__) |
| #define vsm_v_b8 | ( | ... | ) | __riscv_vsm_v_b8(__VA_ARGS__) |
| #define vsmul_vv_i16m1 | ( | ... | ) | __riscv_vsmul_vv_i16m1(__VA_ARGS__) |
| #define vsmul_vv_i16m1_m | ( | ... | ) | __riscv_vsmul_vv_i16m1_mu(__VA_ARGS__) |
| #define vsmul_vv_i16m2 | ( | ... | ) | __riscv_vsmul_vv_i16m2(__VA_ARGS__) |
| #define vsmul_vv_i16m2_m | ( | ... | ) | __riscv_vsmul_vv_i16m2_mu(__VA_ARGS__) |
| #define vsmul_vv_i16m4 | ( | ... | ) | __riscv_vsmul_vv_i16m4(__VA_ARGS__) |
| #define vsmul_vv_i16m4_m | ( | ... | ) | __riscv_vsmul_vv_i16m4_mu(__VA_ARGS__) |
| #define vsmul_vv_i16m8 | ( | ... | ) | __riscv_vsmul_vv_i16m8(__VA_ARGS__) |
| #define vsmul_vv_i16m8_m | ( | ... | ) | __riscv_vsmul_vv_i16m8_mu(__VA_ARGS__) |
| #define vsmul_vv_i16mf2 | ( | ... | ) | __riscv_vsmul_vv_i16mf2(__VA_ARGS__) |
| #define vsmul_vv_i16mf2_m | ( | ... | ) | __riscv_vsmul_vv_i16mf2_mu(__VA_ARGS__) |
| #define vsmul_vv_i16mf4 | ( | ... | ) | __riscv_vsmul_vv_i16mf4(__VA_ARGS__) |
| #define vsmul_vv_i16mf4_m | ( | ... | ) | __riscv_vsmul_vv_i16mf4_mu(__VA_ARGS__) |
| #define vsmul_vv_i32m1 | ( | ... | ) | __riscv_vsmul_vv_i32m1(__VA_ARGS__) |
| #define vsmul_vv_i32m1_m | ( | ... | ) | __riscv_vsmul_vv_i32m1_mu(__VA_ARGS__) |
| #define vsmul_vv_i32m2 | ( | ... | ) | __riscv_vsmul_vv_i32m2(__VA_ARGS__) |
| #define vsmul_vv_i32m2_m | ( | ... | ) | __riscv_vsmul_vv_i32m2_mu(__VA_ARGS__) |
| #define vsmul_vv_i32m4 | ( | ... | ) | __riscv_vsmul_vv_i32m4(__VA_ARGS__) |
| #define vsmul_vv_i32m4_m | ( | ... | ) | __riscv_vsmul_vv_i32m4_mu(__VA_ARGS__) |
| #define vsmul_vv_i32m8 | ( | ... | ) | __riscv_vsmul_vv_i32m8(__VA_ARGS__) |
| #define vsmul_vv_i32m8_m | ( | ... | ) | __riscv_vsmul_vv_i32m8_mu(__VA_ARGS__) |
| #define vsmul_vv_i32mf2 | ( | ... | ) | __riscv_vsmul_vv_i32mf2(__VA_ARGS__) |
| #define vsmul_vv_i32mf2_m | ( | ... | ) | __riscv_vsmul_vv_i32mf2_mu(__VA_ARGS__) |
| #define vsmul_vv_i64m1 | ( | ... | ) | __riscv_vsmul_vv_i64m1(__VA_ARGS__) |
| #define vsmul_vv_i64m1_m | ( | ... | ) | __riscv_vsmul_vv_i64m1_mu(__VA_ARGS__) |
| #define vsmul_vv_i64m2 | ( | ... | ) | __riscv_vsmul_vv_i64m2(__VA_ARGS__) |
| #define vsmul_vv_i64m2_m | ( | ... | ) | __riscv_vsmul_vv_i64m2_mu(__VA_ARGS__) |
| #define vsmul_vv_i64m4 | ( | ... | ) | __riscv_vsmul_vv_i64m4(__VA_ARGS__) |
| #define vsmul_vv_i64m4_m | ( | ... | ) | __riscv_vsmul_vv_i64m4_mu(__VA_ARGS__) |
| #define vsmul_vv_i64m8 | ( | ... | ) | __riscv_vsmul_vv_i64m8(__VA_ARGS__) |
| #define vsmul_vv_i64m8_m | ( | ... | ) | __riscv_vsmul_vv_i64m8_mu(__VA_ARGS__) |
| #define vsmul_vv_i8m1 | ( | ... | ) | __riscv_vsmul_vv_i8m1(__VA_ARGS__) |
| #define vsmul_vv_i8m1_m | ( | ... | ) | __riscv_vsmul_vv_i8m1_mu(__VA_ARGS__) |
| #define vsmul_vv_i8m2 | ( | ... | ) | __riscv_vsmul_vv_i8m2(__VA_ARGS__) |
| #define vsmul_vv_i8m2_m | ( | ... | ) | __riscv_vsmul_vv_i8m2_mu(__VA_ARGS__) |
| #define vsmul_vv_i8m4 | ( | ... | ) | __riscv_vsmul_vv_i8m4(__VA_ARGS__) |
| #define vsmul_vv_i8m4_m | ( | ... | ) | __riscv_vsmul_vv_i8m4_mu(__VA_ARGS__) |
| #define vsmul_vv_i8m8 | ( | ... | ) | __riscv_vsmul_vv_i8m8(__VA_ARGS__) |
| #define vsmul_vv_i8m8_m | ( | ... | ) | __riscv_vsmul_vv_i8m8_mu(__VA_ARGS__) |
| #define vsmul_vv_i8mf2 | ( | ... | ) | __riscv_vsmul_vv_i8mf2(__VA_ARGS__) |
| #define vsmul_vv_i8mf2_m | ( | ... | ) | __riscv_vsmul_vv_i8mf2_mu(__VA_ARGS__) |
| #define vsmul_vv_i8mf4 | ( | ... | ) | __riscv_vsmul_vv_i8mf4(__VA_ARGS__) |
| #define vsmul_vv_i8mf4_m | ( | ... | ) | __riscv_vsmul_vv_i8mf4_mu(__VA_ARGS__) |
| #define vsmul_vv_i8mf8 | ( | ... | ) | __riscv_vsmul_vv_i8mf8(__VA_ARGS__) |
| #define vsmul_vv_i8mf8_m | ( | ... | ) | __riscv_vsmul_vv_i8mf8_mu(__VA_ARGS__) |
| #define vsmul_vx_i16m1 | ( | ... | ) | __riscv_vsmul_vx_i16m1(__VA_ARGS__) |
| #define vsmul_vx_i16m1_m | ( | ... | ) | __riscv_vsmul_vx_i16m1_mu(__VA_ARGS__) |
| #define vsmul_vx_i16m2 | ( | ... | ) | __riscv_vsmul_vx_i16m2(__VA_ARGS__) |
| #define vsmul_vx_i16m2_m | ( | ... | ) | __riscv_vsmul_vx_i16m2_mu(__VA_ARGS__) |
| #define vsmul_vx_i16m4 | ( | ... | ) | __riscv_vsmul_vx_i16m4(__VA_ARGS__) |
| #define vsmul_vx_i16m4_m | ( | ... | ) | __riscv_vsmul_vx_i16m4_mu(__VA_ARGS__) |
| #define vsmul_vx_i16m8 | ( | ... | ) | __riscv_vsmul_vx_i16m8(__VA_ARGS__) |
| #define vsmul_vx_i16m8_m | ( | ... | ) | __riscv_vsmul_vx_i16m8_mu(__VA_ARGS__) |
| #define vsmul_vx_i16mf2 | ( | ... | ) | __riscv_vsmul_vx_i16mf2(__VA_ARGS__) |
| #define vsmul_vx_i16mf2_m | ( | ... | ) | __riscv_vsmul_vx_i16mf2_mu(__VA_ARGS__) |
| #define vsmul_vx_i16mf4 | ( | ... | ) | __riscv_vsmul_vx_i16mf4(__VA_ARGS__) |
| #define vsmul_vx_i16mf4_m | ( | ... | ) | __riscv_vsmul_vx_i16mf4_mu(__VA_ARGS__) |
| #define vsmul_vx_i32m1 | ( | ... | ) | __riscv_vsmul_vx_i32m1(__VA_ARGS__) |
| #define vsmul_vx_i32m1_m | ( | ... | ) | __riscv_vsmul_vx_i32m1_mu(__VA_ARGS__) |
| #define vsmul_vx_i32m2 | ( | ... | ) | __riscv_vsmul_vx_i32m2(__VA_ARGS__) |
| #define vsmul_vx_i32m2_m | ( | ... | ) | __riscv_vsmul_vx_i32m2_mu(__VA_ARGS__) |
| #define vsmul_vx_i32m4 | ( | ... | ) | __riscv_vsmul_vx_i32m4(__VA_ARGS__) |
| #define vsmul_vx_i32m4_m | ( | ... | ) | __riscv_vsmul_vx_i32m4_mu(__VA_ARGS__) |
| #define vsmul_vx_i32m8 | ( | ... | ) | __riscv_vsmul_vx_i32m8(__VA_ARGS__) |
| #define vsmul_vx_i32m8_m | ( | ... | ) | __riscv_vsmul_vx_i32m8_mu(__VA_ARGS__) |
| #define vsmul_vx_i32mf2 | ( | ... | ) | __riscv_vsmul_vx_i32mf2(__VA_ARGS__) |
| #define vsmul_vx_i32mf2_m | ( | ... | ) | __riscv_vsmul_vx_i32mf2_mu(__VA_ARGS__) |
| #define vsmul_vx_i64m1 | ( | ... | ) | __riscv_vsmul_vx_i64m1(__VA_ARGS__) |
| #define vsmul_vx_i64m1_m | ( | ... | ) | __riscv_vsmul_vx_i64m1_mu(__VA_ARGS__) |
| #define vsmul_vx_i64m2 | ( | ... | ) | __riscv_vsmul_vx_i64m2(__VA_ARGS__) |
| #define vsmul_vx_i64m2_m | ( | ... | ) | __riscv_vsmul_vx_i64m2_mu(__VA_ARGS__) |
| #define vsmul_vx_i64m4 | ( | ... | ) | __riscv_vsmul_vx_i64m4(__VA_ARGS__) |
| #define vsmul_vx_i64m4_m | ( | ... | ) | __riscv_vsmul_vx_i64m4_mu(__VA_ARGS__) |
| #define vsmul_vx_i64m8 | ( | ... | ) | __riscv_vsmul_vx_i64m8(__VA_ARGS__) |
| #define vsmul_vx_i64m8_m | ( | ... | ) | __riscv_vsmul_vx_i64m8_mu(__VA_ARGS__) |
| #define vsmul_vx_i8m1 | ( | ... | ) | __riscv_vsmul_vx_i8m1(__VA_ARGS__) |
| #define vsmul_vx_i8m1_m | ( | ... | ) | __riscv_vsmul_vx_i8m1_mu(__VA_ARGS__) |
| #define vsmul_vx_i8m2 | ( | ... | ) | __riscv_vsmul_vx_i8m2(__VA_ARGS__) |
| #define vsmul_vx_i8m2_m | ( | ... | ) | __riscv_vsmul_vx_i8m2_mu(__VA_ARGS__) |
| #define vsmul_vx_i8m4 | ( | ... | ) | __riscv_vsmul_vx_i8m4(__VA_ARGS__) |
| #define vsmul_vx_i8m4_m | ( | ... | ) | __riscv_vsmul_vx_i8m4_mu(__VA_ARGS__) |
| #define vsmul_vx_i8m8 | ( | ... | ) | __riscv_vsmul_vx_i8m8(__VA_ARGS__) |
| #define vsmul_vx_i8m8_m | ( | ... | ) | __riscv_vsmul_vx_i8m8_mu(__VA_ARGS__) |
| #define vsmul_vx_i8mf2 | ( | ... | ) | __riscv_vsmul_vx_i8mf2(__VA_ARGS__) |
| #define vsmul_vx_i8mf2_m | ( | ... | ) | __riscv_vsmul_vx_i8mf2_mu(__VA_ARGS__) |
| #define vsmul_vx_i8mf4 | ( | ... | ) | __riscv_vsmul_vx_i8mf4(__VA_ARGS__) |
| #define vsmul_vx_i8mf4_m | ( | ... | ) | __riscv_vsmul_vx_i8mf4_mu(__VA_ARGS__) |
| #define vsmul_vx_i8mf8 | ( | ... | ) | __riscv_vsmul_vx_i8mf8(__VA_ARGS__) |
| #define vsmul_vx_i8mf8_m | ( | ... | ) | __riscv_vsmul_vx_i8mf8_mu(__VA_ARGS__) |
| #define vsoxei16_v_f16m1 | ( | ... | ) | __riscv_vsoxei16_v_f16m1(__VA_ARGS__) |
| #define vsoxei16_v_f16m1_m | ( | ... | ) | __riscv_vsoxei16_v_f16m1_m(__VA_ARGS__) |
| #define vsoxei16_v_f16m2 | ( | ... | ) | __riscv_vsoxei16_v_f16m2(__VA_ARGS__) |
| #define vsoxei16_v_f16m2_m | ( | ... | ) | __riscv_vsoxei16_v_f16m2_m(__VA_ARGS__) |
| #define vsoxei16_v_f16m4 | ( | ... | ) | __riscv_vsoxei16_v_f16m4(__VA_ARGS__) |
| #define vsoxei16_v_f16m4_m | ( | ... | ) | __riscv_vsoxei16_v_f16m4_m(__VA_ARGS__) |
| #define vsoxei16_v_f16m8 | ( | ... | ) | __riscv_vsoxei16_v_f16m8(__VA_ARGS__) |
| #define vsoxei16_v_f16m8_m | ( | ... | ) | __riscv_vsoxei16_v_f16m8_m(__VA_ARGS__) |
| #define vsoxei16_v_f16mf2 | ( | ... | ) | __riscv_vsoxei16_v_f16mf2(__VA_ARGS__) |
| #define vsoxei16_v_f16mf2_m | ( | ... | ) | __riscv_vsoxei16_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxei16_v_f16mf4 | ( | ... | ) | __riscv_vsoxei16_v_f16mf4(__VA_ARGS__) |
| #define vsoxei16_v_f16mf4_m | ( | ... | ) | __riscv_vsoxei16_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxei16_v_f32m1 | ( | ... | ) | __riscv_vsoxei16_v_f32m1(__VA_ARGS__) |
| #define vsoxei16_v_f32m1_m | ( | ... | ) | __riscv_vsoxei16_v_f32m1_m(__VA_ARGS__) |
| #define vsoxei16_v_f32m2 | ( | ... | ) | __riscv_vsoxei16_v_f32m2(__VA_ARGS__) |
| #define vsoxei16_v_f32m2_m | ( | ... | ) | __riscv_vsoxei16_v_f32m2_m(__VA_ARGS__) |
| #define vsoxei16_v_f32m4 | ( | ... | ) | __riscv_vsoxei16_v_f32m4(__VA_ARGS__) |
| #define vsoxei16_v_f32m4_m | ( | ... | ) | __riscv_vsoxei16_v_f32m4_m(__VA_ARGS__) |
| #define vsoxei16_v_f32m8 | ( | ... | ) | __riscv_vsoxei16_v_f32m8(__VA_ARGS__) |
| #define vsoxei16_v_f32m8_m | ( | ... | ) | __riscv_vsoxei16_v_f32m8_m(__VA_ARGS__) |
| #define vsoxei16_v_f32mf2 | ( | ... | ) | __riscv_vsoxei16_v_f32mf2(__VA_ARGS__) |
| #define vsoxei16_v_f32mf2_m | ( | ... | ) | __riscv_vsoxei16_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxei16_v_f64m1 | ( | ... | ) | __riscv_vsoxei16_v_f64m1(__VA_ARGS__) |
| #define vsoxei16_v_f64m1_m | ( | ... | ) | __riscv_vsoxei16_v_f64m1_m(__VA_ARGS__) |
| #define vsoxei16_v_f64m2 | ( | ... | ) | __riscv_vsoxei16_v_f64m2(__VA_ARGS__) |
| #define vsoxei16_v_f64m2_m | ( | ... | ) | __riscv_vsoxei16_v_f64m2_m(__VA_ARGS__) |
| #define vsoxei16_v_f64m4 | ( | ... | ) | __riscv_vsoxei16_v_f64m4(__VA_ARGS__) |
| #define vsoxei16_v_f64m4_m | ( | ... | ) | __riscv_vsoxei16_v_f64m4_m(__VA_ARGS__) |
| #define vsoxei16_v_f64m8 | ( | ... | ) | __riscv_vsoxei16_v_f64m8(__VA_ARGS__) |
| #define vsoxei16_v_f64m8_m | ( | ... | ) | __riscv_vsoxei16_v_f64m8_m(__VA_ARGS__) |
| #define vsoxei16_v_i16m1 | ( | ... | ) | __riscv_vsoxei16_v_i16m1(__VA_ARGS__) |
| #define vsoxei16_v_i16m1_m | ( | ... | ) | __riscv_vsoxei16_v_i16m1_m(__VA_ARGS__) |
| #define vsoxei16_v_i16m2 | ( | ... | ) | __riscv_vsoxei16_v_i16m2(__VA_ARGS__) |
| #define vsoxei16_v_i16m2_m | ( | ... | ) | __riscv_vsoxei16_v_i16m2_m(__VA_ARGS__) |
| #define vsoxei16_v_i16m4 | ( | ... | ) | __riscv_vsoxei16_v_i16m4(__VA_ARGS__) |
| #define vsoxei16_v_i16m4_m | ( | ... | ) | __riscv_vsoxei16_v_i16m4_m(__VA_ARGS__) |
| #define vsoxei16_v_i16m8 | ( | ... | ) | __riscv_vsoxei16_v_i16m8(__VA_ARGS__) |
| #define vsoxei16_v_i16m8_m | ( | ... | ) | __riscv_vsoxei16_v_i16m8_m(__VA_ARGS__) |
| #define vsoxei16_v_i16mf2 | ( | ... | ) | __riscv_vsoxei16_v_i16mf2(__VA_ARGS__) |
| #define vsoxei16_v_i16mf2_m | ( | ... | ) | __riscv_vsoxei16_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxei16_v_i16mf4 | ( | ... | ) | __riscv_vsoxei16_v_i16mf4(__VA_ARGS__) |
| #define vsoxei16_v_i16mf4_m | ( | ... | ) | __riscv_vsoxei16_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxei16_v_i32m1 | ( | ... | ) | __riscv_vsoxei16_v_i32m1(__VA_ARGS__) |
| #define vsoxei16_v_i32m1_m | ( | ... | ) | __riscv_vsoxei16_v_i32m1_m(__VA_ARGS__) |
| #define vsoxei16_v_i32m2 | ( | ... | ) | __riscv_vsoxei16_v_i32m2(__VA_ARGS__) |
| #define vsoxei16_v_i32m2_m | ( | ... | ) | __riscv_vsoxei16_v_i32m2_m(__VA_ARGS__) |
| #define vsoxei16_v_i32m4 | ( | ... | ) | __riscv_vsoxei16_v_i32m4(__VA_ARGS__) |
| #define vsoxei16_v_i32m4_m | ( | ... | ) | __riscv_vsoxei16_v_i32m4_m(__VA_ARGS__) |
| #define vsoxei16_v_i32m8 | ( | ... | ) | __riscv_vsoxei16_v_i32m8(__VA_ARGS__) |
| #define vsoxei16_v_i32m8_m | ( | ... | ) | __riscv_vsoxei16_v_i32m8_m(__VA_ARGS__) |
| #define vsoxei16_v_i32mf2 | ( | ... | ) | __riscv_vsoxei16_v_i32mf2(__VA_ARGS__) |
| #define vsoxei16_v_i32mf2_m | ( | ... | ) | __riscv_vsoxei16_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxei16_v_i64m1 | ( | ... | ) | __riscv_vsoxei16_v_i64m1(__VA_ARGS__) |
| #define vsoxei16_v_i64m1_m | ( | ... | ) | __riscv_vsoxei16_v_i64m1_m(__VA_ARGS__) |
| #define vsoxei16_v_i64m2 | ( | ... | ) | __riscv_vsoxei16_v_i64m2(__VA_ARGS__) |
| #define vsoxei16_v_i64m2_m | ( | ... | ) | __riscv_vsoxei16_v_i64m2_m(__VA_ARGS__) |
| #define vsoxei16_v_i64m4 | ( | ... | ) | __riscv_vsoxei16_v_i64m4(__VA_ARGS__) |
| #define vsoxei16_v_i64m4_m | ( | ... | ) | __riscv_vsoxei16_v_i64m4_m(__VA_ARGS__) |
| #define vsoxei16_v_i64m8 | ( | ... | ) | __riscv_vsoxei16_v_i64m8(__VA_ARGS__) |
| #define vsoxei16_v_i64m8_m | ( | ... | ) | __riscv_vsoxei16_v_i64m8_m(__VA_ARGS__) |
| #define vsoxei16_v_i8m1 | ( | ... | ) | __riscv_vsoxei16_v_i8m1(__VA_ARGS__) |
| #define vsoxei16_v_i8m1_m | ( | ... | ) | __riscv_vsoxei16_v_i8m1_m(__VA_ARGS__) |
| #define vsoxei16_v_i8m2 | ( | ... | ) | __riscv_vsoxei16_v_i8m2(__VA_ARGS__) |
| #define vsoxei16_v_i8m2_m | ( | ... | ) | __riscv_vsoxei16_v_i8m2_m(__VA_ARGS__) |
| #define vsoxei16_v_i8m4 | ( | ... | ) | __riscv_vsoxei16_v_i8m4(__VA_ARGS__) |
| #define vsoxei16_v_i8m4_m | ( | ... | ) | __riscv_vsoxei16_v_i8m4_m(__VA_ARGS__) |
| #define vsoxei16_v_i8mf2 | ( | ... | ) | __riscv_vsoxei16_v_i8mf2(__VA_ARGS__) |
| #define vsoxei16_v_i8mf2_m | ( | ... | ) | __riscv_vsoxei16_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxei16_v_i8mf4 | ( | ... | ) | __riscv_vsoxei16_v_i8mf4(__VA_ARGS__) |
| #define vsoxei16_v_i8mf4_m | ( | ... | ) | __riscv_vsoxei16_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxei16_v_i8mf8 | ( | ... | ) | __riscv_vsoxei16_v_i8mf8(__VA_ARGS__) |
| #define vsoxei16_v_i8mf8_m | ( | ... | ) | __riscv_vsoxei16_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxei16_v_u16m1 | ( | ... | ) | __riscv_vsoxei16_v_u16m1(__VA_ARGS__) |
| #define vsoxei16_v_u16m1_m | ( | ... | ) | __riscv_vsoxei16_v_u16m1_m(__VA_ARGS__) |
| #define vsoxei16_v_u16m2 | ( | ... | ) | __riscv_vsoxei16_v_u16m2(__VA_ARGS__) |
| #define vsoxei16_v_u16m2_m | ( | ... | ) | __riscv_vsoxei16_v_u16m2_m(__VA_ARGS__) |
| #define vsoxei16_v_u16m4 | ( | ... | ) | __riscv_vsoxei16_v_u16m4(__VA_ARGS__) |
| #define vsoxei16_v_u16m4_m | ( | ... | ) | __riscv_vsoxei16_v_u16m4_m(__VA_ARGS__) |
| #define vsoxei16_v_u16m8 | ( | ... | ) | __riscv_vsoxei16_v_u16m8(__VA_ARGS__) |
| #define vsoxei16_v_u16m8_m | ( | ... | ) | __riscv_vsoxei16_v_u16m8_m(__VA_ARGS__) |
| #define vsoxei16_v_u16mf2 | ( | ... | ) | __riscv_vsoxei16_v_u16mf2(__VA_ARGS__) |
| #define vsoxei16_v_u16mf2_m | ( | ... | ) | __riscv_vsoxei16_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxei16_v_u16mf4 | ( | ... | ) | __riscv_vsoxei16_v_u16mf4(__VA_ARGS__) |
| #define vsoxei16_v_u16mf4_m | ( | ... | ) | __riscv_vsoxei16_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxei16_v_u32m1 | ( | ... | ) | __riscv_vsoxei16_v_u32m1(__VA_ARGS__) |
| #define vsoxei16_v_u32m1_m | ( | ... | ) | __riscv_vsoxei16_v_u32m1_m(__VA_ARGS__) |
| #define vsoxei16_v_u32m2 | ( | ... | ) | __riscv_vsoxei16_v_u32m2(__VA_ARGS__) |
| #define vsoxei16_v_u32m2_m | ( | ... | ) | __riscv_vsoxei16_v_u32m2_m(__VA_ARGS__) |
| #define vsoxei16_v_u32m4 | ( | ... | ) | __riscv_vsoxei16_v_u32m4(__VA_ARGS__) |
| #define vsoxei16_v_u32m4_m | ( | ... | ) | __riscv_vsoxei16_v_u32m4_m(__VA_ARGS__) |
| #define vsoxei16_v_u32m8 | ( | ... | ) | __riscv_vsoxei16_v_u32m8(__VA_ARGS__) |
| #define vsoxei16_v_u32m8_m | ( | ... | ) | __riscv_vsoxei16_v_u32m8_m(__VA_ARGS__) |
| #define vsoxei16_v_u32mf2 | ( | ... | ) | __riscv_vsoxei16_v_u32mf2(__VA_ARGS__) |
| #define vsoxei16_v_u32mf2_m | ( | ... | ) | __riscv_vsoxei16_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxei16_v_u64m1 | ( | ... | ) | __riscv_vsoxei16_v_u64m1(__VA_ARGS__) |
| #define vsoxei16_v_u64m1_m | ( | ... | ) | __riscv_vsoxei16_v_u64m1_m(__VA_ARGS__) |
| #define vsoxei16_v_u64m2 | ( | ... | ) | __riscv_vsoxei16_v_u64m2(__VA_ARGS__) |
| #define vsoxei16_v_u64m2_m | ( | ... | ) | __riscv_vsoxei16_v_u64m2_m(__VA_ARGS__) |
| #define vsoxei16_v_u64m4 | ( | ... | ) | __riscv_vsoxei16_v_u64m4(__VA_ARGS__) |
| #define vsoxei16_v_u64m4_m | ( | ... | ) | __riscv_vsoxei16_v_u64m4_m(__VA_ARGS__) |
| #define vsoxei16_v_u64m8 | ( | ... | ) | __riscv_vsoxei16_v_u64m8(__VA_ARGS__) |
| #define vsoxei16_v_u64m8_m | ( | ... | ) | __riscv_vsoxei16_v_u64m8_m(__VA_ARGS__) |
| #define vsoxei16_v_u8m1 | ( | ... | ) | __riscv_vsoxei16_v_u8m1(__VA_ARGS__) |
| #define vsoxei16_v_u8m1_m | ( | ... | ) | __riscv_vsoxei16_v_u8m1_m(__VA_ARGS__) |
| #define vsoxei16_v_u8m2 | ( | ... | ) | __riscv_vsoxei16_v_u8m2(__VA_ARGS__) |
| #define vsoxei16_v_u8m2_m | ( | ... | ) | __riscv_vsoxei16_v_u8m2_m(__VA_ARGS__) |
| #define vsoxei16_v_u8m4 | ( | ... | ) | __riscv_vsoxei16_v_u8m4(__VA_ARGS__) |
| #define vsoxei16_v_u8m4_m | ( | ... | ) | __riscv_vsoxei16_v_u8m4_m(__VA_ARGS__) |
| #define vsoxei16_v_u8mf2 | ( | ... | ) | __riscv_vsoxei16_v_u8mf2(__VA_ARGS__) |
| #define vsoxei16_v_u8mf2_m | ( | ... | ) | __riscv_vsoxei16_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxei16_v_u8mf4 | ( | ... | ) | __riscv_vsoxei16_v_u8mf4(__VA_ARGS__) |
| #define vsoxei16_v_u8mf4_m | ( | ... | ) | __riscv_vsoxei16_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxei16_v_u8mf8 | ( | ... | ) | __riscv_vsoxei16_v_u8mf8(__VA_ARGS__) |
| #define vsoxei16_v_u8mf8_m | ( | ... | ) | __riscv_vsoxei16_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxei32_v_f16m1 | ( | ... | ) | __riscv_vsoxei32_v_f16m1(__VA_ARGS__) |
| #define vsoxei32_v_f16m1_m | ( | ... | ) | __riscv_vsoxei32_v_f16m1_m(__VA_ARGS__) |
| #define vsoxei32_v_f16m2 | ( | ... | ) | __riscv_vsoxei32_v_f16m2(__VA_ARGS__) |
| #define vsoxei32_v_f16m2_m | ( | ... | ) | __riscv_vsoxei32_v_f16m2_m(__VA_ARGS__) |
| #define vsoxei32_v_f16m4 | ( | ... | ) | __riscv_vsoxei32_v_f16m4(__VA_ARGS__) |
| #define vsoxei32_v_f16m4_m | ( | ... | ) | __riscv_vsoxei32_v_f16m4_m(__VA_ARGS__) |
| #define vsoxei32_v_f16mf2 | ( | ... | ) | __riscv_vsoxei32_v_f16mf2(__VA_ARGS__) |
| #define vsoxei32_v_f16mf2_m | ( | ... | ) | __riscv_vsoxei32_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxei32_v_f16mf4 | ( | ... | ) | __riscv_vsoxei32_v_f16mf4(__VA_ARGS__) |
| #define vsoxei32_v_f16mf4_m | ( | ... | ) | __riscv_vsoxei32_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxei32_v_f32m1 | ( | ... | ) | __riscv_vsoxei32_v_f32m1(__VA_ARGS__) |
| #define vsoxei32_v_f32m1_m | ( | ... | ) | __riscv_vsoxei32_v_f32m1_m(__VA_ARGS__) |
| #define vsoxei32_v_f32m2 | ( | ... | ) | __riscv_vsoxei32_v_f32m2(__VA_ARGS__) |
| #define vsoxei32_v_f32m2_m | ( | ... | ) | __riscv_vsoxei32_v_f32m2_m(__VA_ARGS__) |
| #define vsoxei32_v_f32m4 | ( | ... | ) | __riscv_vsoxei32_v_f32m4(__VA_ARGS__) |
| #define vsoxei32_v_f32m4_m | ( | ... | ) | __riscv_vsoxei32_v_f32m4_m(__VA_ARGS__) |
| #define vsoxei32_v_f32m8 | ( | ... | ) | __riscv_vsoxei32_v_f32m8(__VA_ARGS__) |
| #define vsoxei32_v_f32m8_m | ( | ... | ) | __riscv_vsoxei32_v_f32m8_m(__VA_ARGS__) |
| #define vsoxei32_v_f32mf2 | ( | ... | ) | __riscv_vsoxei32_v_f32mf2(__VA_ARGS__) |
| #define vsoxei32_v_f32mf2_m | ( | ... | ) | __riscv_vsoxei32_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxei32_v_f64m1 | ( | ... | ) | __riscv_vsoxei32_v_f64m1(__VA_ARGS__) |
| #define vsoxei32_v_f64m1_m | ( | ... | ) | __riscv_vsoxei32_v_f64m1_m(__VA_ARGS__) |
| #define vsoxei32_v_f64m2 | ( | ... | ) | __riscv_vsoxei32_v_f64m2(__VA_ARGS__) |
| #define vsoxei32_v_f64m2_m | ( | ... | ) | __riscv_vsoxei32_v_f64m2_m(__VA_ARGS__) |
| #define vsoxei32_v_f64m4 | ( | ... | ) | __riscv_vsoxei32_v_f64m4(__VA_ARGS__) |
| #define vsoxei32_v_f64m4_m | ( | ... | ) | __riscv_vsoxei32_v_f64m4_m(__VA_ARGS__) |
| #define vsoxei32_v_f64m8 | ( | ... | ) | __riscv_vsoxei32_v_f64m8(__VA_ARGS__) |
| #define vsoxei32_v_f64m8_m | ( | ... | ) | __riscv_vsoxei32_v_f64m8_m(__VA_ARGS__) |
| #define vsoxei32_v_i16m1 | ( | ... | ) | __riscv_vsoxei32_v_i16m1(__VA_ARGS__) |
| #define vsoxei32_v_i16m1_m | ( | ... | ) | __riscv_vsoxei32_v_i16m1_m(__VA_ARGS__) |
| #define vsoxei32_v_i16m2 | ( | ... | ) | __riscv_vsoxei32_v_i16m2(__VA_ARGS__) |
| #define vsoxei32_v_i16m2_m | ( | ... | ) | __riscv_vsoxei32_v_i16m2_m(__VA_ARGS__) |
| #define vsoxei32_v_i16m4 | ( | ... | ) | __riscv_vsoxei32_v_i16m4(__VA_ARGS__) |
| #define vsoxei32_v_i16m4_m | ( | ... | ) | __riscv_vsoxei32_v_i16m4_m(__VA_ARGS__) |
| #define vsoxei32_v_i16mf2 | ( | ... | ) | __riscv_vsoxei32_v_i16mf2(__VA_ARGS__) |
| #define vsoxei32_v_i16mf2_m | ( | ... | ) | __riscv_vsoxei32_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxei32_v_i16mf4 | ( | ... | ) | __riscv_vsoxei32_v_i16mf4(__VA_ARGS__) |
| #define vsoxei32_v_i16mf4_m | ( | ... | ) | __riscv_vsoxei32_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxei32_v_i32m1 | ( | ... | ) | __riscv_vsoxei32_v_i32m1(__VA_ARGS__) |
| #define vsoxei32_v_i32m1_m | ( | ... | ) | __riscv_vsoxei32_v_i32m1_m(__VA_ARGS__) |
| #define vsoxei32_v_i32m2 | ( | ... | ) | __riscv_vsoxei32_v_i32m2(__VA_ARGS__) |
| #define vsoxei32_v_i32m2_m | ( | ... | ) | __riscv_vsoxei32_v_i32m2_m(__VA_ARGS__) |
| #define vsoxei32_v_i32m4 | ( | ... | ) | __riscv_vsoxei32_v_i32m4(__VA_ARGS__) |
| #define vsoxei32_v_i32m4_m | ( | ... | ) | __riscv_vsoxei32_v_i32m4_m(__VA_ARGS__) |
| #define vsoxei32_v_i32m8 | ( | ... | ) | __riscv_vsoxei32_v_i32m8(__VA_ARGS__) |
| #define vsoxei32_v_i32m8_m | ( | ... | ) | __riscv_vsoxei32_v_i32m8_m(__VA_ARGS__) |
| #define vsoxei32_v_i32mf2 | ( | ... | ) | __riscv_vsoxei32_v_i32mf2(__VA_ARGS__) |
| #define vsoxei32_v_i32mf2_m | ( | ... | ) | __riscv_vsoxei32_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxei32_v_i64m1 | ( | ... | ) | __riscv_vsoxei32_v_i64m1(__VA_ARGS__) |
| #define vsoxei32_v_i64m1_m | ( | ... | ) | __riscv_vsoxei32_v_i64m1_m(__VA_ARGS__) |
| #define vsoxei32_v_i64m2 | ( | ... | ) | __riscv_vsoxei32_v_i64m2(__VA_ARGS__) |
| #define vsoxei32_v_i64m2_m | ( | ... | ) | __riscv_vsoxei32_v_i64m2_m(__VA_ARGS__) |
| #define vsoxei32_v_i64m4 | ( | ... | ) | __riscv_vsoxei32_v_i64m4(__VA_ARGS__) |
| #define vsoxei32_v_i64m4_m | ( | ... | ) | __riscv_vsoxei32_v_i64m4_m(__VA_ARGS__) |
| #define vsoxei32_v_i64m8 | ( | ... | ) | __riscv_vsoxei32_v_i64m8(__VA_ARGS__) |
| #define vsoxei32_v_i64m8_m | ( | ... | ) | __riscv_vsoxei32_v_i64m8_m(__VA_ARGS__) |
| #define vsoxei32_v_i8m1 | ( | ... | ) | __riscv_vsoxei32_v_i8m1(__VA_ARGS__) |
| #define vsoxei32_v_i8m1_m | ( | ... | ) | __riscv_vsoxei32_v_i8m1_m(__VA_ARGS__) |
| #define vsoxei32_v_i8m2 | ( | ... | ) | __riscv_vsoxei32_v_i8m2(__VA_ARGS__) |
| #define vsoxei32_v_i8m2_m | ( | ... | ) | __riscv_vsoxei32_v_i8m2_m(__VA_ARGS__) |
| #define vsoxei32_v_i8mf2 | ( | ... | ) | __riscv_vsoxei32_v_i8mf2(__VA_ARGS__) |
| #define vsoxei32_v_i8mf2_m | ( | ... | ) | __riscv_vsoxei32_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxei32_v_i8mf4 | ( | ... | ) | __riscv_vsoxei32_v_i8mf4(__VA_ARGS__) |
| #define vsoxei32_v_i8mf4_m | ( | ... | ) | __riscv_vsoxei32_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxei32_v_i8mf8 | ( | ... | ) | __riscv_vsoxei32_v_i8mf8(__VA_ARGS__) |
| #define vsoxei32_v_i8mf8_m | ( | ... | ) | __riscv_vsoxei32_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxei32_v_u16m1 | ( | ... | ) | __riscv_vsoxei32_v_u16m1(__VA_ARGS__) |
| #define vsoxei32_v_u16m1_m | ( | ... | ) | __riscv_vsoxei32_v_u16m1_m(__VA_ARGS__) |
| #define vsoxei32_v_u16m2 | ( | ... | ) | __riscv_vsoxei32_v_u16m2(__VA_ARGS__) |
| #define vsoxei32_v_u16m2_m | ( | ... | ) | __riscv_vsoxei32_v_u16m2_m(__VA_ARGS__) |
| #define vsoxei32_v_u16m4 | ( | ... | ) | __riscv_vsoxei32_v_u16m4(__VA_ARGS__) |
| #define vsoxei32_v_u16m4_m | ( | ... | ) | __riscv_vsoxei32_v_u16m4_m(__VA_ARGS__) |
| #define vsoxei32_v_u16mf2 | ( | ... | ) | __riscv_vsoxei32_v_u16mf2(__VA_ARGS__) |
| #define vsoxei32_v_u16mf2_m | ( | ... | ) | __riscv_vsoxei32_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxei32_v_u16mf4 | ( | ... | ) | __riscv_vsoxei32_v_u16mf4(__VA_ARGS__) |
| #define vsoxei32_v_u16mf4_m | ( | ... | ) | __riscv_vsoxei32_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxei32_v_u32m1 | ( | ... | ) | __riscv_vsoxei32_v_u32m1(__VA_ARGS__) |
| #define vsoxei32_v_u32m1_m | ( | ... | ) | __riscv_vsoxei32_v_u32m1_m(__VA_ARGS__) |
| #define vsoxei32_v_u32m2 | ( | ... | ) | __riscv_vsoxei32_v_u32m2(__VA_ARGS__) |
| #define vsoxei32_v_u32m2_m | ( | ... | ) | __riscv_vsoxei32_v_u32m2_m(__VA_ARGS__) |
| #define vsoxei32_v_u32m4 | ( | ... | ) | __riscv_vsoxei32_v_u32m4(__VA_ARGS__) |
| #define vsoxei32_v_u32m4_m | ( | ... | ) | __riscv_vsoxei32_v_u32m4_m(__VA_ARGS__) |
| #define vsoxei32_v_u32m8 | ( | ... | ) | __riscv_vsoxei32_v_u32m8(__VA_ARGS__) |
| #define vsoxei32_v_u32m8_m | ( | ... | ) | __riscv_vsoxei32_v_u32m8_m(__VA_ARGS__) |
| #define vsoxei32_v_u32mf2 | ( | ... | ) | __riscv_vsoxei32_v_u32mf2(__VA_ARGS__) |
| #define vsoxei32_v_u32mf2_m | ( | ... | ) | __riscv_vsoxei32_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxei32_v_u64m1 | ( | ... | ) | __riscv_vsoxei32_v_u64m1(__VA_ARGS__) |
| #define vsoxei32_v_u64m1_m | ( | ... | ) | __riscv_vsoxei32_v_u64m1_m(__VA_ARGS__) |
| #define vsoxei32_v_u64m2 | ( | ... | ) | __riscv_vsoxei32_v_u64m2(__VA_ARGS__) |
| #define vsoxei32_v_u64m2_m | ( | ... | ) | __riscv_vsoxei32_v_u64m2_m(__VA_ARGS__) |
| #define vsoxei32_v_u64m4 | ( | ... | ) | __riscv_vsoxei32_v_u64m4(__VA_ARGS__) |
| #define vsoxei32_v_u64m4_m | ( | ... | ) | __riscv_vsoxei32_v_u64m4_m(__VA_ARGS__) |
| #define vsoxei32_v_u64m8 | ( | ... | ) | __riscv_vsoxei32_v_u64m8(__VA_ARGS__) |
| #define vsoxei32_v_u64m8_m | ( | ... | ) | __riscv_vsoxei32_v_u64m8_m(__VA_ARGS__) |
| #define vsoxei32_v_u8m1 | ( | ... | ) | __riscv_vsoxei32_v_u8m1(__VA_ARGS__) |
| #define vsoxei32_v_u8m1_m | ( | ... | ) | __riscv_vsoxei32_v_u8m1_m(__VA_ARGS__) |
| #define vsoxei32_v_u8m2 | ( | ... | ) | __riscv_vsoxei32_v_u8m2(__VA_ARGS__) |
| #define vsoxei32_v_u8m2_m | ( | ... | ) | __riscv_vsoxei32_v_u8m2_m(__VA_ARGS__) |
| #define vsoxei32_v_u8mf2 | ( | ... | ) | __riscv_vsoxei32_v_u8mf2(__VA_ARGS__) |
| #define vsoxei32_v_u8mf2_m | ( | ... | ) | __riscv_vsoxei32_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxei32_v_u8mf4 | ( | ... | ) | __riscv_vsoxei32_v_u8mf4(__VA_ARGS__) |
| #define vsoxei32_v_u8mf4_m | ( | ... | ) | __riscv_vsoxei32_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxei32_v_u8mf8 | ( | ... | ) | __riscv_vsoxei32_v_u8mf8(__VA_ARGS__) |
| #define vsoxei32_v_u8mf8_m | ( | ... | ) | __riscv_vsoxei32_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxei64_v_f16m1 | ( | ... | ) | __riscv_vsoxei64_v_f16m1(__VA_ARGS__) |
| #define vsoxei64_v_f16m1_m | ( | ... | ) | __riscv_vsoxei64_v_f16m1_m(__VA_ARGS__) |
| #define vsoxei64_v_f16m2 | ( | ... | ) | __riscv_vsoxei64_v_f16m2(__VA_ARGS__) |
| #define vsoxei64_v_f16m2_m | ( | ... | ) | __riscv_vsoxei64_v_f16m2_m(__VA_ARGS__) |
| #define vsoxei64_v_f16mf2 | ( | ... | ) | __riscv_vsoxei64_v_f16mf2(__VA_ARGS__) |
| #define vsoxei64_v_f16mf2_m | ( | ... | ) | __riscv_vsoxei64_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxei64_v_f16mf4 | ( | ... | ) | __riscv_vsoxei64_v_f16mf4(__VA_ARGS__) |
| #define vsoxei64_v_f16mf4_m | ( | ... | ) | __riscv_vsoxei64_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxei64_v_f32m1 | ( | ... | ) | __riscv_vsoxei64_v_f32m1(__VA_ARGS__) |
| #define vsoxei64_v_f32m1_m | ( | ... | ) | __riscv_vsoxei64_v_f32m1_m(__VA_ARGS__) |
| #define vsoxei64_v_f32m2 | ( | ... | ) | __riscv_vsoxei64_v_f32m2(__VA_ARGS__) |
| #define vsoxei64_v_f32m2_m | ( | ... | ) | __riscv_vsoxei64_v_f32m2_m(__VA_ARGS__) |
| #define vsoxei64_v_f32m4 | ( | ... | ) | __riscv_vsoxei64_v_f32m4(__VA_ARGS__) |
| #define vsoxei64_v_f32m4_m | ( | ... | ) | __riscv_vsoxei64_v_f32m4_m(__VA_ARGS__) |
| #define vsoxei64_v_f32mf2 | ( | ... | ) | __riscv_vsoxei64_v_f32mf2(__VA_ARGS__) |
| #define vsoxei64_v_f32mf2_m | ( | ... | ) | __riscv_vsoxei64_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxei64_v_f64m1 | ( | ... | ) | __riscv_vsoxei64_v_f64m1(__VA_ARGS__) |
| #define vsoxei64_v_f64m1_m | ( | ... | ) | __riscv_vsoxei64_v_f64m1_m(__VA_ARGS__) |
| #define vsoxei64_v_f64m2 | ( | ... | ) | __riscv_vsoxei64_v_f64m2(__VA_ARGS__) |
| #define vsoxei64_v_f64m2_m | ( | ... | ) | __riscv_vsoxei64_v_f64m2_m(__VA_ARGS__) |
| #define vsoxei64_v_f64m4 | ( | ... | ) | __riscv_vsoxei64_v_f64m4(__VA_ARGS__) |
| #define vsoxei64_v_f64m4_m | ( | ... | ) | __riscv_vsoxei64_v_f64m4_m(__VA_ARGS__) |
| #define vsoxei64_v_f64m8 | ( | ... | ) | __riscv_vsoxei64_v_f64m8(__VA_ARGS__) |
| #define vsoxei64_v_f64m8_m | ( | ... | ) | __riscv_vsoxei64_v_f64m8_m(__VA_ARGS__) |
| #define vsoxei64_v_i16m1 | ( | ... | ) | __riscv_vsoxei64_v_i16m1(__VA_ARGS__) |
| #define vsoxei64_v_i16m1_m | ( | ... | ) | __riscv_vsoxei64_v_i16m1_m(__VA_ARGS__) |
| #define vsoxei64_v_i16m2 | ( | ... | ) | __riscv_vsoxei64_v_i16m2(__VA_ARGS__) |
| #define vsoxei64_v_i16m2_m | ( | ... | ) | __riscv_vsoxei64_v_i16m2_m(__VA_ARGS__) |
| #define vsoxei64_v_i16mf2 | ( | ... | ) | __riscv_vsoxei64_v_i16mf2(__VA_ARGS__) |
| #define vsoxei64_v_i16mf2_m | ( | ... | ) | __riscv_vsoxei64_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxei64_v_i16mf4 | ( | ... | ) | __riscv_vsoxei64_v_i16mf4(__VA_ARGS__) |
| #define vsoxei64_v_i16mf4_m | ( | ... | ) | __riscv_vsoxei64_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxei64_v_i32m1 | ( | ... | ) | __riscv_vsoxei64_v_i32m1(__VA_ARGS__) |
| #define vsoxei64_v_i32m1_m | ( | ... | ) | __riscv_vsoxei64_v_i32m1_m(__VA_ARGS__) |
| #define vsoxei64_v_i32m2 | ( | ... | ) | __riscv_vsoxei64_v_i32m2(__VA_ARGS__) |
| #define vsoxei64_v_i32m2_m | ( | ... | ) | __riscv_vsoxei64_v_i32m2_m(__VA_ARGS__) |
| #define vsoxei64_v_i32m4 | ( | ... | ) | __riscv_vsoxei64_v_i32m4(__VA_ARGS__) |
| #define vsoxei64_v_i32m4_m | ( | ... | ) | __riscv_vsoxei64_v_i32m4_m(__VA_ARGS__) |
| #define vsoxei64_v_i32mf2 | ( | ... | ) | __riscv_vsoxei64_v_i32mf2(__VA_ARGS__) |
| #define vsoxei64_v_i32mf2_m | ( | ... | ) | __riscv_vsoxei64_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxei64_v_i64m1 | ( | ... | ) | __riscv_vsoxei64_v_i64m1(__VA_ARGS__) |
| #define vsoxei64_v_i64m1_m | ( | ... | ) | __riscv_vsoxei64_v_i64m1_m(__VA_ARGS__) |
| #define vsoxei64_v_i64m2 | ( | ... | ) | __riscv_vsoxei64_v_i64m2(__VA_ARGS__) |
| #define vsoxei64_v_i64m2_m | ( | ... | ) | __riscv_vsoxei64_v_i64m2_m(__VA_ARGS__) |
| #define vsoxei64_v_i64m4 | ( | ... | ) | __riscv_vsoxei64_v_i64m4(__VA_ARGS__) |
| #define vsoxei64_v_i64m4_m | ( | ... | ) | __riscv_vsoxei64_v_i64m4_m(__VA_ARGS__) |
| #define vsoxei64_v_i64m8 | ( | ... | ) | __riscv_vsoxei64_v_i64m8(__VA_ARGS__) |
| #define vsoxei64_v_i64m8_m | ( | ... | ) | __riscv_vsoxei64_v_i64m8_m(__VA_ARGS__) |
| #define vsoxei64_v_i8m1 | ( | ... | ) | __riscv_vsoxei64_v_i8m1(__VA_ARGS__) |
| #define vsoxei64_v_i8m1_m | ( | ... | ) | __riscv_vsoxei64_v_i8m1_m(__VA_ARGS__) |
| #define vsoxei64_v_i8mf2 | ( | ... | ) | __riscv_vsoxei64_v_i8mf2(__VA_ARGS__) |
| #define vsoxei64_v_i8mf2_m | ( | ... | ) | __riscv_vsoxei64_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxei64_v_i8mf4 | ( | ... | ) | __riscv_vsoxei64_v_i8mf4(__VA_ARGS__) |
| #define vsoxei64_v_i8mf4_m | ( | ... | ) | __riscv_vsoxei64_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxei64_v_i8mf8 | ( | ... | ) | __riscv_vsoxei64_v_i8mf8(__VA_ARGS__) |
| #define vsoxei64_v_i8mf8_m | ( | ... | ) | __riscv_vsoxei64_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxei64_v_u16m1 | ( | ... | ) | __riscv_vsoxei64_v_u16m1(__VA_ARGS__) |
| #define vsoxei64_v_u16m1_m | ( | ... | ) | __riscv_vsoxei64_v_u16m1_m(__VA_ARGS__) |
| #define vsoxei64_v_u16m2 | ( | ... | ) | __riscv_vsoxei64_v_u16m2(__VA_ARGS__) |
| #define vsoxei64_v_u16m2_m | ( | ... | ) | __riscv_vsoxei64_v_u16m2_m(__VA_ARGS__) |
| #define vsoxei64_v_u16mf2 | ( | ... | ) | __riscv_vsoxei64_v_u16mf2(__VA_ARGS__) |
| #define vsoxei64_v_u16mf2_m | ( | ... | ) | __riscv_vsoxei64_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxei64_v_u16mf4 | ( | ... | ) | __riscv_vsoxei64_v_u16mf4(__VA_ARGS__) |
| #define vsoxei64_v_u16mf4_m | ( | ... | ) | __riscv_vsoxei64_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxei64_v_u32m1 | ( | ... | ) | __riscv_vsoxei64_v_u32m1(__VA_ARGS__) |
| #define vsoxei64_v_u32m1_m | ( | ... | ) | __riscv_vsoxei64_v_u32m1_m(__VA_ARGS__) |
| #define vsoxei64_v_u32m2 | ( | ... | ) | __riscv_vsoxei64_v_u32m2(__VA_ARGS__) |
| #define vsoxei64_v_u32m2_m | ( | ... | ) | __riscv_vsoxei64_v_u32m2_m(__VA_ARGS__) |
| #define vsoxei64_v_u32m4 | ( | ... | ) | __riscv_vsoxei64_v_u32m4(__VA_ARGS__) |
| #define vsoxei64_v_u32m4_m | ( | ... | ) | __riscv_vsoxei64_v_u32m4_m(__VA_ARGS__) |
| #define vsoxei64_v_u32mf2 | ( | ... | ) | __riscv_vsoxei64_v_u32mf2(__VA_ARGS__) |
| #define vsoxei64_v_u32mf2_m | ( | ... | ) | __riscv_vsoxei64_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxei64_v_u64m1 | ( | ... | ) | __riscv_vsoxei64_v_u64m1(__VA_ARGS__) |
| #define vsoxei64_v_u64m1_m | ( | ... | ) | __riscv_vsoxei64_v_u64m1_m(__VA_ARGS__) |
| #define vsoxei64_v_u64m2 | ( | ... | ) | __riscv_vsoxei64_v_u64m2(__VA_ARGS__) |
| #define vsoxei64_v_u64m2_m | ( | ... | ) | __riscv_vsoxei64_v_u64m2_m(__VA_ARGS__) |
| #define vsoxei64_v_u64m4 | ( | ... | ) | __riscv_vsoxei64_v_u64m4(__VA_ARGS__) |
| #define vsoxei64_v_u64m4_m | ( | ... | ) | __riscv_vsoxei64_v_u64m4_m(__VA_ARGS__) |
| #define vsoxei64_v_u64m8 | ( | ... | ) | __riscv_vsoxei64_v_u64m8(__VA_ARGS__) |
| #define vsoxei64_v_u64m8_m | ( | ... | ) | __riscv_vsoxei64_v_u64m8_m(__VA_ARGS__) |
| #define vsoxei64_v_u8m1 | ( | ... | ) | __riscv_vsoxei64_v_u8m1(__VA_ARGS__) |
| #define vsoxei64_v_u8m1_m | ( | ... | ) | __riscv_vsoxei64_v_u8m1_m(__VA_ARGS__) |
| #define vsoxei64_v_u8mf2 | ( | ... | ) | __riscv_vsoxei64_v_u8mf2(__VA_ARGS__) |
| #define vsoxei64_v_u8mf2_m | ( | ... | ) | __riscv_vsoxei64_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxei64_v_u8mf4 | ( | ... | ) | __riscv_vsoxei64_v_u8mf4(__VA_ARGS__) |
| #define vsoxei64_v_u8mf4_m | ( | ... | ) | __riscv_vsoxei64_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxei64_v_u8mf8 | ( | ... | ) | __riscv_vsoxei64_v_u8mf8(__VA_ARGS__) |
| #define vsoxei64_v_u8mf8_m | ( | ... | ) | __riscv_vsoxei64_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxei8_v_f16m1 | ( | ... | ) | __riscv_vsoxei8_v_f16m1(__VA_ARGS__) |
| #define vsoxei8_v_f16m1_m | ( | ... | ) | __riscv_vsoxei8_v_f16m1_m(__VA_ARGS__) |
| #define vsoxei8_v_f16m2 | ( | ... | ) | __riscv_vsoxei8_v_f16m2(__VA_ARGS__) |
| #define vsoxei8_v_f16m2_m | ( | ... | ) | __riscv_vsoxei8_v_f16m2_m(__VA_ARGS__) |
| #define vsoxei8_v_f16m4 | ( | ... | ) | __riscv_vsoxei8_v_f16m4(__VA_ARGS__) |
| #define vsoxei8_v_f16m4_m | ( | ... | ) | __riscv_vsoxei8_v_f16m4_m(__VA_ARGS__) |
| #define vsoxei8_v_f16m8 | ( | ... | ) | __riscv_vsoxei8_v_f16m8(__VA_ARGS__) |
| #define vsoxei8_v_f16m8_m | ( | ... | ) | __riscv_vsoxei8_v_f16m8_m(__VA_ARGS__) |
| #define vsoxei8_v_f16mf2 | ( | ... | ) | __riscv_vsoxei8_v_f16mf2(__VA_ARGS__) |
| #define vsoxei8_v_f16mf2_m | ( | ... | ) | __riscv_vsoxei8_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxei8_v_f16mf4 | ( | ... | ) | __riscv_vsoxei8_v_f16mf4(__VA_ARGS__) |
| #define vsoxei8_v_f16mf4_m | ( | ... | ) | __riscv_vsoxei8_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxei8_v_f32m1 | ( | ... | ) | __riscv_vsoxei8_v_f32m1(__VA_ARGS__) |
| #define vsoxei8_v_f32m1_m | ( | ... | ) | __riscv_vsoxei8_v_f32m1_m(__VA_ARGS__) |
| #define vsoxei8_v_f32m2 | ( | ... | ) | __riscv_vsoxei8_v_f32m2(__VA_ARGS__) |
| #define vsoxei8_v_f32m2_m | ( | ... | ) | __riscv_vsoxei8_v_f32m2_m(__VA_ARGS__) |
| #define vsoxei8_v_f32m4 | ( | ... | ) | __riscv_vsoxei8_v_f32m4(__VA_ARGS__) |
| #define vsoxei8_v_f32m4_m | ( | ... | ) | __riscv_vsoxei8_v_f32m4_m(__VA_ARGS__) |
| #define vsoxei8_v_f32m8 | ( | ... | ) | __riscv_vsoxei8_v_f32m8(__VA_ARGS__) |
| #define vsoxei8_v_f32m8_m | ( | ... | ) | __riscv_vsoxei8_v_f32m8_m(__VA_ARGS__) |
| #define vsoxei8_v_f32mf2 | ( | ... | ) | __riscv_vsoxei8_v_f32mf2(__VA_ARGS__) |
| #define vsoxei8_v_f32mf2_m | ( | ... | ) | __riscv_vsoxei8_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxei8_v_f64m1 | ( | ... | ) | __riscv_vsoxei8_v_f64m1(__VA_ARGS__) |
| #define vsoxei8_v_f64m1_m | ( | ... | ) | __riscv_vsoxei8_v_f64m1_m(__VA_ARGS__) |
| #define vsoxei8_v_f64m2 | ( | ... | ) | __riscv_vsoxei8_v_f64m2(__VA_ARGS__) |
| #define vsoxei8_v_f64m2_m | ( | ... | ) | __riscv_vsoxei8_v_f64m2_m(__VA_ARGS__) |
| #define vsoxei8_v_f64m4 | ( | ... | ) | __riscv_vsoxei8_v_f64m4(__VA_ARGS__) |
| #define vsoxei8_v_f64m4_m | ( | ... | ) | __riscv_vsoxei8_v_f64m4_m(__VA_ARGS__) |
| #define vsoxei8_v_f64m8 | ( | ... | ) | __riscv_vsoxei8_v_f64m8(__VA_ARGS__) |
| #define vsoxei8_v_f64m8_m | ( | ... | ) | __riscv_vsoxei8_v_f64m8_m(__VA_ARGS__) |
| #define vsoxei8_v_i16m1 | ( | ... | ) | __riscv_vsoxei8_v_i16m1(__VA_ARGS__) |
| #define vsoxei8_v_i16m1_m | ( | ... | ) | __riscv_vsoxei8_v_i16m1_m(__VA_ARGS__) |
| #define vsoxei8_v_i16m2 | ( | ... | ) | __riscv_vsoxei8_v_i16m2(__VA_ARGS__) |
| #define vsoxei8_v_i16m2_m | ( | ... | ) | __riscv_vsoxei8_v_i16m2_m(__VA_ARGS__) |
| #define vsoxei8_v_i16m4 | ( | ... | ) | __riscv_vsoxei8_v_i16m4(__VA_ARGS__) |
| #define vsoxei8_v_i16m4_m | ( | ... | ) | __riscv_vsoxei8_v_i16m4_m(__VA_ARGS__) |
| #define vsoxei8_v_i16m8 | ( | ... | ) | __riscv_vsoxei8_v_i16m8(__VA_ARGS__) |
| #define vsoxei8_v_i16m8_m | ( | ... | ) | __riscv_vsoxei8_v_i16m8_m(__VA_ARGS__) |
| #define vsoxei8_v_i16mf2 | ( | ... | ) | __riscv_vsoxei8_v_i16mf2(__VA_ARGS__) |
| #define vsoxei8_v_i16mf2_m | ( | ... | ) | __riscv_vsoxei8_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxei8_v_i16mf4 | ( | ... | ) | __riscv_vsoxei8_v_i16mf4(__VA_ARGS__) |
| #define vsoxei8_v_i16mf4_m | ( | ... | ) | __riscv_vsoxei8_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxei8_v_i32m1 | ( | ... | ) | __riscv_vsoxei8_v_i32m1(__VA_ARGS__) |
| #define vsoxei8_v_i32m1_m | ( | ... | ) | __riscv_vsoxei8_v_i32m1_m(__VA_ARGS__) |
| #define vsoxei8_v_i32m2 | ( | ... | ) | __riscv_vsoxei8_v_i32m2(__VA_ARGS__) |
| #define vsoxei8_v_i32m2_m | ( | ... | ) | __riscv_vsoxei8_v_i32m2_m(__VA_ARGS__) |
| #define vsoxei8_v_i32m4 | ( | ... | ) | __riscv_vsoxei8_v_i32m4(__VA_ARGS__) |
| #define vsoxei8_v_i32m4_m | ( | ... | ) | __riscv_vsoxei8_v_i32m4_m(__VA_ARGS__) |
| #define vsoxei8_v_i32m8 | ( | ... | ) | __riscv_vsoxei8_v_i32m8(__VA_ARGS__) |
| #define vsoxei8_v_i32m8_m | ( | ... | ) | __riscv_vsoxei8_v_i32m8_m(__VA_ARGS__) |
| #define vsoxei8_v_i32mf2 | ( | ... | ) | __riscv_vsoxei8_v_i32mf2(__VA_ARGS__) |
| #define vsoxei8_v_i32mf2_m | ( | ... | ) | __riscv_vsoxei8_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxei8_v_i64m1 | ( | ... | ) | __riscv_vsoxei8_v_i64m1(__VA_ARGS__) |
| #define vsoxei8_v_i64m1_m | ( | ... | ) | __riscv_vsoxei8_v_i64m1_m(__VA_ARGS__) |
| #define vsoxei8_v_i64m2 | ( | ... | ) | __riscv_vsoxei8_v_i64m2(__VA_ARGS__) |
| #define vsoxei8_v_i64m2_m | ( | ... | ) | __riscv_vsoxei8_v_i64m2_m(__VA_ARGS__) |
| #define vsoxei8_v_i64m4 | ( | ... | ) | __riscv_vsoxei8_v_i64m4(__VA_ARGS__) |
| #define vsoxei8_v_i64m4_m | ( | ... | ) | __riscv_vsoxei8_v_i64m4_m(__VA_ARGS__) |
| #define vsoxei8_v_i64m8 | ( | ... | ) | __riscv_vsoxei8_v_i64m8(__VA_ARGS__) |
| #define vsoxei8_v_i64m8_m | ( | ... | ) | __riscv_vsoxei8_v_i64m8_m(__VA_ARGS__) |
| #define vsoxei8_v_i8m1 | ( | ... | ) | __riscv_vsoxei8_v_i8m1(__VA_ARGS__) |
| #define vsoxei8_v_i8m1_m | ( | ... | ) | __riscv_vsoxei8_v_i8m1_m(__VA_ARGS__) |
| #define vsoxei8_v_i8m2 | ( | ... | ) | __riscv_vsoxei8_v_i8m2(__VA_ARGS__) |
| #define vsoxei8_v_i8m2_m | ( | ... | ) | __riscv_vsoxei8_v_i8m2_m(__VA_ARGS__) |
| #define vsoxei8_v_i8m4 | ( | ... | ) | __riscv_vsoxei8_v_i8m4(__VA_ARGS__) |
| #define vsoxei8_v_i8m4_m | ( | ... | ) | __riscv_vsoxei8_v_i8m4_m(__VA_ARGS__) |
| #define vsoxei8_v_i8m8 | ( | ... | ) | __riscv_vsoxei8_v_i8m8(__VA_ARGS__) |
| #define vsoxei8_v_i8m8_m | ( | ... | ) | __riscv_vsoxei8_v_i8m8_m(__VA_ARGS__) |
| #define vsoxei8_v_i8mf2 | ( | ... | ) | __riscv_vsoxei8_v_i8mf2(__VA_ARGS__) |
| #define vsoxei8_v_i8mf2_m | ( | ... | ) | __riscv_vsoxei8_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxei8_v_i8mf4 | ( | ... | ) | __riscv_vsoxei8_v_i8mf4(__VA_ARGS__) |
| #define vsoxei8_v_i8mf4_m | ( | ... | ) | __riscv_vsoxei8_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxei8_v_i8mf8 | ( | ... | ) | __riscv_vsoxei8_v_i8mf8(__VA_ARGS__) |
| #define vsoxei8_v_i8mf8_m | ( | ... | ) | __riscv_vsoxei8_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxei8_v_u16m1 | ( | ... | ) | __riscv_vsoxei8_v_u16m1(__VA_ARGS__) |
| #define vsoxei8_v_u16m1_m | ( | ... | ) | __riscv_vsoxei8_v_u16m1_m(__VA_ARGS__) |
| #define vsoxei8_v_u16m2 | ( | ... | ) | __riscv_vsoxei8_v_u16m2(__VA_ARGS__) |
| #define vsoxei8_v_u16m2_m | ( | ... | ) | __riscv_vsoxei8_v_u16m2_m(__VA_ARGS__) |
| #define vsoxei8_v_u16m4 | ( | ... | ) | __riscv_vsoxei8_v_u16m4(__VA_ARGS__) |
| #define vsoxei8_v_u16m4_m | ( | ... | ) | __riscv_vsoxei8_v_u16m4_m(__VA_ARGS__) |
| #define vsoxei8_v_u16m8 | ( | ... | ) | __riscv_vsoxei8_v_u16m8(__VA_ARGS__) |
| #define vsoxei8_v_u16m8_m | ( | ... | ) | __riscv_vsoxei8_v_u16m8_m(__VA_ARGS__) |
| #define vsoxei8_v_u16mf2 | ( | ... | ) | __riscv_vsoxei8_v_u16mf2(__VA_ARGS__) |
| #define vsoxei8_v_u16mf2_m | ( | ... | ) | __riscv_vsoxei8_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxei8_v_u16mf4 | ( | ... | ) | __riscv_vsoxei8_v_u16mf4(__VA_ARGS__) |
| #define vsoxei8_v_u16mf4_m | ( | ... | ) | __riscv_vsoxei8_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxei8_v_u32m1 | ( | ... | ) | __riscv_vsoxei8_v_u32m1(__VA_ARGS__) |
| #define vsoxei8_v_u32m1_m | ( | ... | ) | __riscv_vsoxei8_v_u32m1_m(__VA_ARGS__) |
| #define vsoxei8_v_u32m2 | ( | ... | ) | __riscv_vsoxei8_v_u32m2(__VA_ARGS__) |
| #define vsoxei8_v_u32m2_m | ( | ... | ) | __riscv_vsoxei8_v_u32m2_m(__VA_ARGS__) |
| #define vsoxei8_v_u32m4 | ( | ... | ) | __riscv_vsoxei8_v_u32m4(__VA_ARGS__) |
| #define vsoxei8_v_u32m4_m | ( | ... | ) | __riscv_vsoxei8_v_u32m4_m(__VA_ARGS__) |
| #define vsoxei8_v_u32m8 | ( | ... | ) | __riscv_vsoxei8_v_u32m8(__VA_ARGS__) |
| #define vsoxei8_v_u32m8_m | ( | ... | ) | __riscv_vsoxei8_v_u32m8_m(__VA_ARGS__) |
| #define vsoxei8_v_u32mf2 | ( | ... | ) | __riscv_vsoxei8_v_u32mf2(__VA_ARGS__) |
| #define vsoxei8_v_u32mf2_m | ( | ... | ) | __riscv_vsoxei8_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxei8_v_u64m1 | ( | ... | ) | __riscv_vsoxei8_v_u64m1(__VA_ARGS__) |
| #define vsoxei8_v_u64m1_m | ( | ... | ) | __riscv_vsoxei8_v_u64m1_m(__VA_ARGS__) |
| #define vsoxei8_v_u64m2 | ( | ... | ) | __riscv_vsoxei8_v_u64m2(__VA_ARGS__) |
| #define vsoxei8_v_u64m2_m | ( | ... | ) | __riscv_vsoxei8_v_u64m2_m(__VA_ARGS__) |
| #define vsoxei8_v_u64m4 | ( | ... | ) | __riscv_vsoxei8_v_u64m4(__VA_ARGS__) |
| #define vsoxei8_v_u64m4_m | ( | ... | ) | __riscv_vsoxei8_v_u64m4_m(__VA_ARGS__) |
| #define vsoxei8_v_u64m8 | ( | ... | ) | __riscv_vsoxei8_v_u64m8(__VA_ARGS__) |
| #define vsoxei8_v_u64m8_m | ( | ... | ) | __riscv_vsoxei8_v_u64m8_m(__VA_ARGS__) |
| #define vsoxei8_v_u8m1 | ( | ... | ) | __riscv_vsoxei8_v_u8m1(__VA_ARGS__) |
| #define vsoxei8_v_u8m1_m | ( | ... | ) | __riscv_vsoxei8_v_u8m1_m(__VA_ARGS__) |
| #define vsoxei8_v_u8m2 | ( | ... | ) | __riscv_vsoxei8_v_u8m2(__VA_ARGS__) |
| #define vsoxei8_v_u8m2_m | ( | ... | ) | __riscv_vsoxei8_v_u8m2_m(__VA_ARGS__) |
| #define vsoxei8_v_u8m4 | ( | ... | ) | __riscv_vsoxei8_v_u8m4(__VA_ARGS__) |
| #define vsoxei8_v_u8m4_m | ( | ... | ) | __riscv_vsoxei8_v_u8m4_m(__VA_ARGS__) |
| #define vsoxei8_v_u8m8 | ( | ... | ) | __riscv_vsoxei8_v_u8m8(__VA_ARGS__) |
| #define vsoxei8_v_u8m8_m | ( | ... | ) | __riscv_vsoxei8_v_u8m8_m(__VA_ARGS__) |
| #define vsoxei8_v_u8mf2 | ( | ... | ) | __riscv_vsoxei8_v_u8mf2(__VA_ARGS__) |
| #define vsoxei8_v_u8mf2_m | ( | ... | ) | __riscv_vsoxei8_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxei8_v_u8mf4 | ( | ... | ) | __riscv_vsoxei8_v_u8mf4(__VA_ARGS__) |
| #define vsoxei8_v_u8mf4_m | ( | ... | ) | __riscv_vsoxei8_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxei8_v_u8mf8 | ( | ... | ) | __riscv_vsoxei8_v_u8mf8(__VA_ARGS__) |
| #define vsoxei8_v_u8mf8_m | ( | ... | ) | __riscv_vsoxei8_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f16m1 | ( | ... | ) | __riscv_vsoxseg2ei16_v_f16m1(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f16m2 | ( | ... | ) | __riscv_vsoxseg2ei16_v_f16m2(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f16m2_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_f16m2_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f16m4 | ( | ... | ) | __riscv_vsoxseg2ei16_v_f16m4(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f16m4_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_f16m4_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg2ei16_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg2ei16_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f32m1 | ( | ... | ) | __riscv_vsoxseg2ei16_v_f32m1(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f32m2 | ( | ... | ) | __riscv_vsoxseg2ei16_v_f32m2(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f32m2_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_f32m2_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f32m4 | ( | ... | ) | __riscv_vsoxseg2ei16_v_f32m4(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f32m4_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_f32m4_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg2ei16_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f64m1 | ( | ... | ) | __riscv_vsoxseg2ei16_v_f64m1(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f64m2 | ( | ... | ) | __riscv_vsoxseg2ei16_v_f64m2(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f64m2_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_f64m2_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f64m4 | ( | ... | ) | __riscv_vsoxseg2ei16_v_f64m4(__VA_ARGS__) |
| #define vsoxseg2ei16_v_f64m4_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_f64m4_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i16m1 | ( | ... | ) | __riscv_vsoxseg2ei16_v_i16m1(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i16m2 | ( | ... | ) | __riscv_vsoxseg2ei16_v_i16m2(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i16m2_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_i16m2_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i16m4 | ( | ... | ) | __riscv_vsoxseg2ei16_v_i16m4(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i16m4_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_i16m4_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg2ei16_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg2ei16_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i32m1 | ( | ... | ) | __riscv_vsoxseg2ei16_v_i32m1(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i32m2 | ( | ... | ) | __riscv_vsoxseg2ei16_v_i32m2(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i32m2_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_i32m2_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i32m4 | ( | ... | ) | __riscv_vsoxseg2ei16_v_i32m4(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i32m4_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_i32m4_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg2ei16_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i64m1 | ( | ... | ) | __riscv_vsoxseg2ei16_v_i64m1(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i64m2 | ( | ... | ) | __riscv_vsoxseg2ei16_v_i64m2(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i64m2_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_i64m2_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i64m4 | ( | ... | ) | __riscv_vsoxseg2ei16_v_i64m4(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i64m4_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_i64m4_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i8m1 | ( | ... | ) | __riscv_vsoxseg2ei16_v_i8m1(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i8m2 | ( | ... | ) | __riscv_vsoxseg2ei16_v_i8m2(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i8m2_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_i8m2_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i8m4 | ( | ... | ) | __riscv_vsoxseg2ei16_v_i8m4(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i8m4_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_i8m4_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg2ei16_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg2ei16_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg2ei16_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg2ei16_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u16m1 | ( | ... | ) | __riscv_vsoxseg2ei16_v_u16m1(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u16m2 | ( | ... | ) | __riscv_vsoxseg2ei16_v_u16m2(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u16m2_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_u16m2_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u16m4 | ( | ... | ) | __riscv_vsoxseg2ei16_v_u16m4(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u16m4_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_u16m4_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg2ei16_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg2ei16_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u32m1 | ( | ... | ) | __riscv_vsoxseg2ei16_v_u32m1(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u32m2 | ( | ... | ) | __riscv_vsoxseg2ei16_v_u32m2(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u32m2_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_u32m2_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u32m4 | ( | ... | ) | __riscv_vsoxseg2ei16_v_u32m4(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u32m4_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_u32m4_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg2ei16_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u64m1 | ( | ... | ) | __riscv_vsoxseg2ei16_v_u64m1(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u64m2 | ( | ... | ) | __riscv_vsoxseg2ei16_v_u64m2(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u64m2_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_u64m2_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u64m4 | ( | ... | ) | __riscv_vsoxseg2ei16_v_u64m4(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u64m4_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_u64m4_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u8m1 | ( | ... | ) | __riscv_vsoxseg2ei16_v_u8m1(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u8m2 | ( | ... | ) | __riscv_vsoxseg2ei16_v_u8m2(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u8m2_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_u8m2_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u8m4 | ( | ... | ) | __riscv_vsoxseg2ei16_v_u8m4(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u8m4_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_u8m4_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg2ei16_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg2ei16_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg2ei16_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg2ei16_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg2ei16_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f16m1 | ( | ... | ) | __riscv_vsoxseg2ei32_v_f16m1(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f16m2 | ( | ... | ) | __riscv_vsoxseg2ei32_v_f16m2(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f16m2_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_f16m2_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f16m4 | ( | ... | ) | __riscv_vsoxseg2ei32_v_f16m4(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f16m4_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_f16m4_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg2ei32_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg2ei32_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f32m1 | ( | ... | ) | __riscv_vsoxseg2ei32_v_f32m1(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f32m2 | ( | ... | ) | __riscv_vsoxseg2ei32_v_f32m2(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f32m2_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_f32m2_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f32m4 | ( | ... | ) | __riscv_vsoxseg2ei32_v_f32m4(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f32m4_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_f32m4_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg2ei32_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f64m1 | ( | ... | ) | __riscv_vsoxseg2ei32_v_f64m1(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f64m2 | ( | ... | ) | __riscv_vsoxseg2ei32_v_f64m2(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f64m2_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_f64m2_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f64m4 | ( | ... | ) | __riscv_vsoxseg2ei32_v_f64m4(__VA_ARGS__) |
| #define vsoxseg2ei32_v_f64m4_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_f64m4_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i16m1 | ( | ... | ) | __riscv_vsoxseg2ei32_v_i16m1(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i16m2 | ( | ... | ) | __riscv_vsoxseg2ei32_v_i16m2(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i16m2_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_i16m2_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i16m4 | ( | ... | ) | __riscv_vsoxseg2ei32_v_i16m4(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i16m4_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_i16m4_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg2ei32_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg2ei32_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i32m1 | ( | ... | ) | __riscv_vsoxseg2ei32_v_i32m1(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i32m2 | ( | ... | ) | __riscv_vsoxseg2ei32_v_i32m2(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i32m2_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_i32m2_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i32m4 | ( | ... | ) | __riscv_vsoxseg2ei32_v_i32m4(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i32m4_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_i32m4_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg2ei32_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i64m1 | ( | ... | ) | __riscv_vsoxseg2ei32_v_i64m1(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i64m2 | ( | ... | ) | __riscv_vsoxseg2ei32_v_i64m2(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i64m2_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_i64m2_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i64m4 | ( | ... | ) | __riscv_vsoxseg2ei32_v_i64m4(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i64m4_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_i64m4_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i8m1 | ( | ... | ) | __riscv_vsoxseg2ei32_v_i8m1(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i8m2 | ( | ... | ) | __riscv_vsoxseg2ei32_v_i8m2(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i8m2_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_i8m2_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg2ei32_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg2ei32_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg2ei32_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg2ei32_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u16m1 | ( | ... | ) | __riscv_vsoxseg2ei32_v_u16m1(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u16m2 | ( | ... | ) | __riscv_vsoxseg2ei32_v_u16m2(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u16m2_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_u16m2_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u16m4 | ( | ... | ) | __riscv_vsoxseg2ei32_v_u16m4(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u16m4_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_u16m4_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg2ei32_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg2ei32_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u32m1 | ( | ... | ) | __riscv_vsoxseg2ei32_v_u32m1(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u32m2 | ( | ... | ) | __riscv_vsoxseg2ei32_v_u32m2(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u32m2_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_u32m2_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u32m4 | ( | ... | ) | __riscv_vsoxseg2ei32_v_u32m4(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u32m4_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_u32m4_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg2ei32_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u64m1 | ( | ... | ) | __riscv_vsoxseg2ei32_v_u64m1(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u64m2 | ( | ... | ) | __riscv_vsoxseg2ei32_v_u64m2(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u64m2_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_u64m2_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u64m4 | ( | ... | ) | __riscv_vsoxseg2ei32_v_u64m4(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u64m4_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_u64m4_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u8m1 | ( | ... | ) | __riscv_vsoxseg2ei32_v_u8m1(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u8m2 | ( | ... | ) | __riscv_vsoxseg2ei32_v_u8m2(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u8m2_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_u8m2_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg2ei32_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg2ei32_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg2ei32_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg2ei32_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg2ei32_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f16m1 | ( | ... | ) | __riscv_vsoxseg2ei64_v_f16m1(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f16m2 | ( | ... | ) | __riscv_vsoxseg2ei64_v_f16m2(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f16m2_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_f16m2_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg2ei64_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg2ei64_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f32m1 | ( | ... | ) | __riscv_vsoxseg2ei64_v_f32m1(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f32m2 | ( | ... | ) | __riscv_vsoxseg2ei64_v_f32m2(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f32m2_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_f32m2_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f32m4 | ( | ... | ) | __riscv_vsoxseg2ei64_v_f32m4(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f32m4_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_f32m4_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg2ei64_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f64m1 | ( | ... | ) | __riscv_vsoxseg2ei64_v_f64m1(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f64m2 | ( | ... | ) | __riscv_vsoxseg2ei64_v_f64m2(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f64m2_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_f64m2_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f64m4 | ( | ... | ) | __riscv_vsoxseg2ei64_v_f64m4(__VA_ARGS__) |
| #define vsoxseg2ei64_v_f64m4_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_f64m4_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i16m1 | ( | ... | ) | __riscv_vsoxseg2ei64_v_i16m1(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i16m2 | ( | ... | ) | __riscv_vsoxseg2ei64_v_i16m2(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i16m2_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_i16m2_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg2ei64_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg2ei64_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i32m1 | ( | ... | ) | __riscv_vsoxseg2ei64_v_i32m1(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i32m2 | ( | ... | ) | __riscv_vsoxseg2ei64_v_i32m2(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i32m2_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_i32m2_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i32m4 | ( | ... | ) | __riscv_vsoxseg2ei64_v_i32m4(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i32m4_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_i32m4_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg2ei64_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i64m1 | ( | ... | ) | __riscv_vsoxseg2ei64_v_i64m1(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i64m2 | ( | ... | ) | __riscv_vsoxseg2ei64_v_i64m2(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i64m2_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_i64m2_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i64m4 | ( | ... | ) | __riscv_vsoxseg2ei64_v_i64m4(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i64m4_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_i64m4_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i8m1 | ( | ... | ) | __riscv_vsoxseg2ei64_v_i8m1(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg2ei64_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg2ei64_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg2ei64_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg2ei64_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u16m1 | ( | ... | ) | __riscv_vsoxseg2ei64_v_u16m1(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u16m2 | ( | ... | ) | __riscv_vsoxseg2ei64_v_u16m2(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u16m2_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_u16m2_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg2ei64_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg2ei64_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u32m1 | ( | ... | ) | __riscv_vsoxseg2ei64_v_u32m1(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u32m2 | ( | ... | ) | __riscv_vsoxseg2ei64_v_u32m2(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u32m2_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_u32m2_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u32m4 | ( | ... | ) | __riscv_vsoxseg2ei64_v_u32m4(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u32m4_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_u32m4_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg2ei64_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u64m1 | ( | ... | ) | __riscv_vsoxseg2ei64_v_u64m1(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u64m2 | ( | ... | ) | __riscv_vsoxseg2ei64_v_u64m2(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u64m2_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_u64m2_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u64m4 | ( | ... | ) | __riscv_vsoxseg2ei64_v_u64m4(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u64m4_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_u64m4_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u8m1 | ( | ... | ) | __riscv_vsoxseg2ei64_v_u8m1(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg2ei64_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg2ei64_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg2ei64_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg2ei64_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg2ei64_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f16m1 | ( | ... | ) | __riscv_vsoxseg2ei8_v_f16m1(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f16m2 | ( | ... | ) | __riscv_vsoxseg2ei8_v_f16m2(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f16m2_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_f16m2_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f16m4 | ( | ... | ) | __riscv_vsoxseg2ei8_v_f16m4(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f16m4_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_f16m4_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg2ei8_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg2ei8_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f32m1 | ( | ... | ) | __riscv_vsoxseg2ei8_v_f32m1(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f32m2 | ( | ... | ) | __riscv_vsoxseg2ei8_v_f32m2(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f32m2_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_f32m2_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f32m4 | ( | ... | ) | __riscv_vsoxseg2ei8_v_f32m4(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f32m4_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_f32m4_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg2ei8_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f64m1 | ( | ... | ) | __riscv_vsoxseg2ei8_v_f64m1(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f64m2 | ( | ... | ) | __riscv_vsoxseg2ei8_v_f64m2(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f64m2_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_f64m2_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f64m4 | ( | ... | ) | __riscv_vsoxseg2ei8_v_f64m4(__VA_ARGS__) |
| #define vsoxseg2ei8_v_f64m4_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_f64m4_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i16m1 | ( | ... | ) | __riscv_vsoxseg2ei8_v_i16m1(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i16m2 | ( | ... | ) | __riscv_vsoxseg2ei8_v_i16m2(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i16m2_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_i16m2_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i16m4 | ( | ... | ) | __riscv_vsoxseg2ei8_v_i16m4(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i16m4_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_i16m4_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg2ei8_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg2ei8_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i32m1 | ( | ... | ) | __riscv_vsoxseg2ei8_v_i32m1(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i32m2 | ( | ... | ) | __riscv_vsoxseg2ei8_v_i32m2(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i32m2_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_i32m2_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i32m4 | ( | ... | ) | __riscv_vsoxseg2ei8_v_i32m4(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i32m4_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_i32m4_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg2ei8_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i64m1 | ( | ... | ) | __riscv_vsoxseg2ei8_v_i64m1(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i64m2 | ( | ... | ) | __riscv_vsoxseg2ei8_v_i64m2(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i64m2_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_i64m2_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i64m4 | ( | ... | ) | __riscv_vsoxseg2ei8_v_i64m4(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i64m4_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_i64m4_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i8m1 | ( | ... | ) | __riscv_vsoxseg2ei8_v_i8m1(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i8m2 | ( | ... | ) | __riscv_vsoxseg2ei8_v_i8m2(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i8m2_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_i8m2_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i8m4 | ( | ... | ) | __riscv_vsoxseg2ei8_v_i8m4(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i8m4_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_i8m4_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg2ei8_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg2ei8_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg2ei8_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg2ei8_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u16m1 | ( | ... | ) | __riscv_vsoxseg2ei8_v_u16m1(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u16m2 | ( | ... | ) | __riscv_vsoxseg2ei8_v_u16m2(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u16m2_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_u16m2_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u16m4 | ( | ... | ) | __riscv_vsoxseg2ei8_v_u16m4(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u16m4_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_u16m4_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg2ei8_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg2ei8_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u32m1 | ( | ... | ) | __riscv_vsoxseg2ei8_v_u32m1(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u32m2 | ( | ... | ) | __riscv_vsoxseg2ei8_v_u32m2(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u32m2_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_u32m2_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u32m4 | ( | ... | ) | __riscv_vsoxseg2ei8_v_u32m4(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u32m4_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_u32m4_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg2ei8_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u64m1 | ( | ... | ) | __riscv_vsoxseg2ei8_v_u64m1(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u64m2 | ( | ... | ) | __riscv_vsoxseg2ei8_v_u64m2(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u64m2_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_u64m2_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u64m4 | ( | ... | ) | __riscv_vsoxseg2ei8_v_u64m4(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u64m4_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_u64m4_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u8m1 | ( | ... | ) | __riscv_vsoxseg2ei8_v_u8m1(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u8m2 | ( | ... | ) | __riscv_vsoxseg2ei8_v_u8m2(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u8m2_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_u8m2_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u8m4 | ( | ... | ) | __riscv_vsoxseg2ei8_v_u8m4(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u8m4_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_u8m4_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg2ei8_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg2ei8_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg2ei8_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg2ei8_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg2ei8_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_f16m1 | ( | ... | ) | __riscv_vsoxseg3ei16_v_f16m1(__VA_ARGS__) |
| #define vsoxseg3ei16_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_f16m2 | ( | ... | ) | __riscv_vsoxseg3ei16_v_f16m2(__VA_ARGS__) |
| #define vsoxseg3ei16_v_f16m2_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_f16m2_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg3ei16_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg3ei16_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg3ei16_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg3ei16_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_f32m1 | ( | ... | ) | __riscv_vsoxseg3ei16_v_f32m1(__VA_ARGS__) |
| #define vsoxseg3ei16_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_f32m2 | ( | ... | ) | __riscv_vsoxseg3ei16_v_f32m2(__VA_ARGS__) |
| #define vsoxseg3ei16_v_f32m2_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_f32m2_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg3ei16_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg3ei16_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_f64m1 | ( | ... | ) | __riscv_vsoxseg3ei16_v_f64m1(__VA_ARGS__) |
| #define vsoxseg3ei16_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_f64m2 | ( | ... | ) | __riscv_vsoxseg3ei16_v_f64m2(__VA_ARGS__) |
| #define vsoxseg3ei16_v_f64m2_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_f64m2_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i16m1 | ( | ... | ) | __riscv_vsoxseg3ei16_v_i16m1(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i16m2 | ( | ... | ) | __riscv_vsoxseg3ei16_v_i16m2(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i16m2_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_i16m2_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg3ei16_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg3ei16_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i32m1 | ( | ... | ) | __riscv_vsoxseg3ei16_v_i32m1(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i32m2 | ( | ... | ) | __riscv_vsoxseg3ei16_v_i32m2(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i32m2_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_i32m2_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg3ei16_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i64m1 | ( | ... | ) | __riscv_vsoxseg3ei16_v_i64m1(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i64m2 | ( | ... | ) | __riscv_vsoxseg3ei16_v_i64m2(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i64m2_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_i64m2_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i8m1 | ( | ... | ) | __riscv_vsoxseg3ei16_v_i8m1(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i8m2 | ( | ... | ) | __riscv_vsoxseg3ei16_v_i8m2(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i8m2_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_i8m2_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg3ei16_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg3ei16_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg3ei16_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg3ei16_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u16m1 | ( | ... | ) | __riscv_vsoxseg3ei16_v_u16m1(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u16m2 | ( | ... | ) | __riscv_vsoxseg3ei16_v_u16m2(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u16m2_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_u16m2_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg3ei16_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg3ei16_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u32m1 | ( | ... | ) | __riscv_vsoxseg3ei16_v_u32m1(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u32m2 | ( | ... | ) | __riscv_vsoxseg3ei16_v_u32m2(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u32m2_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_u32m2_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg3ei16_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u64m1 | ( | ... | ) | __riscv_vsoxseg3ei16_v_u64m1(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u64m2 | ( | ... | ) | __riscv_vsoxseg3ei16_v_u64m2(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u64m2_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_u64m2_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u8m1 | ( | ... | ) | __riscv_vsoxseg3ei16_v_u8m1(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u8m2 | ( | ... | ) | __riscv_vsoxseg3ei16_v_u8m2(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u8m2_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_u8m2_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg3ei16_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg3ei16_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg3ei16_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg3ei16_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg3ei16_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_f16m1 | ( | ... | ) | __riscv_vsoxseg3ei32_v_f16m1(__VA_ARGS__) |
| #define vsoxseg3ei32_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_f16m2 | ( | ... | ) | __riscv_vsoxseg3ei32_v_f16m2(__VA_ARGS__) |
| #define vsoxseg3ei32_v_f16m2_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_f16m2_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg3ei32_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg3ei32_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg3ei32_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg3ei32_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_f32m1 | ( | ... | ) | __riscv_vsoxseg3ei32_v_f32m1(__VA_ARGS__) |
| #define vsoxseg3ei32_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_f32m2 | ( | ... | ) | __riscv_vsoxseg3ei32_v_f32m2(__VA_ARGS__) |
| #define vsoxseg3ei32_v_f32m2_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_f32m2_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg3ei32_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg3ei32_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_f64m1 | ( | ... | ) | __riscv_vsoxseg3ei32_v_f64m1(__VA_ARGS__) |
| #define vsoxseg3ei32_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_f64m2 | ( | ... | ) | __riscv_vsoxseg3ei32_v_f64m2(__VA_ARGS__) |
| #define vsoxseg3ei32_v_f64m2_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_f64m2_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i16m1 | ( | ... | ) | __riscv_vsoxseg3ei32_v_i16m1(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i16m2 | ( | ... | ) | __riscv_vsoxseg3ei32_v_i16m2(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i16m2_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_i16m2_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg3ei32_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg3ei32_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i32m1 | ( | ... | ) | __riscv_vsoxseg3ei32_v_i32m1(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i32m2 | ( | ... | ) | __riscv_vsoxseg3ei32_v_i32m2(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i32m2_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_i32m2_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg3ei32_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i64m1 | ( | ... | ) | __riscv_vsoxseg3ei32_v_i64m1(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i64m2 | ( | ... | ) | __riscv_vsoxseg3ei32_v_i64m2(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i64m2_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_i64m2_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i8m1 | ( | ... | ) | __riscv_vsoxseg3ei32_v_i8m1(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i8m2 | ( | ... | ) | __riscv_vsoxseg3ei32_v_i8m2(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i8m2_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_i8m2_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg3ei32_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg3ei32_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg3ei32_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg3ei32_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u16m1 | ( | ... | ) | __riscv_vsoxseg3ei32_v_u16m1(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u16m2 | ( | ... | ) | __riscv_vsoxseg3ei32_v_u16m2(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u16m2_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_u16m2_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg3ei32_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg3ei32_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u32m1 | ( | ... | ) | __riscv_vsoxseg3ei32_v_u32m1(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u32m2 | ( | ... | ) | __riscv_vsoxseg3ei32_v_u32m2(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u32m2_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_u32m2_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg3ei32_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u64m1 | ( | ... | ) | __riscv_vsoxseg3ei32_v_u64m1(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u64m2 | ( | ... | ) | __riscv_vsoxseg3ei32_v_u64m2(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u64m2_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_u64m2_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u8m1 | ( | ... | ) | __riscv_vsoxseg3ei32_v_u8m1(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u8m2 | ( | ... | ) | __riscv_vsoxseg3ei32_v_u8m2(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u8m2_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_u8m2_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg3ei32_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg3ei32_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg3ei32_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg3ei32_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg3ei32_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_f16m1 | ( | ... | ) | __riscv_vsoxseg3ei64_v_f16m1(__VA_ARGS__) |
| #define vsoxseg3ei64_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_f16m2 | ( | ... | ) | __riscv_vsoxseg3ei64_v_f16m2(__VA_ARGS__) |
| #define vsoxseg3ei64_v_f16m2_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_f16m2_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg3ei64_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg3ei64_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg3ei64_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg3ei64_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_f32m1 | ( | ... | ) | __riscv_vsoxseg3ei64_v_f32m1(__VA_ARGS__) |
| #define vsoxseg3ei64_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_f32m2 | ( | ... | ) | __riscv_vsoxseg3ei64_v_f32m2(__VA_ARGS__) |
| #define vsoxseg3ei64_v_f32m2_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_f32m2_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg3ei64_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg3ei64_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_f64m1 | ( | ... | ) | __riscv_vsoxseg3ei64_v_f64m1(__VA_ARGS__) |
| #define vsoxseg3ei64_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_f64m2 | ( | ... | ) | __riscv_vsoxseg3ei64_v_f64m2(__VA_ARGS__) |
| #define vsoxseg3ei64_v_f64m2_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_f64m2_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i16m1 | ( | ... | ) | __riscv_vsoxseg3ei64_v_i16m1(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i16m2 | ( | ... | ) | __riscv_vsoxseg3ei64_v_i16m2(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i16m2_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_i16m2_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg3ei64_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg3ei64_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i32m1 | ( | ... | ) | __riscv_vsoxseg3ei64_v_i32m1(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i32m2 | ( | ... | ) | __riscv_vsoxseg3ei64_v_i32m2(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i32m2_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_i32m2_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg3ei64_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i64m1 | ( | ... | ) | __riscv_vsoxseg3ei64_v_i64m1(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i64m2 | ( | ... | ) | __riscv_vsoxseg3ei64_v_i64m2(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i64m2_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_i64m2_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i8m1 | ( | ... | ) | __riscv_vsoxseg3ei64_v_i8m1(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg3ei64_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg3ei64_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg3ei64_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg3ei64_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u16m1 | ( | ... | ) | __riscv_vsoxseg3ei64_v_u16m1(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u16m2 | ( | ... | ) | __riscv_vsoxseg3ei64_v_u16m2(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u16m2_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_u16m2_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg3ei64_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg3ei64_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u32m1 | ( | ... | ) | __riscv_vsoxseg3ei64_v_u32m1(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u32m2 | ( | ... | ) | __riscv_vsoxseg3ei64_v_u32m2(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u32m2_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_u32m2_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg3ei64_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u64m1 | ( | ... | ) | __riscv_vsoxseg3ei64_v_u64m1(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u64m2 | ( | ... | ) | __riscv_vsoxseg3ei64_v_u64m2(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u64m2_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_u64m2_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u8m1 | ( | ... | ) | __riscv_vsoxseg3ei64_v_u8m1(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg3ei64_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg3ei64_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg3ei64_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg3ei64_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg3ei64_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_f16m1 | ( | ... | ) | __riscv_vsoxseg3ei8_v_f16m1(__VA_ARGS__) |
| #define vsoxseg3ei8_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_f16m2 | ( | ... | ) | __riscv_vsoxseg3ei8_v_f16m2(__VA_ARGS__) |
| #define vsoxseg3ei8_v_f16m2_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_f16m2_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg3ei8_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg3ei8_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg3ei8_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg3ei8_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_f32m1 | ( | ... | ) | __riscv_vsoxseg3ei8_v_f32m1(__VA_ARGS__) |
| #define vsoxseg3ei8_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_f32m2 | ( | ... | ) | __riscv_vsoxseg3ei8_v_f32m2(__VA_ARGS__) |
| #define vsoxseg3ei8_v_f32m2_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_f32m2_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg3ei8_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg3ei8_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_f64m1 | ( | ... | ) | __riscv_vsoxseg3ei8_v_f64m1(__VA_ARGS__) |
| #define vsoxseg3ei8_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_f64m2 | ( | ... | ) | __riscv_vsoxseg3ei8_v_f64m2(__VA_ARGS__) |
| #define vsoxseg3ei8_v_f64m2_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_f64m2_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i16m1 | ( | ... | ) | __riscv_vsoxseg3ei8_v_i16m1(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i16m2 | ( | ... | ) | __riscv_vsoxseg3ei8_v_i16m2(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i16m2_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_i16m2_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg3ei8_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg3ei8_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i32m1 | ( | ... | ) | __riscv_vsoxseg3ei8_v_i32m1(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i32m2 | ( | ... | ) | __riscv_vsoxseg3ei8_v_i32m2(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i32m2_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_i32m2_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg3ei8_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i64m1 | ( | ... | ) | __riscv_vsoxseg3ei8_v_i64m1(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i64m2 | ( | ... | ) | __riscv_vsoxseg3ei8_v_i64m2(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i64m2_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_i64m2_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i8m1 | ( | ... | ) | __riscv_vsoxseg3ei8_v_i8m1(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i8m2 | ( | ... | ) | __riscv_vsoxseg3ei8_v_i8m2(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i8m2_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_i8m2_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg3ei8_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg3ei8_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg3ei8_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg3ei8_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u16m1 | ( | ... | ) | __riscv_vsoxseg3ei8_v_u16m1(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u16m2 | ( | ... | ) | __riscv_vsoxseg3ei8_v_u16m2(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u16m2_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_u16m2_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg3ei8_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg3ei8_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u32m1 | ( | ... | ) | __riscv_vsoxseg3ei8_v_u32m1(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u32m2 | ( | ... | ) | __riscv_vsoxseg3ei8_v_u32m2(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u32m2_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_u32m2_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg3ei8_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u64m1 | ( | ... | ) | __riscv_vsoxseg3ei8_v_u64m1(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u64m2 | ( | ... | ) | __riscv_vsoxseg3ei8_v_u64m2(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u64m2_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_u64m2_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u8m1 | ( | ... | ) | __riscv_vsoxseg3ei8_v_u8m1(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u8m2 | ( | ... | ) | __riscv_vsoxseg3ei8_v_u8m2(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u8m2_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_u8m2_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg3ei8_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg3ei8_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg3ei8_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg3ei8_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg3ei8_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_f16m1 | ( | ... | ) | __riscv_vsoxseg4ei16_v_f16m1(__VA_ARGS__) |
| #define vsoxseg4ei16_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_f16m2 | ( | ... | ) | __riscv_vsoxseg4ei16_v_f16m2(__VA_ARGS__) |
| #define vsoxseg4ei16_v_f16m2_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_f16m2_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg4ei16_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg4ei16_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg4ei16_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg4ei16_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_f32m1 | ( | ... | ) | __riscv_vsoxseg4ei16_v_f32m1(__VA_ARGS__) |
| #define vsoxseg4ei16_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_f32m2 | ( | ... | ) | __riscv_vsoxseg4ei16_v_f32m2(__VA_ARGS__) |
| #define vsoxseg4ei16_v_f32m2_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_f32m2_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg4ei16_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg4ei16_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_f64m1 | ( | ... | ) | __riscv_vsoxseg4ei16_v_f64m1(__VA_ARGS__) |
| #define vsoxseg4ei16_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_f64m2 | ( | ... | ) | __riscv_vsoxseg4ei16_v_f64m2(__VA_ARGS__) |
| #define vsoxseg4ei16_v_f64m2_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_f64m2_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i16m1 | ( | ... | ) | __riscv_vsoxseg4ei16_v_i16m1(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i16m2 | ( | ... | ) | __riscv_vsoxseg4ei16_v_i16m2(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i16m2_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_i16m2_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg4ei16_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg4ei16_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i32m1 | ( | ... | ) | __riscv_vsoxseg4ei16_v_i32m1(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i32m2 | ( | ... | ) | __riscv_vsoxseg4ei16_v_i32m2(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i32m2_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_i32m2_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg4ei16_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i64m1 | ( | ... | ) | __riscv_vsoxseg4ei16_v_i64m1(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i64m2 | ( | ... | ) | __riscv_vsoxseg4ei16_v_i64m2(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i64m2_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_i64m2_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i8m1 | ( | ... | ) | __riscv_vsoxseg4ei16_v_i8m1(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i8m2 | ( | ... | ) | __riscv_vsoxseg4ei16_v_i8m2(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i8m2_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_i8m2_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg4ei16_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg4ei16_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg4ei16_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg4ei16_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u16m1 | ( | ... | ) | __riscv_vsoxseg4ei16_v_u16m1(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u16m2 | ( | ... | ) | __riscv_vsoxseg4ei16_v_u16m2(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u16m2_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_u16m2_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg4ei16_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg4ei16_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u32m1 | ( | ... | ) | __riscv_vsoxseg4ei16_v_u32m1(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u32m2 | ( | ... | ) | __riscv_vsoxseg4ei16_v_u32m2(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u32m2_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_u32m2_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg4ei16_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u64m1 | ( | ... | ) | __riscv_vsoxseg4ei16_v_u64m1(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u64m2 | ( | ... | ) | __riscv_vsoxseg4ei16_v_u64m2(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u64m2_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_u64m2_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u8m1 | ( | ... | ) | __riscv_vsoxseg4ei16_v_u8m1(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u8m2 | ( | ... | ) | __riscv_vsoxseg4ei16_v_u8m2(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u8m2_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_u8m2_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg4ei16_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg4ei16_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg4ei16_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg4ei16_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg4ei16_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_f16m1 | ( | ... | ) | __riscv_vsoxseg4ei32_v_f16m1(__VA_ARGS__) |
| #define vsoxseg4ei32_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_f16m2 | ( | ... | ) | __riscv_vsoxseg4ei32_v_f16m2(__VA_ARGS__) |
| #define vsoxseg4ei32_v_f16m2_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_f16m2_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg4ei32_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg4ei32_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg4ei32_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg4ei32_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_f32m1 | ( | ... | ) | __riscv_vsoxseg4ei32_v_f32m1(__VA_ARGS__) |
| #define vsoxseg4ei32_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_f32m2 | ( | ... | ) | __riscv_vsoxseg4ei32_v_f32m2(__VA_ARGS__) |
| #define vsoxseg4ei32_v_f32m2_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_f32m2_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg4ei32_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg4ei32_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_f64m1 | ( | ... | ) | __riscv_vsoxseg4ei32_v_f64m1(__VA_ARGS__) |
| #define vsoxseg4ei32_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_f64m2 | ( | ... | ) | __riscv_vsoxseg4ei32_v_f64m2(__VA_ARGS__) |
| #define vsoxseg4ei32_v_f64m2_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_f64m2_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i16m1 | ( | ... | ) | __riscv_vsoxseg4ei32_v_i16m1(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i16m2 | ( | ... | ) | __riscv_vsoxseg4ei32_v_i16m2(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i16m2_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_i16m2_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg4ei32_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg4ei32_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i32m1 | ( | ... | ) | __riscv_vsoxseg4ei32_v_i32m1(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i32m2 | ( | ... | ) | __riscv_vsoxseg4ei32_v_i32m2(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i32m2_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_i32m2_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg4ei32_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i64m1 | ( | ... | ) | __riscv_vsoxseg4ei32_v_i64m1(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i64m2 | ( | ... | ) | __riscv_vsoxseg4ei32_v_i64m2(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i64m2_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_i64m2_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i8m1 | ( | ... | ) | __riscv_vsoxseg4ei32_v_i8m1(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i8m2 | ( | ... | ) | __riscv_vsoxseg4ei32_v_i8m2(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i8m2_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_i8m2_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg4ei32_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg4ei32_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg4ei32_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg4ei32_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u16m1 | ( | ... | ) | __riscv_vsoxseg4ei32_v_u16m1(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u16m2 | ( | ... | ) | __riscv_vsoxseg4ei32_v_u16m2(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u16m2_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_u16m2_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg4ei32_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg4ei32_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u32m1 | ( | ... | ) | __riscv_vsoxseg4ei32_v_u32m1(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u32m2 | ( | ... | ) | __riscv_vsoxseg4ei32_v_u32m2(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u32m2_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_u32m2_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg4ei32_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u64m1 | ( | ... | ) | __riscv_vsoxseg4ei32_v_u64m1(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u64m2 | ( | ... | ) | __riscv_vsoxseg4ei32_v_u64m2(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u64m2_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_u64m2_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u8m1 | ( | ... | ) | __riscv_vsoxseg4ei32_v_u8m1(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u8m2 | ( | ... | ) | __riscv_vsoxseg4ei32_v_u8m2(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u8m2_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_u8m2_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg4ei32_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg4ei32_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg4ei32_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg4ei32_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg4ei32_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_f16m1 | ( | ... | ) | __riscv_vsoxseg4ei64_v_f16m1(__VA_ARGS__) |
| #define vsoxseg4ei64_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_f16m2 | ( | ... | ) | __riscv_vsoxseg4ei64_v_f16m2(__VA_ARGS__) |
| #define vsoxseg4ei64_v_f16m2_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_f16m2_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg4ei64_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg4ei64_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg4ei64_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg4ei64_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_f32m1 | ( | ... | ) | __riscv_vsoxseg4ei64_v_f32m1(__VA_ARGS__) |
| #define vsoxseg4ei64_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_f32m2 | ( | ... | ) | __riscv_vsoxseg4ei64_v_f32m2(__VA_ARGS__) |
| #define vsoxseg4ei64_v_f32m2_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_f32m2_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg4ei64_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg4ei64_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_f64m1 | ( | ... | ) | __riscv_vsoxseg4ei64_v_f64m1(__VA_ARGS__) |
| #define vsoxseg4ei64_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_f64m2 | ( | ... | ) | __riscv_vsoxseg4ei64_v_f64m2(__VA_ARGS__) |
| #define vsoxseg4ei64_v_f64m2_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_f64m2_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i16m1 | ( | ... | ) | __riscv_vsoxseg4ei64_v_i16m1(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i16m2 | ( | ... | ) | __riscv_vsoxseg4ei64_v_i16m2(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i16m2_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_i16m2_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg4ei64_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg4ei64_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i32m1 | ( | ... | ) | __riscv_vsoxseg4ei64_v_i32m1(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i32m2 | ( | ... | ) | __riscv_vsoxseg4ei64_v_i32m2(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i32m2_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_i32m2_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg4ei64_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i64m1 | ( | ... | ) | __riscv_vsoxseg4ei64_v_i64m1(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i64m2 | ( | ... | ) | __riscv_vsoxseg4ei64_v_i64m2(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i64m2_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_i64m2_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i8m1 | ( | ... | ) | __riscv_vsoxseg4ei64_v_i8m1(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg4ei64_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg4ei64_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg4ei64_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg4ei64_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u16m1 | ( | ... | ) | __riscv_vsoxseg4ei64_v_u16m1(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u16m2 | ( | ... | ) | __riscv_vsoxseg4ei64_v_u16m2(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u16m2_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_u16m2_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg4ei64_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg4ei64_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u32m1 | ( | ... | ) | __riscv_vsoxseg4ei64_v_u32m1(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u32m2 | ( | ... | ) | __riscv_vsoxseg4ei64_v_u32m2(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u32m2_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_u32m2_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg4ei64_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u64m1 | ( | ... | ) | __riscv_vsoxseg4ei64_v_u64m1(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u64m2 | ( | ... | ) | __riscv_vsoxseg4ei64_v_u64m2(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u64m2_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_u64m2_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u8m1 | ( | ... | ) | __riscv_vsoxseg4ei64_v_u8m1(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg4ei64_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg4ei64_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg4ei64_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg4ei64_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg4ei64_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_f16m1 | ( | ... | ) | __riscv_vsoxseg4ei8_v_f16m1(__VA_ARGS__) |
| #define vsoxseg4ei8_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_f16m2 | ( | ... | ) | __riscv_vsoxseg4ei8_v_f16m2(__VA_ARGS__) |
| #define vsoxseg4ei8_v_f16m2_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_f16m2_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg4ei8_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg4ei8_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg4ei8_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg4ei8_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_f32m1 | ( | ... | ) | __riscv_vsoxseg4ei8_v_f32m1(__VA_ARGS__) |
| #define vsoxseg4ei8_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_f32m2 | ( | ... | ) | __riscv_vsoxseg4ei8_v_f32m2(__VA_ARGS__) |
| #define vsoxseg4ei8_v_f32m2_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_f32m2_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg4ei8_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg4ei8_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_f64m1 | ( | ... | ) | __riscv_vsoxseg4ei8_v_f64m1(__VA_ARGS__) |
| #define vsoxseg4ei8_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_f64m2 | ( | ... | ) | __riscv_vsoxseg4ei8_v_f64m2(__VA_ARGS__) |
| #define vsoxseg4ei8_v_f64m2_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_f64m2_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i16m1 | ( | ... | ) | __riscv_vsoxseg4ei8_v_i16m1(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i16m2 | ( | ... | ) | __riscv_vsoxseg4ei8_v_i16m2(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i16m2_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_i16m2_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg4ei8_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg4ei8_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i32m1 | ( | ... | ) | __riscv_vsoxseg4ei8_v_i32m1(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i32m2 | ( | ... | ) | __riscv_vsoxseg4ei8_v_i32m2(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i32m2_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_i32m2_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg4ei8_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i64m1 | ( | ... | ) | __riscv_vsoxseg4ei8_v_i64m1(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i64m2 | ( | ... | ) | __riscv_vsoxseg4ei8_v_i64m2(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i64m2_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_i64m2_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i8m1 | ( | ... | ) | __riscv_vsoxseg4ei8_v_i8m1(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i8m2 | ( | ... | ) | __riscv_vsoxseg4ei8_v_i8m2(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i8m2_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_i8m2_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg4ei8_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg4ei8_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg4ei8_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg4ei8_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u16m1 | ( | ... | ) | __riscv_vsoxseg4ei8_v_u16m1(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u16m2 | ( | ... | ) | __riscv_vsoxseg4ei8_v_u16m2(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u16m2_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_u16m2_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg4ei8_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg4ei8_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u32m1 | ( | ... | ) | __riscv_vsoxseg4ei8_v_u32m1(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u32m2 | ( | ... | ) | __riscv_vsoxseg4ei8_v_u32m2(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u32m2_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_u32m2_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg4ei8_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u64m1 | ( | ... | ) | __riscv_vsoxseg4ei8_v_u64m1(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u64m2 | ( | ... | ) | __riscv_vsoxseg4ei8_v_u64m2(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u64m2_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_u64m2_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u8m1 | ( | ... | ) | __riscv_vsoxseg4ei8_v_u8m1(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u8m2 | ( | ... | ) | __riscv_vsoxseg4ei8_v_u8m2(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u8m2_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_u8m2_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg4ei8_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg4ei8_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg4ei8_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg4ei8_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg4ei8_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_f16m1 | ( | ... | ) | __riscv_vsoxseg5ei16_v_f16m1(__VA_ARGS__) |
| #define vsoxseg5ei16_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg5ei16_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg5ei16_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg5ei16_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg5ei16_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_f32m1 | ( | ... | ) | __riscv_vsoxseg5ei16_v_f32m1(__VA_ARGS__) |
| #define vsoxseg5ei16_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg5ei16_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg5ei16_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_f64m1 | ( | ... | ) | __riscv_vsoxseg5ei16_v_f64m1(__VA_ARGS__) |
| #define vsoxseg5ei16_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_i16m1 | ( | ... | ) | __riscv_vsoxseg5ei16_v_i16m1(__VA_ARGS__) |
| #define vsoxseg5ei16_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg5ei16_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg5ei16_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg5ei16_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg5ei16_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_i32m1 | ( | ... | ) | __riscv_vsoxseg5ei16_v_i32m1(__VA_ARGS__) |
| #define vsoxseg5ei16_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg5ei16_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg5ei16_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_i64m1 | ( | ... | ) | __riscv_vsoxseg5ei16_v_i64m1(__VA_ARGS__) |
| #define vsoxseg5ei16_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_i8m1 | ( | ... | ) | __riscv_vsoxseg5ei16_v_i8m1(__VA_ARGS__) |
| #define vsoxseg5ei16_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg5ei16_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg5ei16_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg5ei16_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg5ei16_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg5ei16_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg5ei16_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_u16m1 | ( | ... | ) | __riscv_vsoxseg5ei16_v_u16m1(__VA_ARGS__) |
| #define vsoxseg5ei16_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg5ei16_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg5ei16_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg5ei16_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg5ei16_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_u32m1 | ( | ... | ) | __riscv_vsoxseg5ei16_v_u32m1(__VA_ARGS__) |
| #define vsoxseg5ei16_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg5ei16_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg5ei16_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_u64m1 | ( | ... | ) | __riscv_vsoxseg5ei16_v_u64m1(__VA_ARGS__) |
| #define vsoxseg5ei16_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_u8m1 | ( | ... | ) | __riscv_vsoxseg5ei16_v_u8m1(__VA_ARGS__) |
| #define vsoxseg5ei16_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg5ei16_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg5ei16_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg5ei16_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg5ei16_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg5ei16_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg5ei16_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg5ei16_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg5ei16_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_f16m1 | ( | ... | ) | __riscv_vsoxseg5ei32_v_f16m1(__VA_ARGS__) |
| #define vsoxseg5ei32_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg5ei32_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg5ei32_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg5ei32_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg5ei32_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_f32m1 | ( | ... | ) | __riscv_vsoxseg5ei32_v_f32m1(__VA_ARGS__) |
| #define vsoxseg5ei32_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg5ei32_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg5ei32_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_f64m1 | ( | ... | ) | __riscv_vsoxseg5ei32_v_f64m1(__VA_ARGS__) |
| #define vsoxseg5ei32_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_i16m1 | ( | ... | ) | __riscv_vsoxseg5ei32_v_i16m1(__VA_ARGS__) |
| #define vsoxseg5ei32_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg5ei32_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg5ei32_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg5ei32_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg5ei32_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_i32m1 | ( | ... | ) | __riscv_vsoxseg5ei32_v_i32m1(__VA_ARGS__) |
| #define vsoxseg5ei32_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg5ei32_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg5ei32_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_i64m1 | ( | ... | ) | __riscv_vsoxseg5ei32_v_i64m1(__VA_ARGS__) |
| #define vsoxseg5ei32_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_i8m1 | ( | ... | ) | __riscv_vsoxseg5ei32_v_i8m1(__VA_ARGS__) |
| #define vsoxseg5ei32_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg5ei32_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg5ei32_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg5ei32_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg5ei32_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg5ei32_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg5ei32_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_u16m1 | ( | ... | ) | __riscv_vsoxseg5ei32_v_u16m1(__VA_ARGS__) |
| #define vsoxseg5ei32_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg5ei32_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg5ei32_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg5ei32_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg5ei32_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_u32m1 | ( | ... | ) | __riscv_vsoxseg5ei32_v_u32m1(__VA_ARGS__) |
| #define vsoxseg5ei32_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg5ei32_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg5ei32_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_u64m1 | ( | ... | ) | __riscv_vsoxseg5ei32_v_u64m1(__VA_ARGS__) |
| #define vsoxseg5ei32_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_u8m1 | ( | ... | ) | __riscv_vsoxseg5ei32_v_u8m1(__VA_ARGS__) |
| #define vsoxseg5ei32_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg5ei32_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg5ei32_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg5ei32_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg5ei32_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg5ei32_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg5ei32_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg5ei32_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg5ei32_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_f16m1 | ( | ... | ) | __riscv_vsoxseg5ei64_v_f16m1(__VA_ARGS__) |
| #define vsoxseg5ei64_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg5ei64_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg5ei64_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg5ei64_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg5ei64_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_f32m1 | ( | ... | ) | __riscv_vsoxseg5ei64_v_f32m1(__VA_ARGS__) |
| #define vsoxseg5ei64_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg5ei64_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg5ei64_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_f64m1 | ( | ... | ) | __riscv_vsoxseg5ei64_v_f64m1(__VA_ARGS__) |
| #define vsoxseg5ei64_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_i16m1 | ( | ... | ) | __riscv_vsoxseg5ei64_v_i16m1(__VA_ARGS__) |
| #define vsoxseg5ei64_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg5ei64_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg5ei64_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg5ei64_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg5ei64_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_i32m1 | ( | ... | ) | __riscv_vsoxseg5ei64_v_i32m1(__VA_ARGS__) |
| #define vsoxseg5ei64_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg5ei64_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg5ei64_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_i64m1 | ( | ... | ) | __riscv_vsoxseg5ei64_v_i64m1(__VA_ARGS__) |
| #define vsoxseg5ei64_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_i8m1 | ( | ... | ) | __riscv_vsoxseg5ei64_v_i8m1(__VA_ARGS__) |
| #define vsoxseg5ei64_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg5ei64_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg5ei64_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg5ei64_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg5ei64_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg5ei64_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg5ei64_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_u16m1 | ( | ... | ) | __riscv_vsoxseg5ei64_v_u16m1(__VA_ARGS__) |
| #define vsoxseg5ei64_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg5ei64_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg5ei64_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg5ei64_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg5ei64_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_u32m1 | ( | ... | ) | __riscv_vsoxseg5ei64_v_u32m1(__VA_ARGS__) |
| #define vsoxseg5ei64_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg5ei64_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg5ei64_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_u64m1 | ( | ... | ) | __riscv_vsoxseg5ei64_v_u64m1(__VA_ARGS__) |
| #define vsoxseg5ei64_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_u8m1 | ( | ... | ) | __riscv_vsoxseg5ei64_v_u8m1(__VA_ARGS__) |
| #define vsoxseg5ei64_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg5ei64_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg5ei64_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg5ei64_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg5ei64_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg5ei64_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg5ei64_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg5ei64_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg5ei64_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_f16m1 | ( | ... | ) | __riscv_vsoxseg5ei8_v_f16m1(__VA_ARGS__) |
| #define vsoxseg5ei8_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg5ei8_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg5ei8_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg5ei8_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg5ei8_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_f32m1 | ( | ... | ) | __riscv_vsoxseg5ei8_v_f32m1(__VA_ARGS__) |
| #define vsoxseg5ei8_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg5ei8_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg5ei8_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_f64m1 | ( | ... | ) | __riscv_vsoxseg5ei8_v_f64m1(__VA_ARGS__) |
| #define vsoxseg5ei8_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_i16m1 | ( | ... | ) | __riscv_vsoxseg5ei8_v_i16m1(__VA_ARGS__) |
| #define vsoxseg5ei8_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg5ei8_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg5ei8_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg5ei8_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg5ei8_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_i32m1 | ( | ... | ) | __riscv_vsoxseg5ei8_v_i32m1(__VA_ARGS__) |
| #define vsoxseg5ei8_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg5ei8_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg5ei8_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_i64m1 | ( | ... | ) | __riscv_vsoxseg5ei8_v_i64m1(__VA_ARGS__) |
| #define vsoxseg5ei8_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_i8m1 | ( | ... | ) | __riscv_vsoxseg5ei8_v_i8m1(__VA_ARGS__) |
| #define vsoxseg5ei8_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg5ei8_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg5ei8_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg5ei8_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg5ei8_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg5ei8_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg5ei8_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_u16m1 | ( | ... | ) | __riscv_vsoxseg5ei8_v_u16m1(__VA_ARGS__) |
| #define vsoxseg5ei8_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg5ei8_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg5ei8_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg5ei8_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg5ei8_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_u32m1 | ( | ... | ) | __riscv_vsoxseg5ei8_v_u32m1(__VA_ARGS__) |
| #define vsoxseg5ei8_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg5ei8_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg5ei8_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_u64m1 | ( | ... | ) | __riscv_vsoxseg5ei8_v_u64m1(__VA_ARGS__) |
| #define vsoxseg5ei8_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_u8m1 | ( | ... | ) | __riscv_vsoxseg5ei8_v_u8m1(__VA_ARGS__) |
| #define vsoxseg5ei8_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg5ei8_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg5ei8_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg5ei8_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg5ei8_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg5ei8_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg5ei8_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg5ei8_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg5ei8_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_f16m1 | ( | ... | ) | __riscv_vsoxseg6ei16_v_f16m1(__VA_ARGS__) |
| #define vsoxseg6ei16_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg6ei16_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg6ei16_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg6ei16_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg6ei16_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_f32m1 | ( | ... | ) | __riscv_vsoxseg6ei16_v_f32m1(__VA_ARGS__) |
| #define vsoxseg6ei16_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg6ei16_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg6ei16_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_f64m1 | ( | ... | ) | __riscv_vsoxseg6ei16_v_f64m1(__VA_ARGS__) |
| #define vsoxseg6ei16_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_i16m1 | ( | ... | ) | __riscv_vsoxseg6ei16_v_i16m1(__VA_ARGS__) |
| #define vsoxseg6ei16_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg6ei16_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg6ei16_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg6ei16_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg6ei16_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_i32m1 | ( | ... | ) | __riscv_vsoxseg6ei16_v_i32m1(__VA_ARGS__) |
| #define vsoxseg6ei16_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg6ei16_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg6ei16_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_i64m1 | ( | ... | ) | __riscv_vsoxseg6ei16_v_i64m1(__VA_ARGS__) |
| #define vsoxseg6ei16_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_i8m1 | ( | ... | ) | __riscv_vsoxseg6ei16_v_i8m1(__VA_ARGS__) |
| #define vsoxseg6ei16_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg6ei16_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg6ei16_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg6ei16_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg6ei16_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg6ei16_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg6ei16_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_u16m1 | ( | ... | ) | __riscv_vsoxseg6ei16_v_u16m1(__VA_ARGS__) |
| #define vsoxseg6ei16_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg6ei16_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg6ei16_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg6ei16_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg6ei16_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_u32m1 | ( | ... | ) | __riscv_vsoxseg6ei16_v_u32m1(__VA_ARGS__) |
| #define vsoxseg6ei16_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg6ei16_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg6ei16_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_u64m1 | ( | ... | ) | __riscv_vsoxseg6ei16_v_u64m1(__VA_ARGS__) |
| #define vsoxseg6ei16_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_u8m1 | ( | ... | ) | __riscv_vsoxseg6ei16_v_u8m1(__VA_ARGS__) |
| #define vsoxseg6ei16_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg6ei16_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg6ei16_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg6ei16_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg6ei16_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg6ei16_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg6ei16_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg6ei16_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg6ei16_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_f16m1 | ( | ... | ) | __riscv_vsoxseg6ei32_v_f16m1(__VA_ARGS__) |
| #define vsoxseg6ei32_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg6ei32_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg6ei32_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg6ei32_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg6ei32_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_f32m1 | ( | ... | ) | __riscv_vsoxseg6ei32_v_f32m1(__VA_ARGS__) |
| #define vsoxseg6ei32_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg6ei32_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg6ei32_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_f64m1 | ( | ... | ) | __riscv_vsoxseg6ei32_v_f64m1(__VA_ARGS__) |
| #define vsoxseg6ei32_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_i16m1 | ( | ... | ) | __riscv_vsoxseg6ei32_v_i16m1(__VA_ARGS__) |
| #define vsoxseg6ei32_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg6ei32_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg6ei32_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg6ei32_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg6ei32_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_i32m1 | ( | ... | ) | __riscv_vsoxseg6ei32_v_i32m1(__VA_ARGS__) |
| #define vsoxseg6ei32_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg6ei32_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg6ei32_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_i64m1 | ( | ... | ) | __riscv_vsoxseg6ei32_v_i64m1(__VA_ARGS__) |
| #define vsoxseg6ei32_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_i8m1 | ( | ... | ) | __riscv_vsoxseg6ei32_v_i8m1(__VA_ARGS__) |
| #define vsoxseg6ei32_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg6ei32_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg6ei32_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg6ei32_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg6ei32_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg6ei32_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg6ei32_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_u16m1 | ( | ... | ) | __riscv_vsoxseg6ei32_v_u16m1(__VA_ARGS__) |
| #define vsoxseg6ei32_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg6ei32_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg6ei32_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg6ei32_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg6ei32_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_u32m1 | ( | ... | ) | __riscv_vsoxseg6ei32_v_u32m1(__VA_ARGS__) |
| #define vsoxseg6ei32_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg6ei32_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg6ei32_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_u64m1 | ( | ... | ) | __riscv_vsoxseg6ei32_v_u64m1(__VA_ARGS__) |
| #define vsoxseg6ei32_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_u8m1 | ( | ... | ) | __riscv_vsoxseg6ei32_v_u8m1(__VA_ARGS__) |
| #define vsoxseg6ei32_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg6ei32_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg6ei32_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg6ei32_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg6ei32_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg6ei32_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg6ei32_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg6ei32_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg6ei32_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_f16m1 | ( | ... | ) | __riscv_vsoxseg6ei64_v_f16m1(__VA_ARGS__) |
| #define vsoxseg6ei64_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg6ei64_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg6ei64_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg6ei64_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg6ei64_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_f32m1 | ( | ... | ) | __riscv_vsoxseg6ei64_v_f32m1(__VA_ARGS__) |
| #define vsoxseg6ei64_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg6ei64_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg6ei64_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_f64m1 | ( | ... | ) | __riscv_vsoxseg6ei64_v_f64m1(__VA_ARGS__) |
| #define vsoxseg6ei64_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_i16m1 | ( | ... | ) | __riscv_vsoxseg6ei64_v_i16m1(__VA_ARGS__) |
| #define vsoxseg6ei64_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg6ei64_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg6ei64_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg6ei64_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg6ei64_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_i32m1 | ( | ... | ) | __riscv_vsoxseg6ei64_v_i32m1(__VA_ARGS__) |
| #define vsoxseg6ei64_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg6ei64_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg6ei64_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_i64m1 | ( | ... | ) | __riscv_vsoxseg6ei64_v_i64m1(__VA_ARGS__) |
| #define vsoxseg6ei64_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_i8m1 | ( | ... | ) | __riscv_vsoxseg6ei64_v_i8m1(__VA_ARGS__) |
| #define vsoxseg6ei64_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg6ei64_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg6ei64_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg6ei64_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg6ei64_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg6ei64_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg6ei64_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_u16m1 | ( | ... | ) | __riscv_vsoxseg6ei64_v_u16m1(__VA_ARGS__) |
| #define vsoxseg6ei64_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg6ei64_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg6ei64_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg6ei64_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg6ei64_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_u32m1 | ( | ... | ) | __riscv_vsoxseg6ei64_v_u32m1(__VA_ARGS__) |
| #define vsoxseg6ei64_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg6ei64_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg6ei64_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_u64m1 | ( | ... | ) | __riscv_vsoxseg6ei64_v_u64m1(__VA_ARGS__) |
| #define vsoxseg6ei64_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_u8m1 | ( | ... | ) | __riscv_vsoxseg6ei64_v_u8m1(__VA_ARGS__) |
| #define vsoxseg6ei64_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg6ei64_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg6ei64_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg6ei64_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg6ei64_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg6ei64_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg6ei64_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg6ei64_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg6ei64_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_f16m1 | ( | ... | ) | __riscv_vsoxseg6ei8_v_f16m1(__VA_ARGS__) |
| #define vsoxseg6ei8_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg6ei8_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg6ei8_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg6ei8_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg6ei8_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_f32m1 | ( | ... | ) | __riscv_vsoxseg6ei8_v_f32m1(__VA_ARGS__) |
| #define vsoxseg6ei8_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg6ei8_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg6ei8_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_f64m1 | ( | ... | ) | __riscv_vsoxseg6ei8_v_f64m1(__VA_ARGS__) |
| #define vsoxseg6ei8_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_i16m1 | ( | ... | ) | __riscv_vsoxseg6ei8_v_i16m1(__VA_ARGS__) |
| #define vsoxseg6ei8_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg6ei8_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg6ei8_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg6ei8_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg6ei8_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_i32m1 | ( | ... | ) | __riscv_vsoxseg6ei8_v_i32m1(__VA_ARGS__) |
| #define vsoxseg6ei8_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg6ei8_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg6ei8_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_i64m1 | ( | ... | ) | __riscv_vsoxseg6ei8_v_i64m1(__VA_ARGS__) |
| #define vsoxseg6ei8_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_i8m1 | ( | ... | ) | __riscv_vsoxseg6ei8_v_i8m1(__VA_ARGS__) |
| #define vsoxseg6ei8_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg6ei8_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg6ei8_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg6ei8_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg6ei8_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg6ei8_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg6ei8_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_u16m1 | ( | ... | ) | __riscv_vsoxseg6ei8_v_u16m1(__VA_ARGS__) |
| #define vsoxseg6ei8_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg6ei8_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg6ei8_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg6ei8_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg6ei8_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_u32m1 | ( | ... | ) | __riscv_vsoxseg6ei8_v_u32m1(__VA_ARGS__) |
| #define vsoxseg6ei8_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg6ei8_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg6ei8_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_u64m1 | ( | ... | ) | __riscv_vsoxseg6ei8_v_u64m1(__VA_ARGS__) |
| #define vsoxseg6ei8_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_u8m1 | ( | ... | ) | __riscv_vsoxseg6ei8_v_u8m1(__VA_ARGS__) |
| #define vsoxseg6ei8_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg6ei8_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg6ei8_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg6ei8_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg6ei8_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg6ei8_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg6ei8_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg6ei8_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg6ei8_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_f16m1 | ( | ... | ) | __riscv_vsoxseg7ei16_v_f16m1(__VA_ARGS__) |
| #define vsoxseg7ei16_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg7ei16_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg7ei16_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg7ei16_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg7ei16_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_f32m1 | ( | ... | ) | __riscv_vsoxseg7ei16_v_f32m1(__VA_ARGS__) |
| #define vsoxseg7ei16_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg7ei16_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg7ei16_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_f64m1 | ( | ... | ) | __riscv_vsoxseg7ei16_v_f64m1(__VA_ARGS__) |
| #define vsoxseg7ei16_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_i16m1 | ( | ... | ) | __riscv_vsoxseg7ei16_v_i16m1(__VA_ARGS__) |
| #define vsoxseg7ei16_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg7ei16_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg7ei16_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg7ei16_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg7ei16_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_i32m1 | ( | ... | ) | __riscv_vsoxseg7ei16_v_i32m1(__VA_ARGS__) |
| #define vsoxseg7ei16_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg7ei16_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg7ei16_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_i64m1 | ( | ... | ) | __riscv_vsoxseg7ei16_v_i64m1(__VA_ARGS__) |
| #define vsoxseg7ei16_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_i8m1 | ( | ... | ) | __riscv_vsoxseg7ei16_v_i8m1(__VA_ARGS__) |
| #define vsoxseg7ei16_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg7ei16_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg7ei16_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg7ei16_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg7ei16_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg7ei16_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg7ei16_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_u16m1 | ( | ... | ) | __riscv_vsoxseg7ei16_v_u16m1(__VA_ARGS__) |
| #define vsoxseg7ei16_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg7ei16_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg7ei16_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg7ei16_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg7ei16_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_u32m1 | ( | ... | ) | __riscv_vsoxseg7ei16_v_u32m1(__VA_ARGS__) |
| #define vsoxseg7ei16_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg7ei16_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg7ei16_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_u64m1 | ( | ... | ) | __riscv_vsoxseg7ei16_v_u64m1(__VA_ARGS__) |
| #define vsoxseg7ei16_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_u8m1 | ( | ... | ) | __riscv_vsoxseg7ei16_v_u8m1(__VA_ARGS__) |
| #define vsoxseg7ei16_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg7ei16_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg7ei16_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg7ei16_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg7ei16_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg7ei16_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg7ei16_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg7ei16_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg7ei16_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_f16m1 | ( | ... | ) | __riscv_vsoxseg7ei32_v_f16m1(__VA_ARGS__) |
| #define vsoxseg7ei32_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg7ei32_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg7ei32_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg7ei32_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg7ei32_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_f32m1 | ( | ... | ) | __riscv_vsoxseg7ei32_v_f32m1(__VA_ARGS__) |
| #define vsoxseg7ei32_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg7ei32_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg7ei32_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_f64m1 | ( | ... | ) | __riscv_vsoxseg7ei32_v_f64m1(__VA_ARGS__) |
| #define vsoxseg7ei32_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_i16m1 | ( | ... | ) | __riscv_vsoxseg7ei32_v_i16m1(__VA_ARGS__) |
| #define vsoxseg7ei32_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg7ei32_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg7ei32_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg7ei32_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg7ei32_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_i32m1 | ( | ... | ) | __riscv_vsoxseg7ei32_v_i32m1(__VA_ARGS__) |
| #define vsoxseg7ei32_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg7ei32_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg7ei32_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_i64m1 | ( | ... | ) | __riscv_vsoxseg7ei32_v_i64m1(__VA_ARGS__) |
| #define vsoxseg7ei32_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_i8m1 | ( | ... | ) | __riscv_vsoxseg7ei32_v_i8m1(__VA_ARGS__) |
| #define vsoxseg7ei32_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg7ei32_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg7ei32_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg7ei32_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg7ei32_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg7ei32_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg7ei32_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_u16m1 | ( | ... | ) | __riscv_vsoxseg7ei32_v_u16m1(__VA_ARGS__) |
| #define vsoxseg7ei32_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg7ei32_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg7ei32_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg7ei32_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg7ei32_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_u32m1 | ( | ... | ) | __riscv_vsoxseg7ei32_v_u32m1(__VA_ARGS__) |
| #define vsoxseg7ei32_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg7ei32_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg7ei32_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_u64m1 | ( | ... | ) | __riscv_vsoxseg7ei32_v_u64m1(__VA_ARGS__) |
| #define vsoxseg7ei32_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_u8m1 | ( | ... | ) | __riscv_vsoxseg7ei32_v_u8m1(__VA_ARGS__) |
| #define vsoxseg7ei32_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg7ei32_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg7ei32_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg7ei32_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg7ei32_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg7ei32_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg7ei32_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg7ei32_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg7ei32_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_f16m1 | ( | ... | ) | __riscv_vsoxseg7ei64_v_f16m1(__VA_ARGS__) |
| #define vsoxseg7ei64_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg7ei64_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg7ei64_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg7ei64_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg7ei64_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_f32m1 | ( | ... | ) | __riscv_vsoxseg7ei64_v_f32m1(__VA_ARGS__) |
| #define vsoxseg7ei64_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg7ei64_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg7ei64_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_f64m1 | ( | ... | ) | __riscv_vsoxseg7ei64_v_f64m1(__VA_ARGS__) |
| #define vsoxseg7ei64_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_i16m1 | ( | ... | ) | __riscv_vsoxseg7ei64_v_i16m1(__VA_ARGS__) |
| #define vsoxseg7ei64_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg7ei64_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg7ei64_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg7ei64_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg7ei64_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_i32m1 | ( | ... | ) | __riscv_vsoxseg7ei64_v_i32m1(__VA_ARGS__) |
| #define vsoxseg7ei64_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg7ei64_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg7ei64_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_i64m1 | ( | ... | ) | __riscv_vsoxseg7ei64_v_i64m1(__VA_ARGS__) |
| #define vsoxseg7ei64_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_i8m1 | ( | ... | ) | __riscv_vsoxseg7ei64_v_i8m1(__VA_ARGS__) |
| #define vsoxseg7ei64_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg7ei64_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg7ei64_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg7ei64_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg7ei64_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg7ei64_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg7ei64_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_u16m1 | ( | ... | ) | __riscv_vsoxseg7ei64_v_u16m1(__VA_ARGS__) |
| #define vsoxseg7ei64_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg7ei64_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg7ei64_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg7ei64_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg7ei64_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_u32m1 | ( | ... | ) | __riscv_vsoxseg7ei64_v_u32m1(__VA_ARGS__) |
| #define vsoxseg7ei64_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg7ei64_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg7ei64_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_u64m1 | ( | ... | ) | __riscv_vsoxseg7ei64_v_u64m1(__VA_ARGS__) |
| #define vsoxseg7ei64_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_u8m1 | ( | ... | ) | __riscv_vsoxseg7ei64_v_u8m1(__VA_ARGS__) |
| #define vsoxseg7ei64_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg7ei64_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg7ei64_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg7ei64_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg7ei64_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg7ei64_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg7ei64_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg7ei64_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg7ei64_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_f16m1 | ( | ... | ) | __riscv_vsoxseg7ei8_v_f16m1(__VA_ARGS__) |
| #define vsoxseg7ei8_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg7ei8_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg7ei8_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg7ei8_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg7ei8_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_f32m1 | ( | ... | ) | __riscv_vsoxseg7ei8_v_f32m1(__VA_ARGS__) |
| #define vsoxseg7ei8_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg7ei8_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg7ei8_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_f64m1 | ( | ... | ) | __riscv_vsoxseg7ei8_v_f64m1(__VA_ARGS__) |
| #define vsoxseg7ei8_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_i16m1 | ( | ... | ) | __riscv_vsoxseg7ei8_v_i16m1(__VA_ARGS__) |
| #define vsoxseg7ei8_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg7ei8_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg7ei8_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg7ei8_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg7ei8_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_i32m1 | ( | ... | ) | __riscv_vsoxseg7ei8_v_i32m1(__VA_ARGS__) |
| #define vsoxseg7ei8_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg7ei8_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg7ei8_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_i64m1 | ( | ... | ) | __riscv_vsoxseg7ei8_v_i64m1(__VA_ARGS__) |
| #define vsoxseg7ei8_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_i8m1 | ( | ... | ) | __riscv_vsoxseg7ei8_v_i8m1(__VA_ARGS__) |
| #define vsoxseg7ei8_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg7ei8_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg7ei8_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg7ei8_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg7ei8_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg7ei8_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg7ei8_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_u16m1 | ( | ... | ) | __riscv_vsoxseg7ei8_v_u16m1(__VA_ARGS__) |
| #define vsoxseg7ei8_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg7ei8_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg7ei8_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg7ei8_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg7ei8_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_u32m1 | ( | ... | ) | __riscv_vsoxseg7ei8_v_u32m1(__VA_ARGS__) |
| #define vsoxseg7ei8_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg7ei8_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg7ei8_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_u64m1 | ( | ... | ) | __riscv_vsoxseg7ei8_v_u64m1(__VA_ARGS__) |
| #define vsoxseg7ei8_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_u8m1 | ( | ... | ) | __riscv_vsoxseg7ei8_v_u8m1(__VA_ARGS__) |
| #define vsoxseg7ei8_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg7ei8_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg7ei8_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg7ei8_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg7ei8_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg7ei8_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg7ei8_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg7ei8_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg7ei8_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_f16m1 | ( | ... | ) | __riscv_vsoxseg8ei16_v_f16m1(__VA_ARGS__) |
| #define vsoxseg8ei16_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg8ei16_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg8ei16_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg8ei16_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg8ei16_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_f32m1 | ( | ... | ) | __riscv_vsoxseg8ei16_v_f32m1(__VA_ARGS__) |
| #define vsoxseg8ei16_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg8ei16_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg8ei16_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_f64m1 | ( | ... | ) | __riscv_vsoxseg8ei16_v_f64m1(__VA_ARGS__) |
| #define vsoxseg8ei16_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_i16m1 | ( | ... | ) | __riscv_vsoxseg8ei16_v_i16m1(__VA_ARGS__) |
| #define vsoxseg8ei16_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg8ei16_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg8ei16_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg8ei16_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg8ei16_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_i32m1 | ( | ... | ) | __riscv_vsoxseg8ei16_v_i32m1(__VA_ARGS__) |
| #define vsoxseg8ei16_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg8ei16_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg8ei16_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_i64m1 | ( | ... | ) | __riscv_vsoxseg8ei16_v_i64m1(__VA_ARGS__) |
| #define vsoxseg8ei16_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_i8m1 | ( | ... | ) | __riscv_vsoxseg8ei16_v_i8m1(__VA_ARGS__) |
| #define vsoxseg8ei16_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg8ei16_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg8ei16_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg8ei16_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg8ei16_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg8ei16_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg8ei16_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_u16m1 | ( | ... | ) | __riscv_vsoxseg8ei16_v_u16m1(__VA_ARGS__) |
| #define vsoxseg8ei16_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg8ei16_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg8ei16_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg8ei16_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg8ei16_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_u32m1 | ( | ... | ) | __riscv_vsoxseg8ei16_v_u32m1(__VA_ARGS__) |
| #define vsoxseg8ei16_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg8ei16_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg8ei16_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_u64m1 | ( | ... | ) | __riscv_vsoxseg8ei16_v_u64m1(__VA_ARGS__) |
| #define vsoxseg8ei16_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_u8m1 | ( | ... | ) | __riscv_vsoxseg8ei16_v_u8m1(__VA_ARGS__) |
| #define vsoxseg8ei16_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg8ei16_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg8ei16_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg8ei16_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg8ei16_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg8ei16_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg8ei16_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg8ei16_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg8ei16_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_f16m1 | ( | ... | ) | __riscv_vsoxseg8ei32_v_f16m1(__VA_ARGS__) |
| #define vsoxseg8ei32_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg8ei32_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg8ei32_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg8ei32_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg8ei32_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_f32m1 | ( | ... | ) | __riscv_vsoxseg8ei32_v_f32m1(__VA_ARGS__) |
| #define vsoxseg8ei32_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg8ei32_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg8ei32_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_f64m1 | ( | ... | ) | __riscv_vsoxseg8ei32_v_f64m1(__VA_ARGS__) |
| #define vsoxseg8ei32_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_i16m1 | ( | ... | ) | __riscv_vsoxseg8ei32_v_i16m1(__VA_ARGS__) |
| #define vsoxseg8ei32_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg8ei32_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg8ei32_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg8ei32_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg8ei32_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_i32m1 | ( | ... | ) | __riscv_vsoxseg8ei32_v_i32m1(__VA_ARGS__) |
| #define vsoxseg8ei32_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg8ei32_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg8ei32_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_i64m1 | ( | ... | ) | __riscv_vsoxseg8ei32_v_i64m1(__VA_ARGS__) |
| #define vsoxseg8ei32_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_i8m1 | ( | ... | ) | __riscv_vsoxseg8ei32_v_i8m1(__VA_ARGS__) |
| #define vsoxseg8ei32_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg8ei32_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg8ei32_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg8ei32_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg8ei32_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg8ei32_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg8ei32_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_u16m1 | ( | ... | ) | __riscv_vsoxseg8ei32_v_u16m1(__VA_ARGS__) |
| #define vsoxseg8ei32_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg8ei32_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg8ei32_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg8ei32_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg8ei32_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_u32m1 | ( | ... | ) | __riscv_vsoxseg8ei32_v_u32m1(__VA_ARGS__) |
| #define vsoxseg8ei32_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg8ei32_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg8ei32_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_u64m1 | ( | ... | ) | __riscv_vsoxseg8ei32_v_u64m1(__VA_ARGS__) |
| #define vsoxseg8ei32_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_u8m1 | ( | ... | ) | __riscv_vsoxseg8ei32_v_u8m1(__VA_ARGS__) |
| #define vsoxseg8ei32_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg8ei32_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg8ei32_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg8ei32_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg8ei32_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg8ei32_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg8ei32_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg8ei32_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg8ei32_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_f16m1 | ( | ... | ) | __riscv_vsoxseg8ei64_v_f16m1(__VA_ARGS__) |
| #define vsoxseg8ei64_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg8ei64_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg8ei64_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg8ei64_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg8ei64_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_f32m1 | ( | ... | ) | __riscv_vsoxseg8ei64_v_f32m1(__VA_ARGS__) |
| #define vsoxseg8ei64_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg8ei64_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg8ei64_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_f64m1 | ( | ... | ) | __riscv_vsoxseg8ei64_v_f64m1(__VA_ARGS__) |
| #define vsoxseg8ei64_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_i16m1 | ( | ... | ) | __riscv_vsoxseg8ei64_v_i16m1(__VA_ARGS__) |
| #define vsoxseg8ei64_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg8ei64_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg8ei64_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg8ei64_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg8ei64_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_i32m1 | ( | ... | ) | __riscv_vsoxseg8ei64_v_i32m1(__VA_ARGS__) |
| #define vsoxseg8ei64_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg8ei64_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg8ei64_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_i64m1 | ( | ... | ) | __riscv_vsoxseg8ei64_v_i64m1(__VA_ARGS__) |
| #define vsoxseg8ei64_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_i8m1 | ( | ... | ) | __riscv_vsoxseg8ei64_v_i8m1(__VA_ARGS__) |
| #define vsoxseg8ei64_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg8ei64_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg8ei64_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg8ei64_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg8ei64_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg8ei64_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg8ei64_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_u16m1 | ( | ... | ) | __riscv_vsoxseg8ei64_v_u16m1(__VA_ARGS__) |
| #define vsoxseg8ei64_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg8ei64_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg8ei64_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg8ei64_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg8ei64_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_u32m1 | ( | ... | ) | __riscv_vsoxseg8ei64_v_u32m1(__VA_ARGS__) |
| #define vsoxseg8ei64_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg8ei64_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg8ei64_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_u64m1 | ( | ... | ) | __riscv_vsoxseg8ei64_v_u64m1(__VA_ARGS__) |
| #define vsoxseg8ei64_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_u8m1 | ( | ... | ) | __riscv_vsoxseg8ei64_v_u8m1(__VA_ARGS__) |
| #define vsoxseg8ei64_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg8ei64_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg8ei64_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg8ei64_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg8ei64_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg8ei64_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg8ei64_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg8ei64_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg8ei64_v_u8mf8_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_f16m1 | ( | ... | ) | __riscv_vsoxseg8ei8_v_f16m1(__VA_ARGS__) |
| #define vsoxseg8ei8_v_f16m1_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_f16m1_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_f16mf2 | ( | ... | ) | __riscv_vsoxseg8ei8_v_f16mf2(__VA_ARGS__) |
| #define vsoxseg8ei8_v_f16mf2_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_f16mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_f16mf4 | ( | ... | ) | __riscv_vsoxseg8ei8_v_f16mf4(__VA_ARGS__) |
| #define vsoxseg8ei8_v_f16mf4_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_f16mf4_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_f32m1 | ( | ... | ) | __riscv_vsoxseg8ei8_v_f32m1(__VA_ARGS__) |
| #define vsoxseg8ei8_v_f32m1_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_f32m1_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_f32mf2 | ( | ... | ) | __riscv_vsoxseg8ei8_v_f32mf2(__VA_ARGS__) |
| #define vsoxseg8ei8_v_f32mf2_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_f32mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_f64m1 | ( | ... | ) | __riscv_vsoxseg8ei8_v_f64m1(__VA_ARGS__) |
| #define vsoxseg8ei8_v_f64m1_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_f64m1_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_i16m1 | ( | ... | ) | __riscv_vsoxseg8ei8_v_i16m1(__VA_ARGS__) |
| #define vsoxseg8ei8_v_i16m1_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_i16m1_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_i16mf2 | ( | ... | ) | __riscv_vsoxseg8ei8_v_i16mf2(__VA_ARGS__) |
| #define vsoxseg8ei8_v_i16mf2_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_i16mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_i16mf4 | ( | ... | ) | __riscv_vsoxseg8ei8_v_i16mf4(__VA_ARGS__) |
| #define vsoxseg8ei8_v_i16mf4_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_i16mf4_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_i32m1 | ( | ... | ) | __riscv_vsoxseg8ei8_v_i32m1(__VA_ARGS__) |
| #define vsoxseg8ei8_v_i32m1_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_i32m1_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_i32mf2 | ( | ... | ) | __riscv_vsoxseg8ei8_v_i32mf2(__VA_ARGS__) |
| #define vsoxseg8ei8_v_i32mf2_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_i32mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_i64m1 | ( | ... | ) | __riscv_vsoxseg8ei8_v_i64m1(__VA_ARGS__) |
| #define vsoxseg8ei8_v_i64m1_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_i64m1_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_i8m1 | ( | ... | ) | __riscv_vsoxseg8ei8_v_i8m1(__VA_ARGS__) |
| #define vsoxseg8ei8_v_i8m1_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_i8m1_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_i8mf2 | ( | ... | ) | __riscv_vsoxseg8ei8_v_i8mf2(__VA_ARGS__) |
| #define vsoxseg8ei8_v_i8mf2_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_i8mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_i8mf4 | ( | ... | ) | __riscv_vsoxseg8ei8_v_i8mf4(__VA_ARGS__) |
| #define vsoxseg8ei8_v_i8mf4_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_i8mf4_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_i8mf8 | ( | ... | ) | __riscv_vsoxseg8ei8_v_i8mf8(__VA_ARGS__) |
| #define vsoxseg8ei8_v_i8mf8_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_i8mf8_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_u16m1 | ( | ... | ) | __riscv_vsoxseg8ei8_v_u16m1(__VA_ARGS__) |
| #define vsoxseg8ei8_v_u16m1_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_u16m1_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_u16mf2 | ( | ... | ) | __riscv_vsoxseg8ei8_v_u16mf2(__VA_ARGS__) |
| #define vsoxseg8ei8_v_u16mf2_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_u16mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_u16mf4 | ( | ... | ) | __riscv_vsoxseg8ei8_v_u16mf4(__VA_ARGS__) |
| #define vsoxseg8ei8_v_u16mf4_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_u16mf4_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_u32m1 | ( | ... | ) | __riscv_vsoxseg8ei8_v_u32m1(__VA_ARGS__) |
| #define vsoxseg8ei8_v_u32m1_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_u32m1_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_u32mf2 | ( | ... | ) | __riscv_vsoxseg8ei8_v_u32mf2(__VA_ARGS__) |
| #define vsoxseg8ei8_v_u32mf2_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_u32mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_u64m1 | ( | ... | ) | __riscv_vsoxseg8ei8_v_u64m1(__VA_ARGS__) |
| #define vsoxseg8ei8_v_u64m1_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_u64m1_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_u8m1 | ( | ... | ) | __riscv_vsoxseg8ei8_v_u8m1(__VA_ARGS__) |
| #define vsoxseg8ei8_v_u8m1_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_u8m1_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_u8mf2 | ( | ... | ) | __riscv_vsoxseg8ei8_v_u8mf2(__VA_ARGS__) |
| #define vsoxseg8ei8_v_u8mf2_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_u8mf2_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_u8mf4 | ( | ... | ) | __riscv_vsoxseg8ei8_v_u8mf4(__VA_ARGS__) |
| #define vsoxseg8ei8_v_u8mf4_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_u8mf4_m(__VA_ARGS__) |
| #define vsoxseg8ei8_v_u8mf8 | ( | ... | ) | __riscv_vsoxseg8ei8_v_u8mf8(__VA_ARGS__) |
| #define vsoxseg8ei8_v_u8mf8_m | ( | ... | ) | __riscv_vsoxseg8ei8_v_u8mf8_m(__VA_ARGS__) |
| #define vsra_vv_i16m1 | ( | ... | ) | __riscv_vsra_vv_i16m1(__VA_ARGS__) |
| #define vsra_vv_i16m1_m | ( | ... | ) | __riscv_vsra_vv_i16m1_tumu(__VA_ARGS__) |
| #define vsra_vv_i16m2 | ( | ... | ) | __riscv_vsra_vv_i16m2(__VA_ARGS__) |
| #define vsra_vv_i16m2_m | ( | ... | ) | __riscv_vsra_vv_i16m2_tumu(__VA_ARGS__) |
| #define vsra_vv_i16m4 | ( | ... | ) | __riscv_vsra_vv_i16m4(__VA_ARGS__) |
| #define vsra_vv_i16m4_m | ( | ... | ) | __riscv_vsra_vv_i16m4_tumu(__VA_ARGS__) |
| #define vsra_vv_i16m8 | ( | ... | ) | __riscv_vsra_vv_i16m8(__VA_ARGS__) |
| #define vsra_vv_i16m8_m | ( | ... | ) | __riscv_vsra_vv_i16m8_tumu(__VA_ARGS__) |
| #define vsra_vv_i16mf2 | ( | ... | ) | __riscv_vsra_vv_i16mf2(__VA_ARGS__) |
| #define vsra_vv_i16mf2_m | ( | ... | ) | __riscv_vsra_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vsra_vv_i16mf4 | ( | ... | ) | __riscv_vsra_vv_i16mf4(__VA_ARGS__) |
| #define vsra_vv_i16mf4_m | ( | ... | ) | __riscv_vsra_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vsra_vv_i32m1 | ( | ... | ) | __riscv_vsra_vv_i32m1(__VA_ARGS__) |
| #define vsra_vv_i32m1_m | ( | ... | ) | __riscv_vsra_vv_i32m1_tumu(__VA_ARGS__) |
| #define vsra_vv_i32m2 | ( | ... | ) | __riscv_vsra_vv_i32m2(__VA_ARGS__) |
| #define vsra_vv_i32m2_m | ( | ... | ) | __riscv_vsra_vv_i32m2_tumu(__VA_ARGS__) |
| #define vsra_vv_i32m4 | ( | ... | ) | __riscv_vsra_vv_i32m4(__VA_ARGS__) |
| #define vsra_vv_i32m4_m | ( | ... | ) | __riscv_vsra_vv_i32m4_tumu(__VA_ARGS__) |
| #define vsra_vv_i32m8 | ( | ... | ) | __riscv_vsra_vv_i32m8(__VA_ARGS__) |
| #define vsra_vv_i32m8_m | ( | ... | ) | __riscv_vsra_vv_i32m8_tumu(__VA_ARGS__) |
| #define vsra_vv_i32mf2 | ( | ... | ) | __riscv_vsra_vv_i32mf2(__VA_ARGS__) |
| #define vsra_vv_i32mf2_m | ( | ... | ) | __riscv_vsra_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vsra_vv_i64m1 | ( | ... | ) | __riscv_vsra_vv_i64m1(__VA_ARGS__) |
| #define vsra_vv_i64m1_m | ( | ... | ) | __riscv_vsra_vv_i64m1_tumu(__VA_ARGS__) |
| #define vsra_vv_i64m2 | ( | ... | ) | __riscv_vsra_vv_i64m2(__VA_ARGS__) |
| #define vsra_vv_i64m2_m | ( | ... | ) | __riscv_vsra_vv_i64m2_tumu(__VA_ARGS__) |
| #define vsra_vv_i64m4 | ( | ... | ) | __riscv_vsra_vv_i64m4(__VA_ARGS__) |
| #define vsra_vv_i64m4_m | ( | ... | ) | __riscv_vsra_vv_i64m4_tumu(__VA_ARGS__) |
| #define vsra_vv_i64m8 | ( | ... | ) | __riscv_vsra_vv_i64m8(__VA_ARGS__) |
| #define vsra_vv_i64m8_m | ( | ... | ) | __riscv_vsra_vv_i64m8_tumu(__VA_ARGS__) |
| #define vsra_vv_i8m1 | ( | ... | ) | __riscv_vsra_vv_i8m1(__VA_ARGS__) |
| #define vsra_vv_i8m1_m | ( | ... | ) | __riscv_vsra_vv_i8m1_tumu(__VA_ARGS__) |
| #define vsra_vv_i8m2 | ( | ... | ) | __riscv_vsra_vv_i8m2(__VA_ARGS__) |
| #define vsra_vv_i8m2_m | ( | ... | ) | __riscv_vsra_vv_i8m2_tumu(__VA_ARGS__) |
| #define vsra_vv_i8m4 | ( | ... | ) | __riscv_vsra_vv_i8m4(__VA_ARGS__) |
| #define vsra_vv_i8m4_m | ( | ... | ) | __riscv_vsra_vv_i8m4_tumu(__VA_ARGS__) |
| #define vsra_vv_i8m8 | ( | ... | ) | __riscv_vsra_vv_i8m8(__VA_ARGS__) |
| #define vsra_vv_i8m8_m | ( | ... | ) | __riscv_vsra_vv_i8m8_tumu(__VA_ARGS__) |
| #define vsra_vv_i8mf2 | ( | ... | ) | __riscv_vsra_vv_i8mf2(__VA_ARGS__) |
| #define vsra_vv_i8mf2_m | ( | ... | ) | __riscv_vsra_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vsra_vv_i8mf4 | ( | ... | ) | __riscv_vsra_vv_i8mf4(__VA_ARGS__) |
| #define vsra_vv_i8mf4_m | ( | ... | ) | __riscv_vsra_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vsra_vv_i8mf8 | ( | ... | ) | __riscv_vsra_vv_i8mf8(__VA_ARGS__) |
| #define vsra_vv_i8mf8_m | ( | ... | ) | __riscv_vsra_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vsra_vx_i16m1 | ( | ... | ) | __riscv_vsra_vx_i16m1(__VA_ARGS__) |
| #define vsra_vx_i16m1_m | ( | ... | ) | __riscv_vsra_vx_i16m1_tumu(__VA_ARGS__) |
| #define vsra_vx_i16m2 | ( | ... | ) | __riscv_vsra_vx_i16m2(__VA_ARGS__) |
| #define vsra_vx_i16m2_m | ( | ... | ) | __riscv_vsra_vx_i16m2_tumu(__VA_ARGS__) |
| #define vsra_vx_i16m4 | ( | ... | ) | __riscv_vsra_vx_i16m4(__VA_ARGS__) |
| #define vsra_vx_i16m4_m | ( | ... | ) | __riscv_vsra_vx_i16m4_tumu(__VA_ARGS__) |
| #define vsra_vx_i16m8 | ( | ... | ) | __riscv_vsra_vx_i16m8(__VA_ARGS__) |
| #define vsra_vx_i16m8_m | ( | ... | ) | __riscv_vsra_vx_i16m8_tumu(__VA_ARGS__) |
| #define vsra_vx_i16mf2 | ( | ... | ) | __riscv_vsra_vx_i16mf2(__VA_ARGS__) |
| #define vsra_vx_i16mf2_m | ( | ... | ) | __riscv_vsra_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vsra_vx_i16mf4 | ( | ... | ) | __riscv_vsra_vx_i16mf4(__VA_ARGS__) |
| #define vsra_vx_i16mf4_m | ( | ... | ) | __riscv_vsra_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vsra_vx_i32m1 | ( | ... | ) | __riscv_vsra_vx_i32m1(__VA_ARGS__) |
| #define vsra_vx_i32m1_m | ( | ... | ) | __riscv_vsra_vx_i32m1_tumu(__VA_ARGS__) |
| #define vsra_vx_i32m2 | ( | ... | ) | __riscv_vsra_vx_i32m2(__VA_ARGS__) |
| #define vsra_vx_i32m2_m | ( | ... | ) | __riscv_vsra_vx_i32m2_tumu(__VA_ARGS__) |
| #define vsra_vx_i32m4 | ( | ... | ) | __riscv_vsra_vx_i32m4(__VA_ARGS__) |
| #define vsra_vx_i32m4_m | ( | ... | ) | __riscv_vsra_vx_i32m4_tumu(__VA_ARGS__) |
| #define vsra_vx_i32m8 | ( | ... | ) | __riscv_vsra_vx_i32m8(__VA_ARGS__) |
| #define vsra_vx_i32m8_m | ( | ... | ) | __riscv_vsra_vx_i32m8_tumu(__VA_ARGS__) |
| #define vsra_vx_i32mf2 | ( | ... | ) | __riscv_vsra_vx_i32mf2(__VA_ARGS__) |
| #define vsra_vx_i32mf2_m | ( | ... | ) | __riscv_vsra_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vsra_vx_i64m1 | ( | ... | ) | __riscv_vsra_vx_i64m1(__VA_ARGS__) |
| #define vsra_vx_i64m1_m | ( | ... | ) | __riscv_vsra_vx_i64m1_tumu(__VA_ARGS__) |
| #define vsra_vx_i64m2 | ( | ... | ) | __riscv_vsra_vx_i64m2(__VA_ARGS__) |
| #define vsra_vx_i64m2_m | ( | ... | ) | __riscv_vsra_vx_i64m2_tumu(__VA_ARGS__) |
| #define vsra_vx_i64m4 | ( | ... | ) | __riscv_vsra_vx_i64m4(__VA_ARGS__) |
| #define vsra_vx_i64m4_m | ( | ... | ) | __riscv_vsra_vx_i64m4_tumu(__VA_ARGS__) |
| #define vsra_vx_i64m8 | ( | ... | ) | __riscv_vsra_vx_i64m8(__VA_ARGS__) |
| #define vsra_vx_i64m8_m | ( | ... | ) | __riscv_vsra_vx_i64m8_tumu(__VA_ARGS__) |
| #define vsra_vx_i8m1 | ( | ... | ) | __riscv_vsra_vx_i8m1(__VA_ARGS__) |
| #define vsra_vx_i8m1_m | ( | ... | ) | __riscv_vsra_vx_i8m1_tumu(__VA_ARGS__) |
| #define vsra_vx_i8m2 | ( | ... | ) | __riscv_vsra_vx_i8m2(__VA_ARGS__) |
| #define vsra_vx_i8m2_m | ( | ... | ) | __riscv_vsra_vx_i8m2_tumu(__VA_ARGS__) |
| #define vsra_vx_i8m4 | ( | ... | ) | __riscv_vsra_vx_i8m4(__VA_ARGS__) |
| #define vsra_vx_i8m4_m | ( | ... | ) | __riscv_vsra_vx_i8m4_tumu(__VA_ARGS__) |
| #define vsra_vx_i8m8 | ( | ... | ) | __riscv_vsra_vx_i8m8(__VA_ARGS__) |
| #define vsra_vx_i8m8_m | ( | ... | ) | __riscv_vsra_vx_i8m8_tumu(__VA_ARGS__) |
| #define vsra_vx_i8mf2 | ( | ... | ) | __riscv_vsra_vx_i8mf2(__VA_ARGS__) |
| #define vsra_vx_i8mf2_m | ( | ... | ) | __riscv_vsra_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vsra_vx_i8mf4 | ( | ... | ) | __riscv_vsra_vx_i8mf4(__VA_ARGS__) |
| #define vsra_vx_i8mf4_m | ( | ... | ) | __riscv_vsra_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vsra_vx_i8mf8 | ( | ... | ) | __riscv_vsra_vx_i8mf8(__VA_ARGS__) |
| #define vsra_vx_i8mf8_m | ( | ... | ) | __riscv_vsra_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vsrl_vv_u16m1 | ( | ... | ) | __riscv_vsrl_vv_u16m1(__VA_ARGS__) |
| #define vsrl_vv_u16m1_m | ( | ... | ) | __riscv_vsrl_vv_u16m1_tumu(__VA_ARGS__) |
| #define vsrl_vv_u16m2 | ( | ... | ) | __riscv_vsrl_vv_u16m2(__VA_ARGS__) |
| #define vsrl_vv_u16m2_m | ( | ... | ) | __riscv_vsrl_vv_u16m2_tumu(__VA_ARGS__) |
| #define vsrl_vv_u16m4 | ( | ... | ) | __riscv_vsrl_vv_u16m4(__VA_ARGS__) |
| #define vsrl_vv_u16m4_m | ( | ... | ) | __riscv_vsrl_vv_u16m4_tumu(__VA_ARGS__) |
| #define vsrl_vv_u16m8 | ( | ... | ) | __riscv_vsrl_vv_u16m8(__VA_ARGS__) |
| #define vsrl_vv_u16m8_m | ( | ... | ) | __riscv_vsrl_vv_u16m8_tumu(__VA_ARGS__) |
| #define vsrl_vv_u16mf2 | ( | ... | ) | __riscv_vsrl_vv_u16mf2(__VA_ARGS__) |
| #define vsrl_vv_u16mf2_m | ( | ... | ) | __riscv_vsrl_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vsrl_vv_u16mf4 | ( | ... | ) | __riscv_vsrl_vv_u16mf4(__VA_ARGS__) |
| #define vsrl_vv_u16mf4_m | ( | ... | ) | __riscv_vsrl_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vsrl_vv_u32m1 | ( | ... | ) | __riscv_vsrl_vv_u32m1(__VA_ARGS__) |
| #define vsrl_vv_u32m1_m | ( | ... | ) | __riscv_vsrl_vv_u32m1_tumu(__VA_ARGS__) |
| #define vsrl_vv_u32m2 | ( | ... | ) | __riscv_vsrl_vv_u32m2(__VA_ARGS__) |
| #define vsrl_vv_u32m2_m | ( | ... | ) | __riscv_vsrl_vv_u32m2_tumu(__VA_ARGS__) |
| #define vsrl_vv_u32m4 | ( | ... | ) | __riscv_vsrl_vv_u32m4(__VA_ARGS__) |
| #define vsrl_vv_u32m4_m | ( | ... | ) | __riscv_vsrl_vv_u32m4_tumu(__VA_ARGS__) |
| #define vsrl_vv_u32m8 | ( | ... | ) | __riscv_vsrl_vv_u32m8(__VA_ARGS__) |
| #define vsrl_vv_u32m8_m | ( | ... | ) | __riscv_vsrl_vv_u32m8_tumu(__VA_ARGS__) |
| #define vsrl_vv_u32mf2 | ( | ... | ) | __riscv_vsrl_vv_u32mf2(__VA_ARGS__) |
| #define vsrl_vv_u32mf2_m | ( | ... | ) | __riscv_vsrl_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vsrl_vv_u64m1 | ( | ... | ) | __riscv_vsrl_vv_u64m1(__VA_ARGS__) |
| #define vsrl_vv_u64m1_m | ( | ... | ) | __riscv_vsrl_vv_u64m1_tumu(__VA_ARGS__) |
| #define vsrl_vv_u64m2 | ( | ... | ) | __riscv_vsrl_vv_u64m2(__VA_ARGS__) |
| #define vsrl_vv_u64m2_m | ( | ... | ) | __riscv_vsrl_vv_u64m2_tumu(__VA_ARGS__) |
| #define vsrl_vv_u64m4 | ( | ... | ) | __riscv_vsrl_vv_u64m4(__VA_ARGS__) |
| #define vsrl_vv_u64m4_m | ( | ... | ) | __riscv_vsrl_vv_u64m4_tumu(__VA_ARGS__) |
| #define vsrl_vv_u64m8 | ( | ... | ) | __riscv_vsrl_vv_u64m8(__VA_ARGS__) |
| #define vsrl_vv_u64m8_m | ( | ... | ) | __riscv_vsrl_vv_u64m8_tumu(__VA_ARGS__) |
| #define vsrl_vv_u8m1 | ( | ... | ) | __riscv_vsrl_vv_u8m1(__VA_ARGS__) |
| #define vsrl_vv_u8m1_m | ( | ... | ) | __riscv_vsrl_vv_u8m1_tumu(__VA_ARGS__) |
| #define vsrl_vv_u8m2 | ( | ... | ) | __riscv_vsrl_vv_u8m2(__VA_ARGS__) |
| #define vsrl_vv_u8m2_m | ( | ... | ) | __riscv_vsrl_vv_u8m2_tumu(__VA_ARGS__) |
| #define vsrl_vv_u8m4 | ( | ... | ) | __riscv_vsrl_vv_u8m4(__VA_ARGS__) |
| #define vsrl_vv_u8m4_m | ( | ... | ) | __riscv_vsrl_vv_u8m4_tumu(__VA_ARGS__) |
| #define vsrl_vv_u8m8 | ( | ... | ) | __riscv_vsrl_vv_u8m8(__VA_ARGS__) |
| #define vsrl_vv_u8m8_m | ( | ... | ) | __riscv_vsrl_vv_u8m8_tumu(__VA_ARGS__) |
| #define vsrl_vv_u8mf2 | ( | ... | ) | __riscv_vsrl_vv_u8mf2(__VA_ARGS__) |
| #define vsrl_vv_u8mf2_m | ( | ... | ) | __riscv_vsrl_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vsrl_vv_u8mf4 | ( | ... | ) | __riscv_vsrl_vv_u8mf4(__VA_ARGS__) |
| #define vsrl_vv_u8mf4_m | ( | ... | ) | __riscv_vsrl_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vsrl_vv_u8mf8 | ( | ... | ) | __riscv_vsrl_vv_u8mf8(__VA_ARGS__) |
| #define vsrl_vv_u8mf8_m | ( | ... | ) | __riscv_vsrl_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vsrl_vx_u16m1 | ( | ... | ) | __riscv_vsrl_vx_u16m1(__VA_ARGS__) |
| #define vsrl_vx_u16m1_m | ( | ... | ) | __riscv_vsrl_vx_u16m1_tumu(__VA_ARGS__) |
| #define vsrl_vx_u16m2 | ( | ... | ) | __riscv_vsrl_vx_u16m2(__VA_ARGS__) |
| #define vsrl_vx_u16m2_m | ( | ... | ) | __riscv_vsrl_vx_u16m2_tumu(__VA_ARGS__) |
| #define vsrl_vx_u16m4 | ( | ... | ) | __riscv_vsrl_vx_u16m4(__VA_ARGS__) |
| #define vsrl_vx_u16m4_m | ( | ... | ) | __riscv_vsrl_vx_u16m4_tumu(__VA_ARGS__) |
| #define vsrl_vx_u16m8 | ( | ... | ) | __riscv_vsrl_vx_u16m8(__VA_ARGS__) |
| #define vsrl_vx_u16m8_m | ( | ... | ) | __riscv_vsrl_vx_u16m8_tumu(__VA_ARGS__) |
| #define vsrl_vx_u16mf2 | ( | ... | ) | __riscv_vsrl_vx_u16mf2(__VA_ARGS__) |
| #define vsrl_vx_u16mf2_m | ( | ... | ) | __riscv_vsrl_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vsrl_vx_u16mf4 | ( | ... | ) | __riscv_vsrl_vx_u16mf4(__VA_ARGS__) |
| #define vsrl_vx_u16mf4_m | ( | ... | ) | __riscv_vsrl_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vsrl_vx_u32m1 | ( | ... | ) | __riscv_vsrl_vx_u32m1(__VA_ARGS__) |
| #define vsrl_vx_u32m1_m | ( | ... | ) | __riscv_vsrl_vx_u32m1_tumu(__VA_ARGS__) |
| #define vsrl_vx_u32m2 | ( | ... | ) | __riscv_vsrl_vx_u32m2(__VA_ARGS__) |
| #define vsrl_vx_u32m2_m | ( | ... | ) | __riscv_vsrl_vx_u32m2_tumu(__VA_ARGS__) |
| #define vsrl_vx_u32m4 | ( | ... | ) | __riscv_vsrl_vx_u32m4(__VA_ARGS__) |
| #define vsrl_vx_u32m4_m | ( | ... | ) | __riscv_vsrl_vx_u32m4_tumu(__VA_ARGS__) |
| #define vsrl_vx_u32m8 | ( | ... | ) | __riscv_vsrl_vx_u32m8(__VA_ARGS__) |
| #define vsrl_vx_u32m8_m | ( | ... | ) | __riscv_vsrl_vx_u32m8_tumu(__VA_ARGS__) |
| #define vsrl_vx_u32mf2 | ( | ... | ) | __riscv_vsrl_vx_u32mf2(__VA_ARGS__) |
| #define vsrl_vx_u32mf2_m | ( | ... | ) | __riscv_vsrl_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vsrl_vx_u64m1 | ( | ... | ) | __riscv_vsrl_vx_u64m1(__VA_ARGS__) |
| #define vsrl_vx_u64m1_m | ( | ... | ) | __riscv_vsrl_vx_u64m1_tumu(__VA_ARGS__) |
| #define vsrl_vx_u64m2 | ( | ... | ) | __riscv_vsrl_vx_u64m2(__VA_ARGS__) |
| #define vsrl_vx_u64m2_m | ( | ... | ) | __riscv_vsrl_vx_u64m2_tumu(__VA_ARGS__) |
| #define vsrl_vx_u64m4 | ( | ... | ) | __riscv_vsrl_vx_u64m4(__VA_ARGS__) |
| #define vsrl_vx_u64m4_m | ( | ... | ) | __riscv_vsrl_vx_u64m4_tumu(__VA_ARGS__) |
| #define vsrl_vx_u64m8 | ( | ... | ) | __riscv_vsrl_vx_u64m8(__VA_ARGS__) |
| #define vsrl_vx_u64m8_m | ( | ... | ) | __riscv_vsrl_vx_u64m8_tumu(__VA_ARGS__) |
| #define vsrl_vx_u8m1 | ( | ... | ) | __riscv_vsrl_vx_u8m1(__VA_ARGS__) |
| #define vsrl_vx_u8m1_m | ( | ... | ) | __riscv_vsrl_vx_u8m1_tumu(__VA_ARGS__) |
| #define vsrl_vx_u8m2 | ( | ... | ) | __riscv_vsrl_vx_u8m2(__VA_ARGS__) |
| #define vsrl_vx_u8m2_m | ( | ... | ) | __riscv_vsrl_vx_u8m2_tumu(__VA_ARGS__) |
| #define vsrl_vx_u8m4 | ( | ... | ) | __riscv_vsrl_vx_u8m4(__VA_ARGS__) |
| #define vsrl_vx_u8m4_m | ( | ... | ) | __riscv_vsrl_vx_u8m4_tumu(__VA_ARGS__) |
| #define vsrl_vx_u8m8 | ( | ... | ) | __riscv_vsrl_vx_u8m8(__VA_ARGS__) |
| #define vsrl_vx_u8m8_m | ( | ... | ) | __riscv_vsrl_vx_u8m8_tumu(__VA_ARGS__) |
| #define vsrl_vx_u8mf2 | ( | ... | ) | __riscv_vsrl_vx_u8mf2(__VA_ARGS__) |
| #define vsrl_vx_u8mf2_m | ( | ... | ) | __riscv_vsrl_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vsrl_vx_u8mf4 | ( | ... | ) | __riscv_vsrl_vx_u8mf4(__VA_ARGS__) |
| #define vsrl_vx_u8mf4_m | ( | ... | ) | __riscv_vsrl_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vsrl_vx_u8mf8 | ( | ... | ) | __riscv_vsrl_vx_u8mf8(__VA_ARGS__) |
| #define vsrl_vx_u8mf8_m | ( | ... | ) | __riscv_vsrl_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vsse16_v_f16m1 | ( | ... | ) | __riscv_vsse16_v_f16m1(__VA_ARGS__) |
| #define vsse16_v_f16m1_m | ( | ... | ) | __riscv_vsse16_v_f16m1_m(__VA_ARGS__) |
| #define vsse16_v_f16m2 | ( | ... | ) | __riscv_vsse16_v_f16m2(__VA_ARGS__) |
| #define vsse16_v_f16m2_m | ( | ... | ) | __riscv_vsse16_v_f16m2_m(__VA_ARGS__) |
| #define vsse16_v_f16m4 | ( | ... | ) | __riscv_vsse16_v_f16m4(__VA_ARGS__) |
| #define vsse16_v_f16m4_m | ( | ... | ) | __riscv_vsse16_v_f16m4_m(__VA_ARGS__) |
| #define vsse16_v_f16m8 | ( | ... | ) | __riscv_vsse16_v_f16m8(__VA_ARGS__) |
| #define vsse16_v_f16m8_m | ( | ... | ) | __riscv_vsse16_v_f16m8_m(__VA_ARGS__) |
| #define vsse16_v_f16mf2 | ( | ... | ) | __riscv_vsse16_v_f16mf2(__VA_ARGS__) |
| #define vsse16_v_f16mf2_m | ( | ... | ) | __riscv_vsse16_v_f16mf2_m(__VA_ARGS__) |
| #define vsse16_v_f16mf4 | ( | ... | ) | __riscv_vsse16_v_f16mf4(__VA_ARGS__) |
| #define vsse16_v_f16mf4_m | ( | ... | ) | __riscv_vsse16_v_f16mf4_m(__VA_ARGS__) |
| #define vsse16_v_i16m1 | ( | ... | ) | __riscv_vsse16_v_i16m1(__VA_ARGS__) |
| #define vsse16_v_i16m1_m | ( | ... | ) | __riscv_vsse16_v_i16m1_m(__VA_ARGS__) |
| #define vsse16_v_i16m2 | ( | ... | ) | __riscv_vsse16_v_i16m2(__VA_ARGS__) |
| #define vsse16_v_i16m2_m | ( | ... | ) | __riscv_vsse16_v_i16m2_m(__VA_ARGS__) |
| #define vsse16_v_i16m4 | ( | ... | ) | __riscv_vsse16_v_i16m4(__VA_ARGS__) |
| #define vsse16_v_i16m4_m | ( | ... | ) | __riscv_vsse16_v_i16m4_m(__VA_ARGS__) |
| #define vsse16_v_i16m8 | ( | ... | ) | __riscv_vsse16_v_i16m8(__VA_ARGS__) |
| #define vsse16_v_i16m8_m | ( | ... | ) | __riscv_vsse16_v_i16m8_m(__VA_ARGS__) |
| #define vsse16_v_i16mf2 | ( | ... | ) | __riscv_vsse16_v_i16mf2(__VA_ARGS__) |
| #define vsse16_v_i16mf2_m | ( | ... | ) | __riscv_vsse16_v_i16mf2_m(__VA_ARGS__) |
| #define vsse16_v_i16mf4 | ( | ... | ) | __riscv_vsse16_v_i16mf4(__VA_ARGS__) |
| #define vsse16_v_i16mf4_m | ( | ... | ) | __riscv_vsse16_v_i16mf4_m(__VA_ARGS__) |
| #define vsse16_v_u16m1 | ( | ... | ) | __riscv_vsse16_v_u16m1(__VA_ARGS__) |
| #define vsse16_v_u16m1_m | ( | ... | ) | __riscv_vsse16_v_u16m1_m(__VA_ARGS__) |
| #define vsse16_v_u16m2 | ( | ... | ) | __riscv_vsse16_v_u16m2(__VA_ARGS__) |
| #define vsse16_v_u16m2_m | ( | ... | ) | __riscv_vsse16_v_u16m2_m(__VA_ARGS__) |
| #define vsse16_v_u16m4 | ( | ... | ) | __riscv_vsse16_v_u16m4(__VA_ARGS__) |
| #define vsse16_v_u16m4_m | ( | ... | ) | __riscv_vsse16_v_u16m4_m(__VA_ARGS__) |
| #define vsse16_v_u16m8 | ( | ... | ) | __riscv_vsse16_v_u16m8(__VA_ARGS__) |
| #define vsse16_v_u16m8_m | ( | ... | ) | __riscv_vsse16_v_u16m8_m(__VA_ARGS__) |
| #define vsse16_v_u16mf2 | ( | ... | ) | __riscv_vsse16_v_u16mf2(__VA_ARGS__) |
| #define vsse16_v_u16mf2_m | ( | ... | ) | __riscv_vsse16_v_u16mf2_m(__VA_ARGS__) |
| #define vsse16_v_u16mf4 | ( | ... | ) | __riscv_vsse16_v_u16mf4(__VA_ARGS__) |
| #define vsse16_v_u16mf4_m | ( | ... | ) | __riscv_vsse16_v_u16mf4_m(__VA_ARGS__) |
| #define vsse32_v_f32m1 | ( | ... | ) | __riscv_vsse32_v_f32m1(__VA_ARGS__) |
| #define vsse32_v_f32m1_m | ( | ... | ) | __riscv_vsse32_v_f32m1_m(__VA_ARGS__) |
| #define vsse32_v_f32m2 | ( | ... | ) | __riscv_vsse32_v_f32m2(__VA_ARGS__) |
| #define vsse32_v_f32m2_m | ( | ... | ) | __riscv_vsse32_v_f32m2_m(__VA_ARGS__) |
| #define vsse32_v_f32m4 | ( | ... | ) | __riscv_vsse32_v_f32m4(__VA_ARGS__) |
| #define vsse32_v_f32m4_m | ( | ... | ) | __riscv_vsse32_v_f32m4_m(__VA_ARGS__) |
| #define vsse32_v_f32m8 | ( | ... | ) | __riscv_vsse32_v_f32m8(__VA_ARGS__) |
| #define vsse32_v_f32m8_m | ( | ... | ) | __riscv_vsse32_v_f32m8_m(__VA_ARGS__) |
| #define vsse32_v_f32mf2 | ( | ... | ) | __riscv_vsse32_v_f32mf2(__VA_ARGS__) |
| #define vsse32_v_f32mf2_m | ( | ... | ) | __riscv_vsse32_v_f32mf2_m(__VA_ARGS__) |
| #define vsse32_v_i32m1 | ( | ... | ) | __riscv_vsse32_v_i32m1(__VA_ARGS__) |
| #define vsse32_v_i32m1_m | ( | ... | ) | __riscv_vsse32_v_i32m1_m(__VA_ARGS__) |
| #define vsse32_v_i32m2 | ( | ... | ) | __riscv_vsse32_v_i32m2(__VA_ARGS__) |
| #define vsse32_v_i32m2_m | ( | ... | ) | __riscv_vsse32_v_i32m2_m(__VA_ARGS__) |
| #define vsse32_v_i32m4 | ( | ... | ) | __riscv_vsse32_v_i32m4(__VA_ARGS__) |
| #define vsse32_v_i32m4_m | ( | ... | ) | __riscv_vsse32_v_i32m4_m(__VA_ARGS__) |
| #define vsse32_v_i32m8 | ( | ... | ) | __riscv_vsse32_v_i32m8(__VA_ARGS__) |
| #define vsse32_v_i32m8_m | ( | ... | ) | __riscv_vsse32_v_i32m8_m(__VA_ARGS__) |
| #define vsse32_v_i32mf2 | ( | ... | ) | __riscv_vsse32_v_i32mf2(__VA_ARGS__) |
| #define vsse32_v_i32mf2_m | ( | ... | ) | __riscv_vsse32_v_i32mf2_m(__VA_ARGS__) |
| #define vsse32_v_u32m1 | ( | ... | ) | __riscv_vsse32_v_u32m1(__VA_ARGS__) |
| #define vsse32_v_u32m1_m | ( | ... | ) | __riscv_vsse32_v_u32m1_m(__VA_ARGS__) |
| #define vsse32_v_u32m2 | ( | ... | ) | __riscv_vsse32_v_u32m2(__VA_ARGS__) |
| #define vsse32_v_u32m2_m | ( | ... | ) | __riscv_vsse32_v_u32m2_m(__VA_ARGS__) |
| #define vsse32_v_u32m4 | ( | ... | ) | __riscv_vsse32_v_u32m4(__VA_ARGS__) |
| #define vsse32_v_u32m4_m | ( | ... | ) | __riscv_vsse32_v_u32m4_m(__VA_ARGS__) |
| #define vsse32_v_u32m8 | ( | ... | ) | __riscv_vsse32_v_u32m8(__VA_ARGS__) |
| #define vsse32_v_u32m8_m | ( | ... | ) | __riscv_vsse32_v_u32m8_m(__VA_ARGS__) |
| #define vsse32_v_u32mf2 | ( | ... | ) | __riscv_vsse32_v_u32mf2(__VA_ARGS__) |
| #define vsse32_v_u32mf2_m | ( | ... | ) | __riscv_vsse32_v_u32mf2_m(__VA_ARGS__) |
| #define vsse64_v_f64m1 | ( | ... | ) | __riscv_vsse64_v_f64m1(__VA_ARGS__) |
| #define vsse64_v_f64m1_m | ( | ... | ) | __riscv_vsse64_v_f64m1_m(__VA_ARGS__) |
| #define vsse64_v_f64m2 | ( | ... | ) | __riscv_vsse64_v_f64m2(__VA_ARGS__) |
| #define vsse64_v_f64m2_m | ( | ... | ) | __riscv_vsse64_v_f64m2_m(__VA_ARGS__) |
| #define vsse64_v_f64m4 | ( | ... | ) | __riscv_vsse64_v_f64m4(__VA_ARGS__) |
| #define vsse64_v_f64m4_m | ( | ... | ) | __riscv_vsse64_v_f64m4_m(__VA_ARGS__) |
| #define vsse64_v_f64m8 | ( | ... | ) | __riscv_vsse64_v_f64m8(__VA_ARGS__) |
| #define vsse64_v_f64m8_m | ( | ... | ) | __riscv_vsse64_v_f64m8_m(__VA_ARGS__) |
| #define vsse64_v_i64m1 | ( | ... | ) | __riscv_vsse64_v_i64m1(__VA_ARGS__) |
| #define vsse64_v_i64m1_m | ( | ... | ) | __riscv_vsse64_v_i64m1_m(__VA_ARGS__) |
| #define vsse64_v_i64m2 | ( | ... | ) | __riscv_vsse64_v_i64m2(__VA_ARGS__) |
| #define vsse64_v_i64m2_m | ( | ... | ) | __riscv_vsse64_v_i64m2_m(__VA_ARGS__) |
| #define vsse64_v_i64m4 | ( | ... | ) | __riscv_vsse64_v_i64m4(__VA_ARGS__) |
| #define vsse64_v_i64m4_m | ( | ... | ) | __riscv_vsse64_v_i64m4_m(__VA_ARGS__) |
| #define vsse64_v_i64m8 | ( | ... | ) | __riscv_vsse64_v_i64m8(__VA_ARGS__) |
| #define vsse64_v_i64m8_m | ( | ... | ) | __riscv_vsse64_v_i64m8_m(__VA_ARGS__) |
| #define vsse64_v_u64m1 | ( | ... | ) | __riscv_vsse64_v_u64m1(__VA_ARGS__) |
| #define vsse64_v_u64m1_m | ( | ... | ) | __riscv_vsse64_v_u64m1_m(__VA_ARGS__) |
| #define vsse64_v_u64m2 | ( | ... | ) | __riscv_vsse64_v_u64m2(__VA_ARGS__) |
| #define vsse64_v_u64m2_m | ( | ... | ) | __riscv_vsse64_v_u64m2_m(__VA_ARGS__) |
| #define vsse64_v_u64m4 | ( | ... | ) | __riscv_vsse64_v_u64m4(__VA_ARGS__) |
| #define vsse64_v_u64m4_m | ( | ... | ) | __riscv_vsse64_v_u64m4_m(__VA_ARGS__) |
| #define vsse64_v_u64m8 | ( | ... | ) | __riscv_vsse64_v_u64m8(__VA_ARGS__) |
| #define vsse64_v_u64m8_m | ( | ... | ) | __riscv_vsse64_v_u64m8_m(__VA_ARGS__) |
| #define vsse8_v_i8m1 | ( | ... | ) | __riscv_vsse8_v_i8m1(__VA_ARGS__) |
| #define vsse8_v_i8m1_m | ( | ... | ) | __riscv_vsse8_v_i8m1_m(__VA_ARGS__) |
| #define vsse8_v_i8m2 | ( | ... | ) | __riscv_vsse8_v_i8m2(__VA_ARGS__) |
| #define vsse8_v_i8m2_m | ( | ... | ) | __riscv_vsse8_v_i8m2_m(__VA_ARGS__) |
| #define vsse8_v_i8m4 | ( | ... | ) | __riscv_vsse8_v_i8m4(__VA_ARGS__) |
| #define vsse8_v_i8m4_m | ( | ... | ) | __riscv_vsse8_v_i8m4_m(__VA_ARGS__) |
| #define vsse8_v_i8m8 | ( | ... | ) | __riscv_vsse8_v_i8m8(__VA_ARGS__) |
| #define vsse8_v_i8m8_m | ( | ... | ) | __riscv_vsse8_v_i8m8_m(__VA_ARGS__) |
| #define vsse8_v_i8mf2 | ( | ... | ) | __riscv_vsse8_v_i8mf2(__VA_ARGS__) |
| #define vsse8_v_i8mf2_m | ( | ... | ) | __riscv_vsse8_v_i8mf2_m(__VA_ARGS__) |
| #define vsse8_v_i8mf4 | ( | ... | ) | __riscv_vsse8_v_i8mf4(__VA_ARGS__) |
| #define vsse8_v_i8mf4_m | ( | ... | ) | __riscv_vsse8_v_i8mf4_m(__VA_ARGS__) |
| #define vsse8_v_i8mf8 | ( | ... | ) | __riscv_vsse8_v_i8mf8(__VA_ARGS__) |
| #define vsse8_v_i8mf8_m | ( | ... | ) | __riscv_vsse8_v_i8mf8_m(__VA_ARGS__) |
| #define vsse8_v_u8m1 | ( | ... | ) | __riscv_vsse8_v_u8m1(__VA_ARGS__) |
| #define vsse8_v_u8m1_m | ( | ... | ) | __riscv_vsse8_v_u8m1_m(__VA_ARGS__) |
| #define vsse8_v_u8m2 | ( | ... | ) | __riscv_vsse8_v_u8m2(__VA_ARGS__) |
| #define vsse8_v_u8m2_m | ( | ... | ) | __riscv_vsse8_v_u8m2_m(__VA_ARGS__) |
| #define vsse8_v_u8m4 | ( | ... | ) | __riscv_vsse8_v_u8m4(__VA_ARGS__) |
| #define vsse8_v_u8m4_m | ( | ... | ) | __riscv_vsse8_v_u8m4_m(__VA_ARGS__) |
| #define vsse8_v_u8m8 | ( | ... | ) | __riscv_vsse8_v_u8m8(__VA_ARGS__) |
| #define vsse8_v_u8m8_m | ( | ... | ) | __riscv_vsse8_v_u8m8_m(__VA_ARGS__) |
| #define vsse8_v_u8mf2 | ( | ... | ) | __riscv_vsse8_v_u8mf2(__VA_ARGS__) |
| #define vsse8_v_u8mf2_m | ( | ... | ) | __riscv_vsse8_v_u8mf2_m(__VA_ARGS__) |
| #define vsse8_v_u8mf4 | ( | ... | ) | __riscv_vsse8_v_u8mf4(__VA_ARGS__) |
| #define vsse8_v_u8mf4_m | ( | ... | ) | __riscv_vsse8_v_u8mf4_m(__VA_ARGS__) |
| #define vsse8_v_u8mf8 | ( | ... | ) | __riscv_vsse8_v_u8mf8(__VA_ARGS__) |
| #define vsse8_v_u8mf8_m | ( | ... | ) | __riscv_vsse8_v_u8mf8_m(__VA_ARGS__) |
| #define vsseg2e16_v_f16m1 | ( | ... | ) | __riscv_vsseg2e16_v_f16m1(__VA_ARGS__) |
| #define vsseg2e16_v_f16m1_m | ( | ... | ) | __riscv_vsseg2e16_v_f16m1_m(__VA_ARGS__) |
| #define vsseg2e16_v_f16m2 | ( | ... | ) | __riscv_vsseg2e16_v_f16m2(__VA_ARGS__) |
| #define vsseg2e16_v_f16m2_m | ( | ... | ) | __riscv_vsseg2e16_v_f16m2_m(__VA_ARGS__) |
| #define vsseg2e16_v_f16m4 | ( | ... | ) | __riscv_vsseg2e16_v_f16m4(__VA_ARGS__) |
| #define vsseg2e16_v_f16m4_m | ( | ... | ) | __riscv_vsseg2e16_v_f16m4_m(__VA_ARGS__) |
| #define vsseg2e16_v_f16mf2 | ( | ... | ) | __riscv_vsseg2e16_v_f16mf2(__VA_ARGS__) |
| #define vsseg2e16_v_f16mf2_m | ( | ... | ) | __riscv_vsseg2e16_v_f16mf2_m(__VA_ARGS__) |
| #define vsseg2e16_v_f16mf4 | ( | ... | ) | __riscv_vsseg2e16_v_f16mf4(__VA_ARGS__) |
| #define vsseg2e16_v_f16mf4_m | ( | ... | ) | __riscv_vsseg2e16_v_f16mf4_m(__VA_ARGS__) |
| #define vsseg2e16_v_i16m1 | ( | ... | ) | __riscv_vsseg2e16_v_i16m1(__VA_ARGS__) |
| #define vsseg2e16_v_i16m1_m | ( | ... | ) | __riscv_vsseg2e16_v_i16m1_m(__VA_ARGS__) |
| #define vsseg2e16_v_i16m2 | ( | ... | ) | __riscv_vsseg2e16_v_i16m2(__VA_ARGS__) |
| #define vsseg2e16_v_i16m2_m | ( | ... | ) | __riscv_vsseg2e16_v_i16m2_m(__VA_ARGS__) |
| #define vsseg2e16_v_i16m4 | ( | ... | ) | __riscv_vsseg2e16_v_i16m4(__VA_ARGS__) |
| #define vsseg2e16_v_i16m4_m | ( | ... | ) | __riscv_vsseg2e16_v_i16m4_m(__VA_ARGS__) |
| #define vsseg2e16_v_i16mf2 | ( | ... | ) | __riscv_vsseg2e16_v_i16mf2(__VA_ARGS__) |
| #define vsseg2e16_v_i16mf2_m | ( | ... | ) | __riscv_vsseg2e16_v_i16mf2_m(__VA_ARGS__) |
| #define vsseg2e16_v_i16mf4 | ( | ... | ) | __riscv_vsseg2e16_v_i16mf4(__VA_ARGS__) |
| #define vsseg2e16_v_i16mf4_m | ( | ... | ) | __riscv_vsseg2e16_v_i16mf4_m(__VA_ARGS__) |
| #define vsseg2e16_v_u16m1 | ( | ... | ) | __riscv_vsseg2e16_v_u16m1(__VA_ARGS__) |
| #define vsseg2e16_v_u16m1_m | ( | ... | ) | __riscv_vsseg2e16_v_u16m1_m(__VA_ARGS__) |
| #define vsseg2e16_v_u16m2 | ( | ... | ) | __riscv_vsseg2e16_v_u16m2(__VA_ARGS__) |
| #define vsseg2e16_v_u16m2_m | ( | ... | ) | __riscv_vsseg2e16_v_u16m2_m(__VA_ARGS__) |
| #define vsseg2e16_v_u16m4 | ( | ... | ) | __riscv_vsseg2e16_v_u16m4(__VA_ARGS__) |
| #define vsseg2e16_v_u16m4_m | ( | ... | ) | __riscv_vsseg2e16_v_u16m4_m(__VA_ARGS__) |
| #define vsseg2e16_v_u16mf2 | ( | ... | ) | __riscv_vsseg2e16_v_u16mf2(__VA_ARGS__) |
| #define vsseg2e16_v_u16mf2_m | ( | ... | ) | __riscv_vsseg2e16_v_u16mf2_m(__VA_ARGS__) |
| #define vsseg2e16_v_u16mf4 | ( | ... | ) | __riscv_vsseg2e16_v_u16mf4(__VA_ARGS__) |
| #define vsseg2e16_v_u16mf4_m | ( | ... | ) | __riscv_vsseg2e16_v_u16mf4_m(__VA_ARGS__) |
| #define vsseg2e32_v_f32m1 | ( | ... | ) | __riscv_vsseg2e32_v_f32m1(__VA_ARGS__) |
| #define vsseg2e32_v_f32m1_m | ( | ... | ) | __riscv_vsseg2e32_v_f32m1_m(__VA_ARGS__) |
| #define vsseg2e32_v_f32m2 | ( | ... | ) | __riscv_vsseg2e32_v_f32m2(__VA_ARGS__) |
| #define vsseg2e32_v_f32m2_m | ( | ... | ) | __riscv_vsseg2e32_v_f32m2_m(__VA_ARGS__) |
| #define vsseg2e32_v_f32m4 | ( | ... | ) | __riscv_vsseg2e32_v_f32m4(__VA_ARGS__) |
| #define vsseg2e32_v_f32m4_m | ( | ... | ) | __riscv_vsseg2e32_v_f32m4_m(__VA_ARGS__) |
| #define vsseg2e32_v_f32mf2 | ( | ... | ) | __riscv_vsseg2e32_v_f32mf2(__VA_ARGS__) |
| #define vsseg2e32_v_f32mf2_m | ( | ... | ) | __riscv_vsseg2e32_v_f32mf2_m(__VA_ARGS__) |
| #define vsseg2e32_v_i32m1 | ( | ... | ) | __riscv_vsseg2e32_v_i32m1(__VA_ARGS__) |
| #define vsseg2e32_v_i32m1_m | ( | ... | ) | __riscv_vsseg2e32_v_i32m1_m(__VA_ARGS__) |
| #define vsseg2e32_v_i32m2 | ( | ... | ) | __riscv_vsseg2e32_v_i32m2(__VA_ARGS__) |
| #define vsseg2e32_v_i32m2_m | ( | ... | ) | __riscv_vsseg2e32_v_i32m2_m(__VA_ARGS__) |
| #define vsseg2e32_v_i32m4 | ( | ... | ) | __riscv_vsseg2e32_v_i32m4(__VA_ARGS__) |
| #define vsseg2e32_v_i32m4_m | ( | ... | ) | __riscv_vsseg2e32_v_i32m4_m(__VA_ARGS__) |
| #define vsseg2e32_v_i32mf2 | ( | ... | ) | __riscv_vsseg2e32_v_i32mf2(__VA_ARGS__) |
| #define vsseg2e32_v_i32mf2_m | ( | ... | ) | __riscv_vsseg2e32_v_i32mf2_m(__VA_ARGS__) |
| #define vsseg2e32_v_u32m1 | ( | ... | ) | __riscv_vsseg2e32_v_u32m1(__VA_ARGS__) |
| #define vsseg2e32_v_u32m1_m | ( | ... | ) | __riscv_vsseg2e32_v_u32m1_m(__VA_ARGS__) |
| #define vsseg2e32_v_u32m2 | ( | ... | ) | __riscv_vsseg2e32_v_u32m2(__VA_ARGS__) |
| #define vsseg2e32_v_u32m2_m | ( | ... | ) | __riscv_vsseg2e32_v_u32m2_m(__VA_ARGS__) |
| #define vsseg2e32_v_u32m4 | ( | ... | ) | __riscv_vsseg2e32_v_u32m4(__VA_ARGS__) |
| #define vsseg2e32_v_u32m4_m | ( | ... | ) | __riscv_vsseg2e32_v_u32m4_m(__VA_ARGS__) |
| #define vsseg2e32_v_u32mf2 | ( | ... | ) | __riscv_vsseg2e32_v_u32mf2(__VA_ARGS__) |
| #define vsseg2e32_v_u32mf2_m | ( | ... | ) | __riscv_vsseg2e32_v_u32mf2_m(__VA_ARGS__) |
| #define vsseg2e64_v_f64m1 | ( | ... | ) | __riscv_vsseg2e64_v_f64m1(__VA_ARGS__) |
| #define vsseg2e64_v_f64m1_m | ( | ... | ) | __riscv_vsseg2e64_v_f64m1_m(__VA_ARGS__) |
| #define vsseg2e64_v_f64m2 | ( | ... | ) | __riscv_vsseg2e64_v_f64m2(__VA_ARGS__) |
| #define vsseg2e64_v_f64m2_m | ( | ... | ) | __riscv_vsseg2e64_v_f64m2_m(__VA_ARGS__) |
| #define vsseg2e64_v_f64m4 | ( | ... | ) | __riscv_vsseg2e64_v_f64m4(__VA_ARGS__) |
| #define vsseg2e64_v_f64m4_m | ( | ... | ) | __riscv_vsseg2e64_v_f64m4_m(__VA_ARGS__) |
| #define vsseg2e64_v_i64m1 | ( | ... | ) | __riscv_vsseg2e64_v_i64m1(__VA_ARGS__) |
| #define vsseg2e64_v_i64m1_m | ( | ... | ) | __riscv_vsseg2e64_v_i64m1_m(__VA_ARGS__) |
| #define vsseg2e64_v_i64m2 | ( | ... | ) | __riscv_vsseg2e64_v_i64m2(__VA_ARGS__) |
| #define vsseg2e64_v_i64m2_m | ( | ... | ) | __riscv_vsseg2e64_v_i64m2_m(__VA_ARGS__) |
| #define vsseg2e64_v_i64m4 | ( | ... | ) | __riscv_vsseg2e64_v_i64m4(__VA_ARGS__) |
| #define vsseg2e64_v_i64m4_m | ( | ... | ) | __riscv_vsseg2e64_v_i64m4_m(__VA_ARGS__) |
| #define vsseg2e64_v_u64m1 | ( | ... | ) | __riscv_vsseg2e64_v_u64m1(__VA_ARGS__) |
| #define vsseg2e64_v_u64m1_m | ( | ... | ) | __riscv_vsseg2e64_v_u64m1_m(__VA_ARGS__) |
| #define vsseg2e64_v_u64m2 | ( | ... | ) | __riscv_vsseg2e64_v_u64m2(__VA_ARGS__) |
| #define vsseg2e64_v_u64m2_m | ( | ... | ) | __riscv_vsseg2e64_v_u64m2_m(__VA_ARGS__) |
| #define vsseg2e64_v_u64m4 | ( | ... | ) | __riscv_vsseg2e64_v_u64m4(__VA_ARGS__) |
| #define vsseg2e64_v_u64m4_m | ( | ... | ) | __riscv_vsseg2e64_v_u64m4_m(__VA_ARGS__) |
| #define vsseg2e8_v_i8m1 | ( | ... | ) | __riscv_vsseg2e8_v_i8m1(__VA_ARGS__) |
| #define vsseg2e8_v_i8m1_m | ( | ... | ) | __riscv_vsseg2e8_v_i8m1_m(__VA_ARGS__) |
| #define vsseg2e8_v_i8m2 | ( | ... | ) | __riscv_vsseg2e8_v_i8m2(__VA_ARGS__) |
| #define vsseg2e8_v_i8m2_m | ( | ... | ) | __riscv_vsseg2e8_v_i8m2_m(__VA_ARGS__) |
| #define vsseg2e8_v_i8m4 | ( | ... | ) | __riscv_vsseg2e8_v_i8m4(__VA_ARGS__) |
| #define vsseg2e8_v_i8m4_m | ( | ... | ) | __riscv_vsseg2e8_v_i8m4_m(__VA_ARGS__) |
| #define vsseg2e8_v_i8mf2 | ( | ... | ) | __riscv_vsseg2e8_v_i8mf2(__VA_ARGS__) |
| #define vsseg2e8_v_i8mf2_m | ( | ... | ) | __riscv_vsseg2e8_v_i8mf2_m(__VA_ARGS__) |
| #define vsseg2e8_v_i8mf4 | ( | ... | ) | __riscv_vsseg2e8_v_i8mf4(__VA_ARGS__) |
| #define vsseg2e8_v_i8mf4_m | ( | ... | ) | __riscv_vsseg2e8_v_i8mf4_m(__VA_ARGS__) |
| #define vsseg2e8_v_i8mf8 | ( | ... | ) | __riscv_vsseg2e8_v_i8mf8(__VA_ARGS__) |
| #define vsseg2e8_v_i8mf8_m | ( | ... | ) | __riscv_vsseg2e8_v_i8mf8_m(__VA_ARGS__) |
| #define vsseg2e8_v_u8m1 | ( | ... | ) | __riscv_vsseg2e8_v_u8m1(__VA_ARGS__) |
| #define vsseg2e8_v_u8m1_m | ( | ... | ) | __riscv_vsseg2e8_v_u8m1_m(__VA_ARGS__) |
| #define vsseg2e8_v_u8m2 | ( | ... | ) | __riscv_vsseg2e8_v_u8m2(__VA_ARGS__) |
| #define vsseg2e8_v_u8m2_m | ( | ... | ) | __riscv_vsseg2e8_v_u8m2_m(__VA_ARGS__) |
| #define vsseg2e8_v_u8m4 | ( | ... | ) | __riscv_vsseg2e8_v_u8m4(__VA_ARGS__) |
| #define vsseg2e8_v_u8m4_m | ( | ... | ) | __riscv_vsseg2e8_v_u8m4_m(__VA_ARGS__) |
| #define vsseg2e8_v_u8mf2 | ( | ... | ) | __riscv_vsseg2e8_v_u8mf2(__VA_ARGS__) |
| #define vsseg2e8_v_u8mf2_m | ( | ... | ) | __riscv_vsseg2e8_v_u8mf2_m(__VA_ARGS__) |
| #define vsseg2e8_v_u8mf4 | ( | ... | ) | __riscv_vsseg2e8_v_u8mf4(__VA_ARGS__) |
| #define vsseg2e8_v_u8mf4_m | ( | ... | ) | __riscv_vsseg2e8_v_u8mf4_m(__VA_ARGS__) |
| #define vsseg2e8_v_u8mf8 | ( | ... | ) | __riscv_vsseg2e8_v_u8mf8(__VA_ARGS__) |
| #define vsseg2e8_v_u8mf8_m | ( | ... | ) | __riscv_vsseg2e8_v_u8mf8_m(__VA_ARGS__) |
| #define vsseg3e16_v_f16m1 | ( | ... | ) | __riscv_vsseg3e16_v_f16m1(__VA_ARGS__) |
| #define vsseg3e16_v_f16m1_m | ( | ... | ) | __riscv_vsseg3e16_v_f16m1_m(__VA_ARGS__) |
| #define vsseg3e16_v_f16m2 | ( | ... | ) | __riscv_vsseg3e16_v_f16m2(__VA_ARGS__) |
| #define vsseg3e16_v_f16m2_m | ( | ... | ) | __riscv_vsseg3e16_v_f16m2_m(__VA_ARGS__) |
| #define vsseg3e16_v_f16mf2 | ( | ... | ) | __riscv_vsseg3e16_v_f16mf2(__VA_ARGS__) |
| #define vsseg3e16_v_f16mf2_m | ( | ... | ) | __riscv_vsseg3e16_v_f16mf2_m(__VA_ARGS__) |
| #define vsseg3e16_v_f16mf4 | ( | ... | ) | __riscv_vsseg3e16_v_f16mf4(__VA_ARGS__) |
| #define vsseg3e16_v_f16mf4_m | ( | ... | ) | __riscv_vsseg3e16_v_f16mf4_m(__VA_ARGS__) |
| #define vsseg3e16_v_i16m1 | ( | ... | ) | __riscv_vsseg3e16_v_i16m1(__VA_ARGS__) |
| #define vsseg3e16_v_i16m1_m | ( | ... | ) | __riscv_vsseg3e16_v_i16m1_m(__VA_ARGS__) |
| #define vsseg3e16_v_i16m2 | ( | ... | ) | __riscv_vsseg3e16_v_i16m2(__VA_ARGS__) |
| #define vsseg3e16_v_i16m2_m | ( | ... | ) | __riscv_vsseg3e16_v_i16m2_m(__VA_ARGS__) |
| #define vsseg3e16_v_i16mf2 | ( | ... | ) | __riscv_vsseg3e16_v_i16mf2(__VA_ARGS__) |
| #define vsseg3e16_v_i16mf2_m | ( | ... | ) | __riscv_vsseg3e16_v_i16mf2_m(__VA_ARGS__) |
| #define vsseg3e16_v_i16mf4 | ( | ... | ) | __riscv_vsseg3e16_v_i16mf4(__VA_ARGS__) |
| #define vsseg3e16_v_i16mf4_m | ( | ... | ) | __riscv_vsseg3e16_v_i16mf4_m(__VA_ARGS__) |
| #define vsseg3e16_v_u16m1 | ( | ... | ) | __riscv_vsseg3e16_v_u16m1(__VA_ARGS__) |
| #define vsseg3e16_v_u16m1_m | ( | ... | ) | __riscv_vsseg3e16_v_u16m1_m(__VA_ARGS__) |
| #define vsseg3e16_v_u16m2 | ( | ... | ) | __riscv_vsseg3e16_v_u16m2(__VA_ARGS__) |
| #define vsseg3e16_v_u16m2_m | ( | ... | ) | __riscv_vsseg3e16_v_u16m2_m(__VA_ARGS__) |
| #define vsseg3e16_v_u16mf2 | ( | ... | ) | __riscv_vsseg3e16_v_u16mf2(__VA_ARGS__) |
| #define vsseg3e16_v_u16mf2_m | ( | ... | ) | __riscv_vsseg3e16_v_u16mf2_m(__VA_ARGS__) |
| #define vsseg3e16_v_u16mf4 | ( | ... | ) | __riscv_vsseg3e16_v_u16mf4(__VA_ARGS__) |
| #define vsseg3e16_v_u16mf4_m | ( | ... | ) | __riscv_vsseg3e16_v_u16mf4_m(__VA_ARGS__) |
| #define vsseg3e32_v_f32m1 | ( | ... | ) | __riscv_vsseg3e32_v_f32m1(__VA_ARGS__) |
| #define vsseg3e32_v_f32m1_m | ( | ... | ) | __riscv_vsseg3e32_v_f32m1_m(__VA_ARGS__) |
| #define vsseg3e32_v_f32m2 | ( | ... | ) | __riscv_vsseg3e32_v_f32m2(__VA_ARGS__) |
| #define vsseg3e32_v_f32m2_m | ( | ... | ) | __riscv_vsseg3e32_v_f32m2_m(__VA_ARGS__) |
| #define vsseg3e32_v_f32mf2 | ( | ... | ) | __riscv_vsseg3e32_v_f32mf2(__VA_ARGS__) |
| #define vsseg3e32_v_f32mf2_m | ( | ... | ) | __riscv_vsseg3e32_v_f32mf2_m(__VA_ARGS__) |
| #define vsseg3e32_v_i32m1 | ( | ... | ) | __riscv_vsseg3e32_v_i32m1(__VA_ARGS__) |
| #define vsseg3e32_v_i32m1_m | ( | ... | ) | __riscv_vsseg3e32_v_i32m1_m(__VA_ARGS__) |
| #define vsseg3e32_v_i32m2 | ( | ... | ) | __riscv_vsseg3e32_v_i32m2(__VA_ARGS__) |
| #define vsseg3e32_v_i32m2_m | ( | ... | ) | __riscv_vsseg3e32_v_i32m2_m(__VA_ARGS__) |
| #define vsseg3e32_v_i32mf2 | ( | ... | ) | __riscv_vsseg3e32_v_i32mf2(__VA_ARGS__) |
| #define vsseg3e32_v_i32mf2_m | ( | ... | ) | __riscv_vsseg3e32_v_i32mf2_m(__VA_ARGS__) |
| #define vsseg3e32_v_u32m1 | ( | ... | ) | __riscv_vsseg3e32_v_u32m1(__VA_ARGS__) |
| #define vsseg3e32_v_u32m1_m | ( | ... | ) | __riscv_vsseg3e32_v_u32m1_m(__VA_ARGS__) |
| #define vsseg3e32_v_u32m2 | ( | ... | ) | __riscv_vsseg3e32_v_u32m2(__VA_ARGS__) |
| #define vsseg3e32_v_u32m2_m | ( | ... | ) | __riscv_vsseg3e32_v_u32m2_m(__VA_ARGS__) |
| #define vsseg3e32_v_u32mf2 | ( | ... | ) | __riscv_vsseg3e32_v_u32mf2(__VA_ARGS__) |
| #define vsseg3e32_v_u32mf2_m | ( | ... | ) | __riscv_vsseg3e32_v_u32mf2_m(__VA_ARGS__) |
| #define vsseg3e64_v_f64m1 | ( | ... | ) | __riscv_vsseg3e64_v_f64m1(__VA_ARGS__) |
| #define vsseg3e64_v_f64m1_m | ( | ... | ) | __riscv_vsseg3e64_v_f64m1_m(__VA_ARGS__) |
| #define vsseg3e64_v_f64m2 | ( | ... | ) | __riscv_vsseg3e64_v_f64m2(__VA_ARGS__) |
| #define vsseg3e64_v_f64m2_m | ( | ... | ) | __riscv_vsseg3e64_v_f64m2_m(__VA_ARGS__) |
| #define vsseg3e64_v_i64m1 | ( | ... | ) | __riscv_vsseg3e64_v_i64m1(__VA_ARGS__) |
| #define vsseg3e64_v_i64m1_m | ( | ... | ) | __riscv_vsseg3e64_v_i64m1_m(__VA_ARGS__) |
| #define vsseg3e64_v_i64m2 | ( | ... | ) | __riscv_vsseg3e64_v_i64m2(__VA_ARGS__) |
| #define vsseg3e64_v_i64m2_m | ( | ... | ) | __riscv_vsseg3e64_v_i64m2_m(__VA_ARGS__) |
| #define vsseg3e64_v_u64m1 | ( | ... | ) | __riscv_vsseg3e64_v_u64m1(__VA_ARGS__) |
| #define vsseg3e64_v_u64m1_m | ( | ... | ) | __riscv_vsseg3e64_v_u64m1_m(__VA_ARGS__) |
| #define vsseg3e64_v_u64m2 | ( | ... | ) | __riscv_vsseg3e64_v_u64m2(__VA_ARGS__) |
| #define vsseg3e64_v_u64m2_m | ( | ... | ) | __riscv_vsseg3e64_v_u64m2_m(__VA_ARGS__) |
| #define vsseg3e8_v_i8m1 | ( | ... | ) | __riscv_vsseg3e8_v_i8m1(__VA_ARGS__) |
| #define vsseg3e8_v_i8m1_m | ( | ... | ) | __riscv_vsseg3e8_v_i8m1_m(__VA_ARGS__) |
| #define vsseg3e8_v_i8m2 | ( | ... | ) | __riscv_vsseg3e8_v_i8m2(__VA_ARGS__) |
| #define vsseg3e8_v_i8m2_m | ( | ... | ) | __riscv_vsseg3e8_v_i8m2_m(__VA_ARGS__) |
| #define vsseg3e8_v_i8mf2 | ( | ... | ) | __riscv_vsseg3e8_v_i8mf2(__VA_ARGS__) |
| #define vsseg3e8_v_i8mf2_m | ( | ... | ) | __riscv_vsseg3e8_v_i8mf2_m(__VA_ARGS__) |
| #define vsseg3e8_v_i8mf4 | ( | ... | ) | __riscv_vsseg3e8_v_i8mf4(__VA_ARGS__) |
| #define vsseg3e8_v_i8mf4_m | ( | ... | ) | __riscv_vsseg3e8_v_i8mf4_m(__VA_ARGS__) |
| #define vsseg3e8_v_i8mf8 | ( | ... | ) | __riscv_vsseg3e8_v_i8mf8(__VA_ARGS__) |
| #define vsseg3e8_v_i8mf8_m | ( | ... | ) | __riscv_vsseg3e8_v_i8mf8_m(__VA_ARGS__) |
| #define vsseg3e8_v_u8m1 | ( | ... | ) | __riscv_vsseg3e8_v_u8m1(__VA_ARGS__) |
| #define vsseg3e8_v_u8m1_m | ( | ... | ) | __riscv_vsseg3e8_v_u8m1_m(__VA_ARGS__) |
| #define vsseg3e8_v_u8m2 | ( | ... | ) | __riscv_vsseg3e8_v_u8m2(__VA_ARGS__) |
| #define vsseg3e8_v_u8m2_m | ( | ... | ) | __riscv_vsseg3e8_v_u8m2_m(__VA_ARGS__) |
| #define vsseg3e8_v_u8mf2 | ( | ... | ) | __riscv_vsseg3e8_v_u8mf2(__VA_ARGS__) |
| #define vsseg3e8_v_u8mf2_m | ( | ... | ) | __riscv_vsseg3e8_v_u8mf2_m(__VA_ARGS__) |
| #define vsseg3e8_v_u8mf4 | ( | ... | ) | __riscv_vsseg3e8_v_u8mf4(__VA_ARGS__) |
| #define vsseg3e8_v_u8mf4_m | ( | ... | ) | __riscv_vsseg3e8_v_u8mf4_m(__VA_ARGS__) |
| #define vsseg3e8_v_u8mf8 | ( | ... | ) | __riscv_vsseg3e8_v_u8mf8(__VA_ARGS__) |
| #define vsseg3e8_v_u8mf8_m | ( | ... | ) | __riscv_vsseg3e8_v_u8mf8_m(__VA_ARGS__) |
| #define vsseg4e16_v_f16m1 | ( | ... | ) | __riscv_vsseg4e16_v_f16m1(__VA_ARGS__) |
| #define vsseg4e16_v_f16m1_m | ( | ... | ) | __riscv_vsseg4e16_v_f16m1_m(__VA_ARGS__) |
| #define vsseg4e16_v_f16m2 | ( | ... | ) | __riscv_vsseg4e16_v_f16m2(__VA_ARGS__) |
| #define vsseg4e16_v_f16m2_m | ( | ... | ) | __riscv_vsseg4e16_v_f16m2_m(__VA_ARGS__) |
| #define vsseg4e16_v_f16mf2 | ( | ... | ) | __riscv_vsseg4e16_v_f16mf2(__VA_ARGS__) |
| #define vsseg4e16_v_f16mf2_m | ( | ... | ) | __riscv_vsseg4e16_v_f16mf2_m(__VA_ARGS__) |
| #define vsseg4e16_v_f16mf4 | ( | ... | ) | __riscv_vsseg4e16_v_f16mf4(__VA_ARGS__) |
| #define vsseg4e16_v_f16mf4_m | ( | ... | ) | __riscv_vsseg4e16_v_f16mf4_m(__VA_ARGS__) |
| #define vsseg4e16_v_i16m1 | ( | ... | ) | __riscv_vsseg4e16_v_i16m1(__VA_ARGS__) |
| #define vsseg4e16_v_i16m1_m | ( | ... | ) | __riscv_vsseg4e16_v_i16m1_m(__VA_ARGS__) |
| #define vsseg4e16_v_i16m2 | ( | ... | ) | __riscv_vsseg4e16_v_i16m2(__VA_ARGS__) |
| #define vsseg4e16_v_i16m2_m | ( | ... | ) | __riscv_vsseg4e16_v_i16m2_m(__VA_ARGS__) |
| #define vsseg4e16_v_i16mf2 | ( | ... | ) | __riscv_vsseg4e16_v_i16mf2(__VA_ARGS__) |
| #define vsseg4e16_v_i16mf2_m | ( | ... | ) | __riscv_vsseg4e16_v_i16mf2_m(__VA_ARGS__) |
| #define vsseg4e16_v_i16mf4 | ( | ... | ) | __riscv_vsseg4e16_v_i16mf4(__VA_ARGS__) |
| #define vsseg4e16_v_i16mf4_m | ( | ... | ) | __riscv_vsseg4e16_v_i16mf4_m(__VA_ARGS__) |
| #define vsseg4e16_v_u16m1 | ( | ... | ) | __riscv_vsseg4e16_v_u16m1(__VA_ARGS__) |
| #define vsseg4e16_v_u16m1_m | ( | ... | ) | __riscv_vsseg4e16_v_u16m1_m(__VA_ARGS__) |
| #define vsseg4e16_v_u16m2 | ( | ... | ) | __riscv_vsseg4e16_v_u16m2(__VA_ARGS__) |
| #define vsseg4e16_v_u16m2_m | ( | ... | ) | __riscv_vsseg4e16_v_u16m2_m(__VA_ARGS__) |
| #define vsseg4e16_v_u16mf2 | ( | ... | ) | __riscv_vsseg4e16_v_u16mf2(__VA_ARGS__) |
| #define vsseg4e16_v_u16mf2_m | ( | ... | ) | __riscv_vsseg4e16_v_u16mf2_m(__VA_ARGS__) |
| #define vsseg4e16_v_u16mf4 | ( | ... | ) | __riscv_vsseg4e16_v_u16mf4(__VA_ARGS__) |
| #define vsseg4e16_v_u16mf4_m | ( | ... | ) | __riscv_vsseg4e16_v_u16mf4_m(__VA_ARGS__) |
| #define vsseg4e32_v_f32m1 | ( | ... | ) | __riscv_vsseg4e32_v_f32m1(__VA_ARGS__) |
| #define vsseg4e32_v_f32m1_m | ( | ... | ) | __riscv_vsseg4e32_v_f32m1_m(__VA_ARGS__) |
| #define vsseg4e32_v_f32m2 | ( | ... | ) | __riscv_vsseg4e32_v_f32m2(__VA_ARGS__) |
| #define vsseg4e32_v_f32m2_m | ( | ... | ) | __riscv_vsseg4e32_v_f32m2_m(__VA_ARGS__) |
| #define vsseg4e32_v_f32mf2 | ( | ... | ) | __riscv_vsseg4e32_v_f32mf2(__VA_ARGS__) |
| #define vsseg4e32_v_f32mf2_m | ( | ... | ) | __riscv_vsseg4e32_v_f32mf2_m(__VA_ARGS__) |
| #define vsseg4e32_v_i32m1 | ( | ... | ) | __riscv_vsseg4e32_v_i32m1(__VA_ARGS__) |
| #define vsseg4e32_v_i32m1_m | ( | ... | ) | __riscv_vsseg4e32_v_i32m1_m(__VA_ARGS__) |
| #define vsseg4e32_v_i32m2 | ( | ... | ) | __riscv_vsseg4e32_v_i32m2(__VA_ARGS__) |
| #define vsseg4e32_v_i32m2_m | ( | ... | ) | __riscv_vsseg4e32_v_i32m2_m(__VA_ARGS__) |
| #define vsseg4e32_v_i32mf2 | ( | ... | ) | __riscv_vsseg4e32_v_i32mf2(__VA_ARGS__) |
| #define vsseg4e32_v_i32mf2_m | ( | ... | ) | __riscv_vsseg4e32_v_i32mf2_m(__VA_ARGS__) |
| #define vsseg4e32_v_u32m1 | ( | ... | ) | __riscv_vsseg4e32_v_u32m1(__VA_ARGS__) |
| #define vsseg4e32_v_u32m1_m | ( | ... | ) | __riscv_vsseg4e32_v_u32m1_m(__VA_ARGS__) |
| #define vsseg4e32_v_u32m2 | ( | ... | ) | __riscv_vsseg4e32_v_u32m2(__VA_ARGS__) |
| #define vsseg4e32_v_u32m2_m | ( | ... | ) | __riscv_vsseg4e32_v_u32m2_m(__VA_ARGS__) |
| #define vsseg4e32_v_u32mf2 | ( | ... | ) | __riscv_vsseg4e32_v_u32mf2(__VA_ARGS__) |
| #define vsseg4e32_v_u32mf2_m | ( | ... | ) | __riscv_vsseg4e32_v_u32mf2_m(__VA_ARGS__) |
| #define vsseg4e64_v_f64m1 | ( | ... | ) | __riscv_vsseg4e64_v_f64m1(__VA_ARGS__) |
| #define vsseg4e64_v_f64m1_m | ( | ... | ) | __riscv_vsseg4e64_v_f64m1_m(__VA_ARGS__) |
| #define vsseg4e64_v_f64m2 | ( | ... | ) | __riscv_vsseg4e64_v_f64m2(__VA_ARGS__) |
| #define vsseg4e64_v_f64m2_m | ( | ... | ) | __riscv_vsseg4e64_v_f64m2_m(__VA_ARGS__) |
| #define vsseg4e64_v_i64m1 | ( | ... | ) | __riscv_vsseg4e64_v_i64m1(__VA_ARGS__) |
| #define vsseg4e64_v_i64m1_m | ( | ... | ) | __riscv_vsseg4e64_v_i64m1_m(__VA_ARGS__) |
| #define vsseg4e64_v_i64m2 | ( | ... | ) | __riscv_vsseg4e64_v_i64m2(__VA_ARGS__) |
| #define vsseg4e64_v_i64m2_m | ( | ... | ) | __riscv_vsseg4e64_v_i64m2_m(__VA_ARGS__) |
| #define vsseg4e64_v_u64m1 | ( | ... | ) | __riscv_vsseg4e64_v_u64m1(__VA_ARGS__) |
| #define vsseg4e64_v_u64m1_m | ( | ... | ) | __riscv_vsseg4e64_v_u64m1_m(__VA_ARGS__) |
| #define vsseg4e64_v_u64m2 | ( | ... | ) | __riscv_vsseg4e64_v_u64m2(__VA_ARGS__) |
| #define vsseg4e64_v_u64m2_m | ( | ... | ) | __riscv_vsseg4e64_v_u64m2_m(__VA_ARGS__) |
| #define vsseg4e8_v_i8m1 | ( | ... | ) | __riscv_vsseg4e8_v_i8m1(__VA_ARGS__) |
| #define vsseg4e8_v_i8m1_m | ( | ... | ) | __riscv_vsseg4e8_v_i8m1_m(__VA_ARGS__) |
| #define vsseg4e8_v_i8m2 | ( | ... | ) | __riscv_vsseg4e8_v_i8m2(__VA_ARGS__) |
| #define vsseg4e8_v_i8m2_m | ( | ... | ) | __riscv_vsseg4e8_v_i8m2_m(__VA_ARGS__) |
| #define vsseg4e8_v_i8mf2 | ( | ... | ) | __riscv_vsseg4e8_v_i8mf2(__VA_ARGS__) |
| #define vsseg4e8_v_i8mf2_m | ( | ... | ) | __riscv_vsseg4e8_v_i8mf2_m(__VA_ARGS__) |
| #define vsseg4e8_v_i8mf4 | ( | ... | ) | __riscv_vsseg4e8_v_i8mf4(__VA_ARGS__) |
| #define vsseg4e8_v_i8mf4_m | ( | ... | ) | __riscv_vsseg4e8_v_i8mf4_m(__VA_ARGS__) |
| #define vsseg4e8_v_i8mf8 | ( | ... | ) | __riscv_vsseg4e8_v_i8mf8(__VA_ARGS__) |
| #define vsseg4e8_v_i8mf8_m | ( | ... | ) | __riscv_vsseg4e8_v_i8mf8_m(__VA_ARGS__) |
| #define vsseg4e8_v_u8m1 | ( | ... | ) | __riscv_vsseg4e8_v_u8m1(__VA_ARGS__) |
| #define vsseg4e8_v_u8m1_m | ( | ... | ) | __riscv_vsseg4e8_v_u8m1_m(__VA_ARGS__) |
| #define vsseg4e8_v_u8m2 | ( | ... | ) | __riscv_vsseg4e8_v_u8m2(__VA_ARGS__) |
| #define vsseg4e8_v_u8m2_m | ( | ... | ) | __riscv_vsseg4e8_v_u8m2_m(__VA_ARGS__) |
| #define vsseg4e8_v_u8mf2 | ( | ... | ) | __riscv_vsseg4e8_v_u8mf2(__VA_ARGS__) |
| #define vsseg4e8_v_u8mf2_m | ( | ... | ) | __riscv_vsseg4e8_v_u8mf2_m(__VA_ARGS__) |
| #define vsseg4e8_v_u8mf4 | ( | ... | ) | __riscv_vsseg4e8_v_u8mf4(__VA_ARGS__) |
| #define vsseg4e8_v_u8mf4_m | ( | ... | ) | __riscv_vsseg4e8_v_u8mf4_m(__VA_ARGS__) |
| #define vsseg4e8_v_u8mf8 | ( | ... | ) | __riscv_vsseg4e8_v_u8mf8(__VA_ARGS__) |
| #define vsseg4e8_v_u8mf8_m | ( | ... | ) | __riscv_vsseg4e8_v_u8mf8_m(__VA_ARGS__) |
| #define vsseg5e16_v_f16m1 | ( | ... | ) | __riscv_vsseg5e16_v_f16m1(__VA_ARGS__) |
| #define vsseg5e16_v_f16m1_m | ( | ... | ) | __riscv_vsseg5e16_v_f16m1_m(__VA_ARGS__) |
| #define vsseg5e16_v_f16mf2 | ( | ... | ) | __riscv_vsseg5e16_v_f16mf2(__VA_ARGS__) |
| #define vsseg5e16_v_f16mf2_m | ( | ... | ) | __riscv_vsseg5e16_v_f16mf2_m(__VA_ARGS__) |
| #define vsseg5e16_v_f16mf4 | ( | ... | ) | __riscv_vsseg5e16_v_f16mf4(__VA_ARGS__) |
| #define vsseg5e16_v_f16mf4_m | ( | ... | ) | __riscv_vsseg5e16_v_f16mf4_m(__VA_ARGS__) |
| #define vsseg5e16_v_i16m1 | ( | ... | ) | __riscv_vsseg5e16_v_i16m1(__VA_ARGS__) |
| #define vsseg5e16_v_i16m1_m | ( | ... | ) | __riscv_vsseg5e16_v_i16m1_m(__VA_ARGS__) |
| #define vsseg5e16_v_i16mf2 | ( | ... | ) | __riscv_vsseg5e16_v_i16mf2(__VA_ARGS__) |
| #define vsseg5e16_v_i16mf2_m | ( | ... | ) | __riscv_vsseg5e16_v_i16mf2_m(__VA_ARGS__) |
| #define vsseg5e16_v_i16mf4 | ( | ... | ) | __riscv_vsseg5e16_v_i16mf4(__VA_ARGS__) |
| #define vsseg5e16_v_i16mf4_m | ( | ... | ) | __riscv_vsseg5e16_v_i16mf4_m(__VA_ARGS__) |
| #define vsseg5e16_v_u16m1 | ( | ... | ) | __riscv_vsseg5e16_v_u16m1(__VA_ARGS__) |
| #define vsseg5e16_v_u16m1_m | ( | ... | ) | __riscv_vsseg5e16_v_u16m1_m(__VA_ARGS__) |
| #define vsseg5e16_v_u16mf2 | ( | ... | ) | __riscv_vsseg5e16_v_u16mf2(__VA_ARGS__) |
| #define vsseg5e16_v_u16mf2_m | ( | ... | ) | __riscv_vsseg5e16_v_u16mf2_m(__VA_ARGS__) |
| #define vsseg5e16_v_u16mf4 | ( | ... | ) | __riscv_vsseg5e16_v_u16mf4(__VA_ARGS__) |
| #define vsseg5e16_v_u16mf4_m | ( | ... | ) | __riscv_vsseg5e16_v_u16mf4_m(__VA_ARGS__) |
| #define vsseg5e32_v_f32m1 | ( | ... | ) | __riscv_vsseg5e32_v_f32m1(__VA_ARGS__) |
| #define vsseg5e32_v_f32m1_m | ( | ... | ) | __riscv_vsseg5e32_v_f32m1_m(__VA_ARGS__) |
| #define vsseg5e32_v_f32mf2 | ( | ... | ) | __riscv_vsseg5e32_v_f32mf2(__VA_ARGS__) |
| #define vsseg5e32_v_f32mf2_m | ( | ... | ) | __riscv_vsseg5e32_v_f32mf2_m(__VA_ARGS__) |
| #define vsseg5e32_v_i32m1 | ( | ... | ) | __riscv_vsseg5e32_v_i32m1(__VA_ARGS__) |
| #define vsseg5e32_v_i32m1_m | ( | ... | ) | __riscv_vsseg5e32_v_i32m1_m(__VA_ARGS__) |
| #define vsseg5e32_v_i32mf2 | ( | ... | ) | __riscv_vsseg5e32_v_i32mf2(__VA_ARGS__) |
| #define vsseg5e32_v_i32mf2_m | ( | ... | ) | __riscv_vsseg5e32_v_i32mf2_m(__VA_ARGS__) |
| #define vsseg5e32_v_u32m1 | ( | ... | ) | __riscv_vsseg5e32_v_u32m1(__VA_ARGS__) |
| #define vsseg5e32_v_u32m1_m | ( | ... | ) | __riscv_vsseg5e32_v_u32m1_m(__VA_ARGS__) |
| #define vsseg5e32_v_u32mf2 | ( | ... | ) | __riscv_vsseg5e32_v_u32mf2(__VA_ARGS__) |
| #define vsseg5e32_v_u32mf2_m | ( | ... | ) | __riscv_vsseg5e32_v_u32mf2_m(__VA_ARGS__) |
| #define vsseg5e64_v_f64m1 | ( | ... | ) | __riscv_vsseg5e64_v_f64m1(__VA_ARGS__) |
| #define vsseg5e64_v_f64m1_m | ( | ... | ) | __riscv_vsseg5e64_v_f64m1_m(__VA_ARGS__) |
| #define vsseg5e64_v_i64m1 | ( | ... | ) | __riscv_vsseg5e64_v_i64m1(__VA_ARGS__) |
| #define vsseg5e64_v_i64m1_m | ( | ... | ) | __riscv_vsseg5e64_v_i64m1_m(__VA_ARGS__) |
| #define vsseg5e64_v_u64m1 | ( | ... | ) | __riscv_vsseg5e64_v_u64m1(__VA_ARGS__) |
| #define vsseg5e64_v_u64m1_m | ( | ... | ) | __riscv_vsseg5e64_v_u64m1_m(__VA_ARGS__) |
| #define vsseg5e8_v_i8m1 | ( | ... | ) | __riscv_vsseg5e8_v_i8m1(__VA_ARGS__) |
| #define vsseg5e8_v_i8m1_m | ( | ... | ) | __riscv_vsseg5e8_v_i8m1_m(__VA_ARGS__) |
| #define vsseg5e8_v_i8mf2 | ( | ... | ) | __riscv_vsseg5e8_v_i8mf2(__VA_ARGS__) |
| #define vsseg5e8_v_i8mf2_m | ( | ... | ) | __riscv_vsseg5e8_v_i8mf2_m(__VA_ARGS__) |
| #define vsseg5e8_v_i8mf4 | ( | ... | ) | __riscv_vsseg5e8_v_i8mf4(__VA_ARGS__) |
| #define vsseg5e8_v_i8mf4_m | ( | ... | ) | __riscv_vsseg5e8_v_i8mf4_m(__VA_ARGS__) |
| #define vsseg5e8_v_i8mf8 | ( | ... | ) | __riscv_vsseg5e8_v_i8mf8(__VA_ARGS__) |
| #define vsseg5e8_v_i8mf8_m | ( | ... | ) | __riscv_vsseg5e8_v_i8mf8_m(__VA_ARGS__) |
| #define vsseg5e8_v_u8m1 | ( | ... | ) | __riscv_vsseg5e8_v_u8m1(__VA_ARGS__) |
| #define vsseg5e8_v_u8m1_m | ( | ... | ) | __riscv_vsseg5e8_v_u8m1_m(__VA_ARGS__) |
| #define vsseg5e8_v_u8mf2 | ( | ... | ) | __riscv_vsseg5e8_v_u8mf2(__VA_ARGS__) |
| #define vsseg5e8_v_u8mf2_m | ( | ... | ) | __riscv_vsseg5e8_v_u8mf2_m(__VA_ARGS__) |
| #define vsseg5e8_v_u8mf4 | ( | ... | ) | __riscv_vsseg5e8_v_u8mf4(__VA_ARGS__) |
| #define vsseg5e8_v_u8mf4_m | ( | ... | ) | __riscv_vsseg5e8_v_u8mf4_m(__VA_ARGS__) |
| #define vsseg5e8_v_u8mf8 | ( | ... | ) | __riscv_vsseg5e8_v_u8mf8(__VA_ARGS__) |
| #define vsseg5e8_v_u8mf8_m | ( | ... | ) | __riscv_vsseg5e8_v_u8mf8_m(__VA_ARGS__) |
| #define vsseg6e16_v_f16m1 | ( | ... | ) | __riscv_vsseg6e16_v_f16m1(__VA_ARGS__) |
| #define vsseg6e16_v_f16m1_m | ( | ... | ) | __riscv_vsseg6e16_v_f16m1_m(__VA_ARGS__) |
| #define vsseg6e16_v_f16mf2 | ( | ... | ) | __riscv_vsseg6e16_v_f16mf2(__VA_ARGS__) |
| #define vsseg6e16_v_f16mf2_m | ( | ... | ) | __riscv_vsseg6e16_v_f16mf2_m(__VA_ARGS__) |
| #define vsseg6e16_v_f16mf4 | ( | ... | ) | __riscv_vsseg6e16_v_f16mf4(__VA_ARGS__) |
| #define vsseg6e16_v_f16mf4_m | ( | ... | ) | __riscv_vsseg6e16_v_f16mf4_m(__VA_ARGS__) |
| #define vsseg6e16_v_i16m1 | ( | ... | ) | __riscv_vsseg6e16_v_i16m1(__VA_ARGS__) |
| #define vsseg6e16_v_i16m1_m | ( | ... | ) | __riscv_vsseg6e16_v_i16m1_m(__VA_ARGS__) |
| #define vsseg6e16_v_i16mf2 | ( | ... | ) | __riscv_vsseg6e16_v_i16mf2(__VA_ARGS__) |
| #define vsseg6e16_v_i16mf2_m | ( | ... | ) | __riscv_vsseg6e16_v_i16mf2_m(__VA_ARGS__) |
| #define vsseg6e16_v_i16mf4 | ( | ... | ) | __riscv_vsseg6e16_v_i16mf4(__VA_ARGS__) |
| #define vsseg6e16_v_i16mf4_m | ( | ... | ) | __riscv_vsseg6e16_v_i16mf4_m(__VA_ARGS__) |
| #define vsseg6e16_v_u16m1 | ( | ... | ) | __riscv_vsseg6e16_v_u16m1(__VA_ARGS__) |
| #define vsseg6e16_v_u16m1_m | ( | ... | ) | __riscv_vsseg6e16_v_u16m1_m(__VA_ARGS__) |
| #define vsseg6e16_v_u16mf2 | ( | ... | ) | __riscv_vsseg6e16_v_u16mf2(__VA_ARGS__) |
| #define vsseg6e16_v_u16mf2_m | ( | ... | ) | __riscv_vsseg6e16_v_u16mf2_m(__VA_ARGS__) |
| #define vsseg6e16_v_u16mf4 | ( | ... | ) | __riscv_vsseg6e16_v_u16mf4(__VA_ARGS__) |
| #define vsseg6e16_v_u16mf4_m | ( | ... | ) | __riscv_vsseg6e16_v_u16mf4_m(__VA_ARGS__) |
| #define vsseg6e32_v_f32m1 | ( | ... | ) | __riscv_vsseg6e32_v_f32m1(__VA_ARGS__) |
| #define vsseg6e32_v_f32m1_m | ( | ... | ) | __riscv_vsseg6e32_v_f32m1_m(__VA_ARGS__) |
| #define vsseg6e32_v_f32mf2 | ( | ... | ) | __riscv_vsseg6e32_v_f32mf2(__VA_ARGS__) |
| #define vsseg6e32_v_f32mf2_m | ( | ... | ) | __riscv_vsseg6e32_v_f32mf2_m(__VA_ARGS__) |
| #define vsseg6e32_v_i32m1 | ( | ... | ) | __riscv_vsseg6e32_v_i32m1(__VA_ARGS__) |
| #define vsseg6e32_v_i32m1_m | ( | ... | ) | __riscv_vsseg6e32_v_i32m1_m(__VA_ARGS__) |
| #define vsseg6e32_v_i32mf2 | ( | ... | ) | __riscv_vsseg6e32_v_i32mf2(__VA_ARGS__) |
| #define vsseg6e32_v_i32mf2_m | ( | ... | ) | __riscv_vsseg6e32_v_i32mf2_m(__VA_ARGS__) |
| #define vsseg6e32_v_u32m1 | ( | ... | ) | __riscv_vsseg6e32_v_u32m1(__VA_ARGS__) |
| #define vsseg6e32_v_u32m1_m | ( | ... | ) | __riscv_vsseg6e32_v_u32m1_m(__VA_ARGS__) |
| #define vsseg6e32_v_u32mf2 | ( | ... | ) | __riscv_vsseg6e32_v_u32mf2(__VA_ARGS__) |
| #define vsseg6e32_v_u32mf2_m | ( | ... | ) | __riscv_vsseg6e32_v_u32mf2_m(__VA_ARGS__) |
| #define vsseg6e64_v_f64m1 | ( | ... | ) | __riscv_vsseg6e64_v_f64m1(__VA_ARGS__) |
| #define vsseg6e64_v_f64m1_m | ( | ... | ) | __riscv_vsseg6e64_v_f64m1_m(__VA_ARGS__) |
| #define vsseg6e64_v_i64m1 | ( | ... | ) | __riscv_vsseg6e64_v_i64m1(__VA_ARGS__) |
| #define vsseg6e64_v_i64m1_m | ( | ... | ) | __riscv_vsseg6e64_v_i64m1_m(__VA_ARGS__) |
| #define vsseg6e64_v_u64m1 | ( | ... | ) | __riscv_vsseg6e64_v_u64m1(__VA_ARGS__) |
| #define vsseg6e64_v_u64m1_m | ( | ... | ) | __riscv_vsseg6e64_v_u64m1_m(__VA_ARGS__) |
| #define vsseg6e8_v_i8m1 | ( | ... | ) | __riscv_vsseg6e8_v_i8m1(__VA_ARGS__) |
| #define vsseg6e8_v_i8m1_m | ( | ... | ) | __riscv_vsseg6e8_v_i8m1_m(__VA_ARGS__) |
| #define vsseg6e8_v_i8mf2 | ( | ... | ) | __riscv_vsseg6e8_v_i8mf2(__VA_ARGS__) |
| #define vsseg6e8_v_i8mf2_m | ( | ... | ) | __riscv_vsseg6e8_v_i8mf2_m(__VA_ARGS__) |
| #define vsseg6e8_v_i8mf4 | ( | ... | ) | __riscv_vsseg6e8_v_i8mf4(__VA_ARGS__) |
| #define vsseg6e8_v_i8mf4_m | ( | ... | ) | __riscv_vsseg6e8_v_i8mf4_m(__VA_ARGS__) |
| #define vsseg6e8_v_i8mf8 | ( | ... | ) | __riscv_vsseg6e8_v_i8mf8(__VA_ARGS__) |
| #define vsseg6e8_v_i8mf8_m | ( | ... | ) | __riscv_vsseg6e8_v_i8mf8_m(__VA_ARGS__) |
| #define vsseg6e8_v_u8m1 | ( | ... | ) | __riscv_vsseg6e8_v_u8m1(__VA_ARGS__) |
| #define vsseg6e8_v_u8m1_m | ( | ... | ) | __riscv_vsseg6e8_v_u8m1_m(__VA_ARGS__) |
| #define vsseg6e8_v_u8mf2 | ( | ... | ) | __riscv_vsseg6e8_v_u8mf2(__VA_ARGS__) |
| #define vsseg6e8_v_u8mf2_m | ( | ... | ) | __riscv_vsseg6e8_v_u8mf2_m(__VA_ARGS__) |
| #define vsseg6e8_v_u8mf4 | ( | ... | ) | __riscv_vsseg6e8_v_u8mf4(__VA_ARGS__) |
| #define vsseg6e8_v_u8mf4_m | ( | ... | ) | __riscv_vsseg6e8_v_u8mf4_m(__VA_ARGS__) |
| #define vsseg6e8_v_u8mf8 | ( | ... | ) | __riscv_vsseg6e8_v_u8mf8(__VA_ARGS__) |
| #define vsseg6e8_v_u8mf8_m | ( | ... | ) | __riscv_vsseg6e8_v_u8mf8_m(__VA_ARGS__) |
| #define vsseg7e16_v_f16m1 | ( | ... | ) | __riscv_vsseg7e16_v_f16m1(__VA_ARGS__) |
| #define vsseg7e16_v_f16m1_m | ( | ... | ) | __riscv_vsseg7e16_v_f16m1_m(__VA_ARGS__) |
| #define vsseg7e16_v_f16mf2 | ( | ... | ) | __riscv_vsseg7e16_v_f16mf2(__VA_ARGS__) |
| #define vsseg7e16_v_f16mf2_m | ( | ... | ) | __riscv_vsseg7e16_v_f16mf2_m(__VA_ARGS__) |
| #define vsseg7e16_v_f16mf4 | ( | ... | ) | __riscv_vsseg7e16_v_f16mf4(__VA_ARGS__) |
| #define vsseg7e16_v_f16mf4_m | ( | ... | ) | __riscv_vsseg7e16_v_f16mf4_m(__VA_ARGS__) |
| #define vsseg7e16_v_i16m1 | ( | ... | ) | __riscv_vsseg7e16_v_i16m1(__VA_ARGS__) |
| #define vsseg7e16_v_i16m1_m | ( | ... | ) | __riscv_vsseg7e16_v_i16m1_m(__VA_ARGS__) |
| #define vsseg7e16_v_i16mf2 | ( | ... | ) | __riscv_vsseg7e16_v_i16mf2(__VA_ARGS__) |
| #define vsseg7e16_v_i16mf2_m | ( | ... | ) | __riscv_vsseg7e16_v_i16mf2_m(__VA_ARGS__) |
| #define vsseg7e16_v_i16mf4 | ( | ... | ) | __riscv_vsseg7e16_v_i16mf4(__VA_ARGS__) |
| #define vsseg7e16_v_i16mf4_m | ( | ... | ) | __riscv_vsseg7e16_v_i16mf4_m(__VA_ARGS__) |
| #define vsseg7e16_v_u16m1 | ( | ... | ) | __riscv_vsseg7e16_v_u16m1(__VA_ARGS__) |
| #define vsseg7e16_v_u16m1_m | ( | ... | ) | __riscv_vsseg7e16_v_u16m1_m(__VA_ARGS__) |
| #define vsseg7e16_v_u16mf2 | ( | ... | ) | __riscv_vsseg7e16_v_u16mf2(__VA_ARGS__) |
| #define vsseg7e16_v_u16mf2_m | ( | ... | ) | __riscv_vsseg7e16_v_u16mf2_m(__VA_ARGS__) |
| #define vsseg7e16_v_u16mf4 | ( | ... | ) | __riscv_vsseg7e16_v_u16mf4(__VA_ARGS__) |
| #define vsseg7e16_v_u16mf4_m | ( | ... | ) | __riscv_vsseg7e16_v_u16mf4_m(__VA_ARGS__) |
| #define vsseg7e32_v_f32m1 | ( | ... | ) | __riscv_vsseg7e32_v_f32m1(__VA_ARGS__) |
| #define vsseg7e32_v_f32m1_m | ( | ... | ) | __riscv_vsseg7e32_v_f32m1_m(__VA_ARGS__) |
| #define vsseg7e32_v_f32mf2 | ( | ... | ) | __riscv_vsseg7e32_v_f32mf2(__VA_ARGS__) |
| #define vsseg7e32_v_f32mf2_m | ( | ... | ) | __riscv_vsseg7e32_v_f32mf2_m(__VA_ARGS__) |
| #define vsseg7e32_v_i32m1 | ( | ... | ) | __riscv_vsseg7e32_v_i32m1(__VA_ARGS__) |
| #define vsseg7e32_v_i32m1_m | ( | ... | ) | __riscv_vsseg7e32_v_i32m1_m(__VA_ARGS__) |
| #define vsseg7e32_v_i32mf2 | ( | ... | ) | __riscv_vsseg7e32_v_i32mf2(__VA_ARGS__) |
| #define vsseg7e32_v_i32mf2_m | ( | ... | ) | __riscv_vsseg7e32_v_i32mf2_m(__VA_ARGS__) |
| #define vsseg7e32_v_u32m1 | ( | ... | ) | __riscv_vsseg7e32_v_u32m1(__VA_ARGS__) |
| #define vsseg7e32_v_u32m1_m | ( | ... | ) | __riscv_vsseg7e32_v_u32m1_m(__VA_ARGS__) |
| #define vsseg7e32_v_u32mf2 | ( | ... | ) | __riscv_vsseg7e32_v_u32mf2(__VA_ARGS__) |
| #define vsseg7e32_v_u32mf2_m | ( | ... | ) | __riscv_vsseg7e32_v_u32mf2_m(__VA_ARGS__) |
| #define vsseg7e64_v_f64m1 | ( | ... | ) | __riscv_vsseg7e64_v_f64m1(__VA_ARGS__) |
| #define vsseg7e64_v_f64m1_m | ( | ... | ) | __riscv_vsseg7e64_v_f64m1_m(__VA_ARGS__) |
| #define vsseg7e64_v_i64m1 | ( | ... | ) | __riscv_vsseg7e64_v_i64m1(__VA_ARGS__) |
| #define vsseg7e64_v_i64m1_m | ( | ... | ) | __riscv_vsseg7e64_v_i64m1_m(__VA_ARGS__) |
| #define vsseg7e64_v_u64m1 | ( | ... | ) | __riscv_vsseg7e64_v_u64m1(__VA_ARGS__) |
| #define vsseg7e64_v_u64m1_m | ( | ... | ) | __riscv_vsseg7e64_v_u64m1_m(__VA_ARGS__) |
| #define vsseg7e8_v_i8m1 | ( | ... | ) | __riscv_vsseg7e8_v_i8m1(__VA_ARGS__) |
| #define vsseg7e8_v_i8m1_m | ( | ... | ) | __riscv_vsseg7e8_v_i8m1_m(__VA_ARGS__) |
| #define vsseg7e8_v_i8mf2 | ( | ... | ) | __riscv_vsseg7e8_v_i8mf2(__VA_ARGS__) |
| #define vsseg7e8_v_i8mf2_m | ( | ... | ) | __riscv_vsseg7e8_v_i8mf2_m(__VA_ARGS__) |
| #define vsseg7e8_v_i8mf4 | ( | ... | ) | __riscv_vsseg7e8_v_i8mf4(__VA_ARGS__) |
| #define vsseg7e8_v_i8mf4_m | ( | ... | ) | __riscv_vsseg7e8_v_i8mf4_m(__VA_ARGS__) |
| #define vsseg7e8_v_i8mf8 | ( | ... | ) | __riscv_vsseg7e8_v_i8mf8(__VA_ARGS__) |
| #define vsseg7e8_v_i8mf8_m | ( | ... | ) | __riscv_vsseg7e8_v_i8mf8_m(__VA_ARGS__) |
| #define vsseg7e8_v_u8m1 | ( | ... | ) | __riscv_vsseg7e8_v_u8m1(__VA_ARGS__) |
| #define vsseg7e8_v_u8m1_m | ( | ... | ) | __riscv_vsseg7e8_v_u8m1_m(__VA_ARGS__) |
| #define vsseg7e8_v_u8mf2 | ( | ... | ) | __riscv_vsseg7e8_v_u8mf2(__VA_ARGS__) |
| #define vsseg7e8_v_u8mf2_m | ( | ... | ) | __riscv_vsseg7e8_v_u8mf2_m(__VA_ARGS__) |
| #define vsseg7e8_v_u8mf4 | ( | ... | ) | __riscv_vsseg7e8_v_u8mf4(__VA_ARGS__) |
| #define vsseg7e8_v_u8mf4_m | ( | ... | ) | __riscv_vsseg7e8_v_u8mf4_m(__VA_ARGS__) |
| #define vsseg7e8_v_u8mf8 | ( | ... | ) | __riscv_vsseg7e8_v_u8mf8(__VA_ARGS__) |
| #define vsseg7e8_v_u8mf8_m | ( | ... | ) | __riscv_vsseg7e8_v_u8mf8_m(__VA_ARGS__) |
| #define vsseg8e16_v_f16m1 | ( | ... | ) | __riscv_vsseg8e16_v_f16m1(__VA_ARGS__) |
| #define vsseg8e16_v_f16m1_m | ( | ... | ) | __riscv_vsseg8e16_v_f16m1_m(__VA_ARGS__) |
| #define vsseg8e16_v_f16mf2 | ( | ... | ) | __riscv_vsseg8e16_v_f16mf2(__VA_ARGS__) |
| #define vsseg8e16_v_f16mf2_m | ( | ... | ) | __riscv_vsseg8e16_v_f16mf2_m(__VA_ARGS__) |
| #define vsseg8e16_v_f16mf4 | ( | ... | ) | __riscv_vsseg8e16_v_f16mf4(__VA_ARGS__) |
| #define vsseg8e16_v_f16mf4_m | ( | ... | ) | __riscv_vsseg8e16_v_f16mf4_m(__VA_ARGS__) |
| #define vsseg8e16_v_i16m1 | ( | ... | ) | __riscv_vsseg8e16_v_i16m1(__VA_ARGS__) |
| #define vsseg8e16_v_i16m1_m | ( | ... | ) | __riscv_vsseg8e16_v_i16m1_m(__VA_ARGS__) |
| #define vsseg8e16_v_i16mf2 | ( | ... | ) | __riscv_vsseg8e16_v_i16mf2(__VA_ARGS__) |
| #define vsseg8e16_v_i16mf2_m | ( | ... | ) | __riscv_vsseg8e16_v_i16mf2_m(__VA_ARGS__) |
| #define vsseg8e16_v_i16mf4 | ( | ... | ) | __riscv_vsseg8e16_v_i16mf4(__VA_ARGS__) |
| #define vsseg8e16_v_i16mf4_m | ( | ... | ) | __riscv_vsseg8e16_v_i16mf4_m(__VA_ARGS__) |
| #define vsseg8e16_v_u16m1 | ( | ... | ) | __riscv_vsseg8e16_v_u16m1(__VA_ARGS__) |
| #define vsseg8e16_v_u16m1_m | ( | ... | ) | __riscv_vsseg8e16_v_u16m1_m(__VA_ARGS__) |
| #define vsseg8e16_v_u16mf2 | ( | ... | ) | __riscv_vsseg8e16_v_u16mf2(__VA_ARGS__) |
| #define vsseg8e16_v_u16mf2_m | ( | ... | ) | __riscv_vsseg8e16_v_u16mf2_m(__VA_ARGS__) |
| #define vsseg8e16_v_u16mf4 | ( | ... | ) | __riscv_vsseg8e16_v_u16mf4(__VA_ARGS__) |
| #define vsseg8e16_v_u16mf4_m | ( | ... | ) | __riscv_vsseg8e16_v_u16mf4_m(__VA_ARGS__) |
| #define vsseg8e32_v_f32m1 | ( | ... | ) | __riscv_vsseg8e32_v_f32m1(__VA_ARGS__) |
| #define vsseg8e32_v_f32m1_m | ( | ... | ) | __riscv_vsseg8e32_v_f32m1_m(__VA_ARGS__) |
| #define vsseg8e32_v_f32mf2 | ( | ... | ) | __riscv_vsseg8e32_v_f32mf2(__VA_ARGS__) |
| #define vsseg8e32_v_f32mf2_m | ( | ... | ) | __riscv_vsseg8e32_v_f32mf2_m(__VA_ARGS__) |
| #define vsseg8e32_v_i32m1 | ( | ... | ) | __riscv_vsseg8e32_v_i32m1(__VA_ARGS__) |
| #define vsseg8e32_v_i32m1_m | ( | ... | ) | __riscv_vsseg8e32_v_i32m1_m(__VA_ARGS__) |
| #define vsseg8e32_v_i32mf2 | ( | ... | ) | __riscv_vsseg8e32_v_i32mf2(__VA_ARGS__) |
| #define vsseg8e32_v_i32mf2_m | ( | ... | ) | __riscv_vsseg8e32_v_i32mf2_m(__VA_ARGS__) |
| #define vsseg8e32_v_u32m1 | ( | ... | ) | __riscv_vsseg8e32_v_u32m1(__VA_ARGS__) |
| #define vsseg8e32_v_u32m1_m | ( | ... | ) | __riscv_vsseg8e32_v_u32m1_m(__VA_ARGS__) |
| #define vsseg8e32_v_u32mf2 | ( | ... | ) | __riscv_vsseg8e32_v_u32mf2(__VA_ARGS__) |
| #define vsseg8e32_v_u32mf2_m | ( | ... | ) | __riscv_vsseg8e32_v_u32mf2_m(__VA_ARGS__) |
| #define vsseg8e64_v_f64m1 | ( | ... | ) | __riscv_vsseg8e64_v_f64m1(__VA_ARGS__) |
| #define vsseg8e64_v_f64m1_m | ( | ... | ) | __riscv_vsseg8e64_v_f64m1_m(__VA_ARGS__) |
| #define vsseg8e64_v_i64m1 | ( | ... | ) | __riscv_vsseg8e64_v_i64m1(__VA_ARGS__) |
| #define vsseg8e64_v_i64m1_m | ( | ... | ) | __riscv_vsseg8e64_v_i64m1_m(__VA_ARGS__) |
| #define vsseg8e64_v_u64m1 | ( | ... | ) | __riscv_vsseg8e64_v_u64m1(__VA_ARGS__) |
| #define vsseg8e64_v_u64m1_m | ( | ... | ) | __riscv_vsseg8e64_v_u64m1_m(__VA_ARGS__) |
| #define vsseg8e8_v_i8m1 | ( | ... | ) | __riscv_vsseg8e8_v_i8m1(__VA_ARGS__) |
| #define vsseg8e8_v_i8m1_m | ( | ... | ) | __riscv_vsseg8e8_v_i8m1_m(__VA_ARGS__) |
| #define vsseg8e8_v_i8mf2 | ( | ... | ) | __riscv_vsseg8e8_v_i8mf2(__VA_ARGS__) |
| #define vsseg8e8_v_i8mf2_m | ( | ... | ) | __riscv_vsseg8e8_v_i8mf2_m(__VA_ARGS__) |
| #define vsseg8e8_v_i8mf4 | ( | ... | ) | __riscv_vsseg8e8_v_i8mf4(__VA_ARGS__) |
| #define vsseg8e8_v_i8mf4_m | ( | ... | ) | __riscv_vsseg8e8_v_i8mf4_m(__VA_ARGS__) |
| #define vsseg8e8_v_i8mf8 | ( | ... | ) | __riscv_vsseg8e8_v_i8mf8(__VA_ARGS__) |
| #define vsseg8e8_v_i8mf8_m | ( | ... | ) | __riscv_vsseg8e8_v_i8mf8_m(__VA_ARGS__) |
| #define vsseg8e8_v_u8m1 | ( | ... | ) | __riscv_vsseg8e8_v_u8m1(__VA_ARGS__) |
| #define vsseg8e8_v_u8m1_m | ( | ... | ) | __riscv_vsseg8e8_v_u8m1_m(__VA_ARGS__) |
| #define vsseg8e8_v_u8mf2 | ( | ... | ) | __riscv_vsseg8e8_v_u8mf2(__VA_ARGS__) |
| #define vsseg8e8_v_u8mf2_m | ( | ... | ) | __riscv_vsseg8e8_v_u8mf2_m(__VA_ARGS__) |
| #define vsseg8e8_v_u8mf4 | ( | ... | ) | __riscv_vsseg8e8_v_u8mf4(__VA_ARGS__) |
| #define vsseg8e8_v_u8mf4_m | ( | ... | ) | __riscv_vsseg8e8_v_u8mf4_m(__VA_ARGS__) |
| #define vsseg8e8_v_u8mf8 | ( | ... | ) | __riscv_vsseg8e8_v_u8mf8(__VA_ARGS__) |
| #define vsseg8e8_v_u8mf8_m | ( | ... | ) | __riscv_vsseg8e8_v_u8mf8_m(__VA_ARGS__) |
| #define vssra_vv_i16m1 | ( | ... | ) | __riscv_vssra_vv_i16m1(__VA_ARGS__) |
| #define vssra_vv_i16m1_m | ( | ... | ) | __riscv_vssra_vv_i16m1_tumu(__VA_ARGS__) |
| #define vssra_vv_i16m2 | ( | ... | ) | __riscv_vssra_vv_i16m2(__VA_ARGS__) |
| #define vssra_vv_i16m2_m | ( | ... | ) | __riscv_vssra_vv_i16m2_tumu(__VA_ARGS__) |
| #define vssra_vv_i16m4 | ( | ... | ) | __riscv_vssra_vv_i16m4(__VA_ARGS__) |
| #define vssra_vv_i16m4_m | ( | ... | ) | __riscv_vssra_vv_i16m4_tumu(__VA_ARGS__) |
| #define vssra_vv_i16m8 | ( | ... | ) | __riscv_vssra_vv_i16m8(__VA_ARGS__) |
| #define vssra_vv_i16m8_m | ( | ... | ) | __riscv_vssra_vv_i16m8_tumu(__VA_ARGS__) |
| #define vssra_vv_i16mf2 | ( | ... | ) | __riscv_vssra_vv_i16mf2(__VA_ARGS__) |
| #define vssra_vv_i16mf2_m | ( | ... | ) | __riscv_vssra_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vssra_vv_i16mf4 | ( | ... | ) | __riscv_vssra_vv_i16mf4(__VA_ARGS__) |
| #define vssra_vv_i16mf4_m | ( | ... | ) | __riscv_vssra_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vssra_vv_i32m1 | ( | ... | ) | __riscv_vssra_vv_i32m1(__VA_ARGS__) |
| #define vssra_vv_i32m1_m | ( | ... | ) | __riscv_vssra_vv_i32m1_tumu(__VA_ARGS__) |
| #define vssra_vv_i32m2 | ( | ... | ) | __riscv_vssra_vv_i32m2(__VA_ARGS__) |
| #define vssra_vv_i32m2_m | ( | ... | ) | __riscv_vssra_vv_i32m2_tumu(__VA_ARGS__) |
| #define vssra_vv_i32m4 | ( | ... | ) | __riscv_vssra_vv_i32m4(__VA_ARGS__) |
| #define vssra_vv_i32m4_m | ( | ... | ) | __riscv_vssra_vv_i32m4_tumu(__VA_ARGS__) |
| #define vssra_vv_i32m8 | ( | ... | ) | __riscv_vssra_vv_i32m8(__VA_ARGS__) |
| #define vssra_vv_i32m8_m | ( | ... | ) | __riscv_vssra_vv_i32m8_tumu(__VA_ARGS__) |
| #define vssra_vv_i32mf2 | ( | ... | ) | __riscv_vssra_vv_i32mf2(__VA_ARGS__) |
| #define vssra_vv_i32mf2_m | ( | ... | ) | __riscv_vssra_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vssra_vv_i64m1 | ( | ... | ) | __riscv_vssra_vv_i64m1(__VA_ARGS__) |
| #define vssra_vv_i64m1_m | ( | ... | ) | __riscv_vssra_vv_i64m1_tumu(__VA_ARGS__) |
| #define vssra_vv_i64m2 | ( | ... | ) | __riscv_vssra_vv_i64m2(__VA_ARGS__) |
| #define vssra_vv_i64m2_m | ( | ... | ) | __riscv_vssra_vv_i64m2_tumu(__VA_ARGS__) |
| #define vssra_vv_i64m4 | ( | ... | ) | __riscv_vssra_vv_i64m4(__VA_ARGS__) |
| #define vssra_vv_i64m4_m | ( | ... | ) | __riscv_vssra_vv_i64m4_tumu(__VA_ARGS__) |
| #define vssra_vv_i64m8 | ( | ... | ) | __riscv_vssra_vv_i64m8(__VA_ARGS__) |
| #define vssra_vv_i64m8_m | ( | ... | ) | __riscv_vssra_vv_i64m8_tumu(__VA_ARGS__) |
| #define vssra_vv_i8m1 | ( | ... | ) | __riscv_vssra_vv_i8m1(__VA_ARGS__) |
| #define vssra_vv_i8m1_m | ( | ... | ) | __riscv_vssra_vv_i8m1_tumu(__VA_ARGS__) |
| #define vssra_vv_i8m2 | ( | ... | ) | __riscv_vssra_vv_i8m2(__VA_ARGS__) |
| #define vssra_vv_i8m2_m | ( | ... | ) | __riscv_vssra_vv_i8m2_tumu(__VA_ARGS__) |
| #define vssra_vv_i8m4 | ( | ... | ) | __riscv_vssra_vv_i8m4(__VA_ARGS__) |
| #define vssra_vv_i8m4_m | ( | ... | ) | __riscv_vssra_vv_i8m4_tumu(__VA_ARGS__) |
| #define vssra_vv_i8m8 | ( | ... | ) | __riscv_vssra_vv_i8m8(__VA_ARGS__) |
| #define vssra_vv_i8m8_m | ( | ... | ) | __riscv_vssra_vv_i8m8_tumu(__VA_ARGS__) |
| #define vssra_vv_i8mf2 | ( | ... | ) | __riscv_vssra_vv_i8mf2(__VA_ARGS__) |
| #define vssra_vv_i8mf2_m | ( | ... | ) | __riscv_vssra_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vssra_vv_i8mf4 | ( | ... | ) | __riscv_vssra_vv_i8mf4(__VA_ARGS__) |
| #define vssra_vv_i8mf4_m | ( | ... | ) | __riscv_vssra_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vssra_vv_i8mf8 | ( | ... | ) | __riscv_vssra_vv_i8mf8(__VA_ARGS__) |
| #define vssra_vv_i8mf8_m | ( | ... | ) | __riscv_vssra_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vssra_vx_i16m1 | ( | ... | ) | __riscv_vssra_vx_i16m1(__VA_ARGS__) |
| #define vssra_vx_i16m1_m | ( | ... | ) | __riscv_vssra_vx_i16m1_tumu(__VA_ARGS__) |
| #define vssra_vx_i16m2 | ( | ... | ) | __riscv_vssra_vx_i16m2(__VA_ARGS__) |
| #define vssra_vx_i16m2_m | ( | ... | ) | __riscv_vssra_vx_i16m2_tumu(__VA_ARGS__) |
| #define vssra_vx_i16m4 | ( | ... | ) | __riscv_vssra_vx_i16m4(__VA_ARGS__) |
| #define vssra_vx_i16m4_m | ( | ... | ) | __riscv_vssra_vx_i16m4_tumu(__VA_ARGS__) |
| #define vssra_vx_i16m8 | ( | ... | ) | __riscv_vssra_vx_i16m8(__VA_ARGS__) |
| #define vssra_vx_i16m8_m | ( | ... | ) | __riscv_vssra_vx_i16m8_tumu(__VA_ARGS__) |
| #define vssra_vx_i16mf2 | ( | ... | ) | __riscv_vssra_vx_i16mf2(__VA_ARGS__) |
| #define vssra_vx_i16mf2_m | ( | ... | ) | __riscv_vssra_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vssra_vx_i16mf4 | ( | ... | ) | __riscv_vssra_vx_i16mf4(__VA_ARGS__) |
| #define vssra_vx_i16mf4_m | ( | ... | ) | __riscv_vssra_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vssra_vx_i32m1 | ( | ... | ) | __riscv_vssra_vx_i32m1(__VA_ARGS__) |
| #define vssra_vx_i32m1_m | ( | ... | ) | __riscv_vssra_vx_i32m1_tumu(__VA_ARGS__) |
| #define vssra_vx_i32m2 | ( | ... | ) | __riscv_vssra_vx_i32m2(__VA_ARGS__) |
| #define vssra_vx_i32m2_m | ( | ... | ) | __riscv_vssra_vx_i32m2_tumu(__VA_ARGS__) |
| #define vssra_vx_i32m4 | ( | ... | ) | __riscv_vssra_vx_i32m4(__VA_ARGS__) |
| #define vssra_vx_i32m4_m | ( | ... | ) | __riscv_vssra_vx_i32m4_tumu(__VA_ARGS__) |
| #define vssra_vx_i32m8 | ( | ... | ) | __riscv_vssra_vx_i32m8(__VA_ARGS__) |
| #define vssra_vx_i32m8_m | ( | ... | ) | __riscv_vssra_vx_i32m8_tumu(__VA_ARGS__) |
| #define vssra_vx_i32mf2 | ( | ... | ) | __riscv_vssra_vx_i32mf2(__VA_ARGS__) |
| #define vssra_vx_i32mf2_m | ( | ... | ) | __riscv_vssra_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vssra_vx_i64m1 | ( | ... | ) | __riscv_vssra_vx_i64m1(__VA_ARGS__) |
| #define vssra_vx_i64m1_m | ( | ... | ) | __riscv_vssra_vx_i64m1_tumu(__VA_ARGS__) |
| #define vssra_vx_i64m2 | ( | ... | ) | __riscv_vssra_vx_i64m2(__VA_ARGS__) |
| #define vssra_vx_i64m2_m | ( | ... | ) | __riscv_vssra_vx_i64m2_tumu(__VA_ARGS__) |
| #define vssra_vx_i64m4 | ( | ... | ) | __riscv_vssra_vx_i64m4(__VA_ARGS__) |
| #define vssra_vx_i64m4_m | ( | ... | ) | __riscv_vssra_vx_i64m4_tumu(__VA_ARGS__) |
| #define vssra_vx_i64m8 | ( | ... | ) | __riscv_vssra_vx_i64m8(__VA_ARGS__) |
| #define vssra_vx_i64m8_m | ( | ... | ) | __riscv_vssra_vx_i64m8_tumu(__VA_ARGS__) |
| #define vssra_vx_i8m1 | ( | ... | ) | __riscv_vssra_vx_i8m1(__VA_ARGS__) |
| #define vssra_vx_i8m1_m | ( | ... | ) | __riscv_vssra_vx_i8m1_tumu(__VA_ARGS__) |
| #define vssra_vx_i8m2 | ( | ... | ) | __riscv_vssra_vx_i8m2(__VA_ARGS__) |
| #define vssra_vx_i8m2_m | ( | ... | ) | __riscv_vssra_vx_i8m2_tumu(__VA_ARGS__) |
| #define vssra_vx_i8m4 | ( | ... | ) | __riscv_vssra_vx_i8m4(__VA_ARGS__) |
| #define vssra_vx_i8m4_m | ( | ... | ) | __riscv_vssra_vx_i8m4_tumu(__VA_ARGS__) |
| #define vssra_vx_i8m8 | ( | ... | ) | __riscv_vssra_vx_i8m8(__VA_ARGS__) |
| #define vssra_vx_i8m8_m | ( | ... | ) | __riscv_vssra_vx_i8m8_tumu(__VA_ARGS__) |
| #define vssra_vx_i8mf2 | ( | ... | ) | __riscv_vssra_vx_i8mf2(__VA_ARGS__) |
| #define vssra_vx_i8mf2_m | ( | ... | ) | __riscv_vssra_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vssra_vx_i8mf4 | ( | ... | ) | __riscv_vssra_vx_i8mf4(__VA_ARGS__) |
| #define vssra_vx_i8mf4_m | ( | ... | ) | __riscv_vssra_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vssra_vx_i8mf8 | ( | ... | ) | __riscv_vssra_vx_i8mf8(__VA_ARGS__) |
| #define vssra_vx_i8mf8_m | ( | ... | ) | __riscv_vssra_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vssrl_vv_u16m1 | ( | ... | ) | __riscv_vssrl_vv_u16m1(__VA_ARGS__) |
| #define vssrl_vv_u16m1_m | ( | ... | ) | __riscv_vssrl_vv_u16m1_tumu(__VA_ARGS__) |
| #define vssrl_vv_u16m2 | ( | ... | ) | __riscv_vssrl_vv_u16m2(__VA_ARGS__) |
| #define vssrl_vv_u16m2_m | ( | ... | ) | __riscv_vssrl_vv_u16m2_tumu(__VA_ARGS__) |
| #define vssrl_vv_u16m4 | ( | ... | ) | __riscv_vssrl_vv_u16m4(__VA_ARGS__) |
| #define vssrl_vv_u16m4_m | ( | ... | ) | __riscv_vssrl_vv_u16m4_tumu(__VA_ARGS__) |
| #define vssrl_vv_u16m8 | ( | ... | ) | __riscv_vssrl_vv_u16m8(__VA_ARGS__) |
| #define vssrl_vv_u16m8_m | ( | ... | ) | __riscv_vssrl_vv_u16m8_tumu(__VA_ARGS__) |
| #define vssrl_vv_u16mf2 | ( | ... | ) | __riscv_vssrl_vv_u16mf2(__VA_ARGS__) |
| #define vssrl_vv_u16mf2_m | ( | ... | ) | __riscv_vssrl_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vssrl_vv_u16mf4 | ( | ... | ) | __riscv_vssrl_vv_u16mf4(__VA_ARGS__) |
| #define vssrl_vv_u16mf4_m | ( | ... | ) | __riscv_vssrl_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vssrl_vv_u32m1 | ( | ... | ) | __riscv_vssrl_vv_u32m1(__VA_ARGS__) |
| #define vssrl_vv_u32m1_m | ( | ... | ) | __riscv_vssrl_vv_u32m1_tumu(__VA_ARGS__) |
| #define vssrl_vv_u32m2 | ( | ... | ) | __riscv_vssrl_vv_u32m2(__VA_ARGS__) |
| #define vssrl_vv_u32m2_m | ( | ... | ) | __riscv_vssrl_vv_u32m2_tumu(__VA_ARGS__) |
| #define vssrl_vv_u32m4 | ( | ... | ) | __riscv_vssrl_vv_u32m4(__VA_ARGS__) |
| #define vssrl_vv_u32m4_m | ( | ... | ) | __riscv_vssrl_vv_u32m4_tumu(__VA_ARGS__) |
| #define vssrl_vv_u32m8 | ( | ... | ) | __riscv_vssrl_vv_u32m8(__VA_ARGS__) |
| #define vssrl_vv_u32m8_m | ( | ... | ) | __riscv_vssrl_vv_u32m8_tumu(__VA_ARGS__) |
| #define vssrl_vv_u32mf2 | ( | ... | ) | __riscv_vssrl_vv_u32mf2(__VA_ARGS__) |
| #define vssrl_vv_u32mf2_m | ( | ... | ) | __riscv_vssrl_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vssrl_vv_u64m1 | ( | ... | ) | __riscv_vssrl_vv_u64m1(__VA_ARGS__) |
| #define vssrl_vv_u64m1_m | ( | ... | ) | __riscv_vssrl_vv_u64m1_tumu(__VA_ARGS__) |
| #define vssrl_vv_u64m2 | ( | ... | ) | __riscv_vssrl_vv_u64m2(__VA_ARGS__) |
| #define vssrl_vv_u64m2_m | ( | ... | ) | __riscv_vssrl_vv_u64m2_tumu(__VA_ARGS__) |
| #define vssrl_vv_u64m4 | ( | ... | ) | __riscv_vssrl_vv_u64m4(__VA_ARGS__) |
| #define vssrl_vv_u64m4_m | ( | ... | ) | __riscv_vssrl_vv_u64m4_tumu(__VA_ARGS__) |
| #define vssrl_vv_u64m8 | ( | ... | ) | __riscv_vssrl_vv_u64m8(__VA_ARGS__) |
| #define vssrl_vv_u64m8_m | ( | ... | ) | __riscv_vssrl_vv_u64m8_tumu(__VA_ARGS__) |
| #define vssrl_vv_u8m1 | ( | ... | ) | __riscv_vssrl_vv_u8m1(__VA_ARGS__) |
| #define vssrl_vv_u8m1_m | ( | ... | ) | __riscv_vssrl_vv_u8m1_tumu(__VA_ARGS__) |
| #define vssrl_vv_u8m2 | ( | ... | ) | __riscv_vssrl_vv_u8m2(__VA_ARGS__) |
| #define vssrl_vv_u8m2_m | ( | ... | ) | __riscv_vssrl_vv_u8m2_tumu(__VA_ARGS__) |
| #define vssrl_vv_u8m4 | ( | ... | ) | __riscv_vssrl_vv_u8m4(__VA_ARGS__) |
| #define vssrl_vv_u8m4_m | ( | ... | ) | __riscv_vssrl_vv_u8m4_tumu(__VA_ARGS__) |
| #define vssrl_vv_u8m8 | ( | ... | ) | __riscv_vssrl_vv_u8m8(__VA_ARGS__) |
| #define vssrl_vv_u8m8_m | ( | ... | ) | __riscv_vssrl_vv_u8m8_tumu(__VA_ARGS__) |
| #define vssrl_vv_u8mf2 | ( | ... | ) | __riscv_vssrl_vv_u8mf2(__VA_ARGS__) |
| #define vssrl_vv_u8mf2_m | ( | ... | ) | __riscv_vssrl_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vssrl_vv_u8mf4 | ( | ... | ) | __riscv_vssrl_vv_u8mf4(__VA_ARGS__) |
| #define vssrl_vv_u8mf4_m | ( | ... | ) | __riscv_vssrl_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vssrl_vv_u8mf8 | ( | ... | ) | __riscv_vssrl_vv_u8mf8(__VA_ARGS__) |
| #define vssrl_vv_u8mf8_m | ( | ... | ) | __riscv_vssrl_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vssrl_vx_u16m1 | ( | ... | ) | __riscv_vssrl_vx_u16m1(__VA_ARGS__) |
| #define vssrl_vx_u16m1_m | ( | ... | ) | __riscv_vssrl_vx_u16m1_tumu(__VA_ARGS__) |
| #define vssrl_vx_u16m2 | ( | ... | ) | __riscv_vssrl_vx_u16m2(__VA_ARGS__) |
| #define vssrl_vx_u16m2_m | ( | ... | ) | __riscv_vssrl_vx_u16m2_tumu(__VA_ARGS__) |
| #define vssrl_vx_u16m4 | ( | ... | ) | __riscv_vssrl_vx_u16m4(__VA_ARGS__) |
| #define vssrl_vx_u16m4_m | ( | ... | ) | __riscv_vssrl_vx_u16m4_tumu(__VA_ARGS__) |
| #define vssrl_vx_u16m8 | ( | ... | ) | __riscv_vssrl_vx_u16m8(__VA_ARGS__) |
| #define vssrl_vx_u16m8_m | ( | ... | ) | __riscv_vssrl_vx_u16m8_tumu(__VA_ARGS__) |
| #define vssrl_vx_u16mf2 | ( | ... | ) | __riscv_vssrl_vx_u16mf2(__VA_ARGS__) |
| #define vssrl_vx_u16mf2_m | ( | ... | ) | __riscv_vssrl_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vssrl_vx_u16mf4 | ( | ... | ) | __riscv_vssrl_vx_u16mf4(__VA_ARGS__) |
| #define vssrl_vx_u16mf4_m | ( | ... | ) | __riscv_vssrl_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vssrl_vx_u32m1 | ( | ... | ) | __riscv_vssrl_vx_u32m1(__VA_ARGS__) |
| #define vssrl_vx_u32m1_m | ( | ... | ) | __riscv_vssrl_vx_u32m1_tumu(__VA_ARGS__) |
| #define vssrl_vx_u32m2 | ( | ... | ) | __riscv_vssrl_vx_u32m2(__VA_ARGS__) |
| #define vssrl_vx_u32m2_m | ( | ... | ) | __riscv_vssrl_vx_u32m2_tumu(__VA_ARGS__) |
| #define vssrl_vx_u32m4 | ( | ... | ) | __riscv_vssrl_vx_u32m4(__VA_ARGS__) |
| #define vssrl_vx_u32m4_m | ( | ... | ) | __riscv_vssrl_vx_u32m4_tumu(__VA_ARGS__) |
| #define vssrl_vx_u32m8 | ( | ... | ) | __riscv_vssrl_vx_u32m8(__VA_ARGS__) |
| #define vssrl_vx_u32m8_m | ( | ... | ) | __riscv_vssrl_vx_u32m8_tumu(__VA_ARGS__) |
| #define vssrl_vx_u32mf2 | ( | ... | ) | __riscv_vssrl_vx_u32mf2(__VA_ARGS__) |
| #define vssrl_vx_u32mf2_m | ( | ... | ) | __riscv_vssrl_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vssrl_vx_u64m1 | ( | ... | ) | __riscv_vssrl_vx_u64m1(__VA_ARGS__) |
| #define vssrl_vx_u64m1_m | ( | ... | ) | __riscv_vssrl_vx_u64m1_tumu(__VA_ARGS__) |
| #define vssrl_vx_u64m2 | ( | ... | ) | __riscv_vssrl_vx_u64m2(__VA_ARGS__) |
| #define vssrl_vx_u64m2_m | ( | ... | ) | __riscv_vssrl_vx_u64m2_tumu(__VA_ARGS__) |
| #define vssrl_vx_u64m4 | ( | ... | ) | __riscv_vssrl_vx_u64m4(__VA_ARGS__) |
| #define vssrl_vx_u64m4_m | ( | ... | ) | __riscv_vssrl_vx_u64m4_tumu(__VA_ARGS__) |
| #define vssrl_vx_u64m8 | ( | ... | ) | __riscv_vssrl_vx_u64m8(__VA_ARGS__) |
| #define vssrl_vx_u64m8_m | ( | ... | ) | __riscv_vssrl_vx_u64m8_tumu(__VA_ARGS__) |
| #define vssrl_vx_u8m1 | ( | ... | ) | __riscv_vssrl_vx_u8m1(__VA_ARGS__) |
| #define vssrl_vx_u8m1_m | ( | ... | ) | __riscv_vssrl_vx_u8m1_tumu(__VA_ARGS__) |
| #define vssrl_vx_u8m2 | ( | ... | ) | __riscv_vssrl_vx_u8m2(__VA_ARGS__) |
| #define vssrl_vx_u8m2_m | ( | ... | ) | __riscv_vssrl_vx_u8m2_tumu(__VA_ARGS__) |
| #define vssrl_vx_u8m4 | ( | ... | ) | __riscv_vssrl_vx_u8m4(__VA_ARGS__) |
| #define vssrl_vx_u8m4_m | ( | ... | ) | __riscv_vssrl_vx_u8m4_tumu(__VA_ARGS__) |
| #define vssrl_vx_u8m8 | ( | ... | ) | __riscv_vssrl_vx_u8m8(__VA_ARGS__) |
| #define vssrl_vx_u8m8_m | ( | ... | ) | __riscv_vssrl_vx_u8m8_tumu(__VA_ARGS__) |
| #define vssrl_vx_u8mf2 | ( | ... | ) | __riscv_vssrl_vx_u8mf2(__VA_ARGS__) |
| #define vssrl_vx_u8mf2_m | ( | ... | ) | __riscv_vssrl_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vssrl_vx_u8mf4 | ( | ... | ) | __riscv_vssrl_vx_u8mf4(__VA_ARGS__) |
| #define vssrl_vx_u8mf4_m | ( | ... | ) | __riscv_vssrl_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vssrl_vx_u8mf8 | ( | ... | ) | __riscv_vssrl_vx_u8mf8(__VA_ARGS__) |
| #define vssrl_vx_u8mf8_m | ( | ... | ) | __riscv_vssrl_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vssseg2e16_v_f16m1 | ( | ... | ) | __riscv_vssseg2e16_v_f16m1(__VA_ARGS__) |
| #define vssseg2e16_v_f16m1_m | ( | ... | ) | __riscv_vssseg2e16_v_f16m1_m(__VA_ARGS__) |
| #define vssseg2e16_v_f16m2 | ( | ... | ) | __riscv_vssseg2e16_v_f16m2(__VA_ARGS__) |
| #define vssseg2e16_v_f16m2_m | ( | ... | ) | __riscv_vssseg2e16_v_f16m2_m(__VA_ARGS__) |
| #define vssseg2e16_v_f16m4 | ( | ... | ) | __riscv_vssseg2e16_v_f16m4(__VA_ARGS__) |
| #define vssseg2e16_v_f16m4_m | ( | ... | ) | __riscv_vssseg2e16_v_f16m4_m(__VA_ARGS__) |
| #define vssseg2e16_v_f16mf2 | ( | ... | ) | __riscv_vssseg2e16_v_f16mf2(__VA_ARGS__) |
| #define vssseg2e16_v_f16mf2_m | ( | ... | ) | __riscv_vssseg2e16_v_f16mf2_m(__VA_ARGS__) |
| #define vssseg2e16_v_f16mf4 | ( | ... | ) | __riscv_vssseg2e16_v_f16mf4(__VA_ARGS__) |
| #define vssseg2e16_v_f16mf4_m | ( | ... | ) | __riscv_vssseg2e16_v_f16mf4_m(__VA_ARGS__) |
| #define vssseg2e16_v_i16m1 | ( | ... | ) | __riscv_vssseg2e16_v_i16m1(__VA_ARGS__) |
| #define vssseg2e16_v_i16m1_m | ( | ... | ) | __riscv_vssseg2e16_v_i16m1_m(__VA_ARGS__) |
| #define vssseg2e16_v_i16m2 | ( | ... | ) | __riscv_vssseg2e16_v_i16m2(__VA_ARGS__) |
| #define vssseg2e16_v_i16m2_m | ( | ... | ) | __riscv_vssseg2e16_v_i16m2_m(__VA_ARGS__) |
| #define vssseg2e16_v_i16m4 | ( | ... | ) | __riscv_vssseg2e16_v_i16m4(__VA_ARGS__) |
| #define vssseg2e16_v_i16m4_m | ( | ... | ) | __riscv_vssseg2e16_v_i16m4_m(__VA_ARGS__) |
| #define vssseg2e16_v_i16mf2 | ( | ... | ) | __riscv_vssseg2e16_v_i16mf2(__VA_ARGS__) |
| #define vssseg2e16_v_i16mf2_m | ( | ... | ) | __riscv_vssseg2e16_v_i16mf2_m(__VA_ARGS__) |
| #define vssseg2e16_v_i16mf4 | ( | ... | ) | __riscv_vssseg2e16_v_i16mf4(__VA_ARGS__) |
| #define vssseg2e16_v_i16mf4_m | ( | ... | ) | __riscv_vssseg2e16_v_i16mf4_m(__VA_ARGS__) |
| #define vssseg2e16_v_u16m1 | ( | ... | ) | __riscv_vssseg2e16_v_u16m1(__VA_ARGS__) |
| #define vssseg2e16_v_u16m1_m | ( | ... | ) | __riscv_vssseg2e16_v_u16m1_m(__VA_ARGS__) |
| #define vssseg2e16_v_u16m2 | ( | ... | ) | __riscv_vssseg2e16_v_u16m2(__VA_ARGS__) |
| #define vssseg2e16_v_u16m2_m | ( | ... | ) | __riscv_vssseg2e16_v_u16m2_m(__VA_ARGS__) |
| #define vssseg2e16_v_u16m4 | ( | ... | ) | __riscv_vssseg2e16_v_u16m4(__VA_ARGS__) |
| #define vssseg2e16_v_u16m4_m | ( | ... | ) | __riscv_vssseg2e16_v_u16m4_m(__VA_ARGS__) |
| #define vssseg2e16_v_u16mf2 | ( | ... | ) | __riscv_vssseg2e16_v_u16mf2(__VA_ARGS__) |
| #define vssseg2e16_v_u16mf2_m | ( | ... | ) | __riscv_vssseg2e16_v_u16mf2_m(__VA_ARGS__) |
| #define vssseg2e16_v_u16mf4 | ( | ... | ) | __riscv_vssseg2e16_v_u16mf4(__VA_ARGS__) |
| #define vssseg2e16_v_u16mf4_m | ( | ... | ) | __riscv_vssseg2e16_v_u16mf4_m(__VA_ARGS__) |
| #define vssseg2e32_v_f32m1 | ( | ... | ) | __riscv_vssseg2e32_v_f32m1(__VA_ARGS__) |
| #define vssseg2e32_v_f32m1_m | ( | ... | ) | __riscv_vssseg2e32_v_f32m1_m(__VA_ARGS__) |
| #define vssseg2e32_v_f32m2 | ( | ... | ) | __riscv_vssseg2e32_v_f32m2(__VA_ARGS__) |
| #define vssseg2e32_v_f32m2_m | ( | ... | ) | __riscv_vssseg2e32_v_f32m2_m(__VA_ARGS__) |
| #define vssseg2e32_v_f32m4 | ( | ... | ) | __riscv_vssseg2e32_v_f32m4(__VA_ARGS__) |
| #define vssseg2e32_v_f32m4_m | ( | ... | ) | __riscv_vssseg2e32_v_f32m4_m(__VA_ARGS__) |
| #define vssseg2e32_v_f32mf2 | ( | ... | ) | __riscv_vssseg2e32_v_f32mf2(__VA_ARGS__) |
| #define vssseg2e32_v_f32mf2_m | ( | ... | ) | __riscv_vssseg2e32_v_f32mf2_m(__VA_ARGS__) |
| #define vssseg2e32_v_i32m1 | ( | ... | ) | __riscv_vssseg2e32_v_i32m1(__VA_ARGS__) |
| #define vssseg2e32_v_i32m1_m | ( | ... | ) | __riscv_vssseg2e32_v_i32m1_m(__VA_ARGS__) |
| #define vssseg2e32_v_i32m2 | ( | ... | ) | __riscv_vssseg2e32_v_i32m2(__VA_ARGS__) |
| #define vssseg2e32_v_i32m2_m | ( | ... | ) | __riscv_vssseg2e32_v_i32m2_m(__VA_ARGS__) |
| #define vssseg2e32_v_i32m4 | ( | ... | ) | __riscv_vssseg2e32_v_i32m4(__VA_ARGS__) |
| #define vssseg2e32_v_i32m4_m | ( | ... | ) | __riscv_vssseg2e32_v_i32m4_m(__VA_ARGS__) |
| #define vssseg2e32_v_i32mf2 | ( | ... | ) | __riscv_vssseg2e32_v_i32mf2(__VA_ARGS__) |
| #define vssseg2e32_v_i32mf2_m | ( | ... | ) | __riscv_vssseg2e32_v_i32mf2_m(__VA_ARGS__) |
| #define vssseg2e32_v_u32m1 | ( | ... | ) | __riscv_vssseg2e32_v_u32m1(__VA_ARGS__) |
| #define vssseg2e32_v_u32m1_m | ( | ... | ) | __riscv_vssseg2e32_v_u32m1_m(__VA_ARGS__) |
| #define vssseg2e32_v_u32m2 | ( | ... | ) | __riscv_vssseg2e32_v_u32m2(__VA_ARGS__) |
| #define vssseg2e32_v_u32m2_m | ( | ... | ) | __riscv_vssseg2e32_v_u32m2_m(__VA_ARGS__) |
| #define vssseg2e32_v_u32m4 | ( | ... | ) | __riscv_vssseg2e32_v_u32m4(__VA_ARGS__) |
| #define vssseg2e32_v_u32m4_m | ( | ... | ) | __riscv_vssseg2e32_v_u32m4_m(__VA_ARGS__) |
| #define vssseg2e32_v_u32mf2 | ( | ... | ) | __riscv_vssseg2e32_v_u32mf2(__VA_ARGS__) |
| #define vssseg2e32_v_u32mf2_m | ( | ... | ) | __riscv_vssseg2e32_v_u32mf2_m(__VA_ARGS__) |
| #define vssseg2e64_v_f64m1 | ( | ... | ) | __riscv_vssseg2e64_v_f64m1(__VA_ARGS__) |
| #define vssseg2e64_v_f64m1_m | ( | ... | ) | __riscv_vssseg2e64_v_f64m1_m(__VA_ARGS__) |
| #define vssseg2e64_v_f64m2 | ( | ... | ) | __riscv_vssseg2e64_v_f64m2(__VA_ARGS__) |
| #define vssseg2e64_v_f64m2_m | ( | ... | ) | __riscv_vssseg2e64_v_f64m2_m(__VA_ARGS__) |
| #define vssseg2e64_v_f64m4 | ( | ... | ) | __riscv_vssseg2e64_v_f64m4(__VA_ARGS__) |
| #define vssseg2e64_v_f64m4_m | ( | ... | ) | __riscv_vssseg2e64_v_f64m4_m(__VA_ARGS__) |
| #define vssseg2e64_v_i64m1 | ( | ... | ) | __riscv_vssseg2e64_v_i64m1(__VA_ARGS__) |
| #define vssseg2e64_v_i64m1_m | ( | ... | ) | __riscv_vssseg2e64_v_i64m1_m(__VA_ARGS__) |
| #define vssseg2e64_v_i64m2 | ( | ... | ) | __riscv_vssseg2e64_v_i64m2(__VA_ARGS__) |
| #define vssseg2e64_v_i64m2_m | ( | ... | ) | __riscv_vssseg2e64_v_i64m2_m(__VA_ARGS__) |
| #define vssseg2e64_v_i64m4 | ( | ... | ) | __riscv_vssseg2e64_v_i64m4(__VA_ARGS__) |
| #define vssseg2e64_v_i64m4_m | ( | ... | ) | __riscv_vssseg2e64_v_i64m4_m(__VA_ARGS__) |
| #define vssseg2e64_v_u64m1 | ( | ... | ) | __riscv_vssseg2e64_v_u64m1(__VA_ARGS__) |
| #define vssseg2e64_v_u64m1_m | ( | ... | ) | __riscv_vssseg2e64_v_u64m1_m(__VA_ARGS__) |
| #define vssseg2e64_v_u64m2 | ( | ... | ) | __riscv_vssseg2e64_v_u64m2(__VA_ARGS__) |
| #define vssseg2e64_v_u64m2_m | ( | ... | ) | __riscv_vssseg2e64_v_u64m2_m(__VA_ARGS__) |
| #define vssseg2e64_v_u64m4 | ( | ... | ) | __riscv_vssseg2e64_v_u64m4(__VA_ARGS__) |
| #define vssseg2e64_v_u64m4_m | ( | ... | ) | __riscv_vssseg2e64_v_u64m4_m(__VA_ARGS__) |
| #define vssseg2e8_v_i8m1 | ( | ... | ) | __riscv_vssseg2e8_v_i8m1(__VA_ARGS__) |
| #define vssseg2e8_v_i8m1_m | ( | ... | ) | __riscv_vssseg2e8_v_i8m1_m(__VA_ARGS__) |
| #define vssseg2e8_v_i8m2 | ( | ... | ) | __riscv_vssseg2e8_v_i8m2(__VA_ARGS__) |
| #define vssseg2e8_v_i8m2_m | ( | ... | ) | __riscv_vssseg2e8_v_i8m2_m(__VA_ARGS__) |
| #define vssseg2e8_v_i8m4 | ( | ... | ) | __riscv_vssseg2e8_v_i8m4(__VA_ARGS__) |
| #define vssseg2e8_v_i8m4_m | ( | ... | ) | __riscv_vssseg2e8_v_i8m4_m(__VA_ARGS__) |
| #define vssseg2e8_v_i8mf2 | ( | ... | ) | __riscv_vssseg2e8_v_i8mf2(__VA_ARGS__) |
| #define vssseg2e8_v_i8mf2_m | ( | ... | ) | __riscv_vssseg2e8_v_i8mf2_m(__VA_ARGS__) |
| #define vssseg2e8_v_i8mf4 | ( | ... | ) | __riscv_vssseg2e8_v_i8mf4(__VA_ARGS__) |
| #define vssseg2e8_v_i8mf4_m | ( | ... | ) | __riscv_vssseg2e8_v_i8mf4_m(__VA_ARGS__) |
| #define vssseg2e8_v_i8mf8 | ( | ... | ) | __riscv_vssseg2e8_v_i8mf8(__VA_ARGS__) |
| #define vssseg2e8_v_i8mf8_m | ( | ... | ) | __riscv_vssseg2e8_v_i8mf8_m(__VA_ARGS__) |
| #define vssseg2e8_v_u8m1 | ( | ... | ) | __riscv_vssseg2e8_v_u8m1(__VA_ARGS__) |
| #define vssseg2e8_v_u8m1_m | ( | ... | ) | __riscv_vssseg2e8_v_u8m1_m(__VA_ARGS__) |
| #define vssseg2e8_v_u8m2 | ( | ... | ) | __riscv_vssseg2e8_v_u8m2(__VA_ARGS__) |
| #define vssseg2e8_v_u8m2_m | ( | ... | ) | __riscv_vssseg2e8_v_u8m2_m(__VA_ARGS__) |
| #define vssseg2e8_v_u8m4 | ( | ... | ) | __riscv_vssseg2e8_v_u8m4(__VA_ARGS__) |
| #define vssseg2e8_v_u8m4_m | ( | ... | ) | __riscv_vssseg2e8_v_u8m4_m(__VA_ARGS__) |
| #define vssseg2e8_v_u8mf2 | ( | ... | ) | __riscv_vssseg2e8_v_u8mf2(__VA_ARGS__) |
| #define vssseg2e8_v_u8mf2_m | ( | ... | ) | __riscv_vssseg2e8_v_u8mf2_m(__VA_ARGS__) |
| #define vssseg2e8_v_u8mf4 | ( | ... | ) | __riscv_vssseg2e8_v_u8mf4(__VA_ARGS__) |
| #define vssseg2e8_v_u8mf4_m | ( | ... | ) | __riscv_vssseg2e8_v_u8mf4_m(__VA_ARGS__) |
| #define vssseg2e8_v_u8mf8 | ( | ... | ) | __riscv_vssseg2e8_v_u8mf8(__VA_ARGS__) |
| #define vssseg2e8_v_u8mf8_m | ( | ... | ) | __riscv_vssseg2e8_v_u8mf8_m(__VA_ARGS__) |
| #define vssseg3e16_v_f16m1 | ( | ... | ) | __riscv_vssseg3e16_v_f16m1(__VA_ARGS__) |
| #define vssseg3e16_v_f16m1_m | ( | ... | ) | __riscv_vssseg3e16_v_f16m1_m(__VA_ARGS__) |
| #define vssseg3e16_v_f16m2 | ( | ... | ) | __riscv_vssseg3e16_v_f16m2(__VA_ARGS__) |
| #define vssseg3e16_v_f16m2_m | ( | ... | ) | __riscv_vssseg3e16_v_f16m2_m(__VA_ARGS__) |
| #define vssseg3e16_v_f16mf2 | ( | ... | ) | __riscv_vssseg3e16_v_f16mf2(__VA_ARGS__) |
| #define vssseg3e16_v_f16mf2_m | ( | ... | ) | __riscv_vssseg3e16_v_f16mf2_m(__VA_ARGS__) |
| #define vssseg3e16_v_f16mf4 | ( | ... | ) | __riscv_vssseg3e16_v_f16mf4(__VA_ARGS__) |
| #define vssseg3e16_v_f16mf4_m | ( | ... | ) | __riscv_vssseg3e16_v_f16mf4_m(__VA_ARGS__) |
| #define vssseg3e16_v_i16m1 | ( | ... | ) | __riscv_vssseg3e16_v_i16m1(__VA_ARGS__) |
| #define vssseg3e16_v_i16m1_m | ( | ... | ) | __riscv_vssseg3e16_v_i16m1_m(__VA_ARGS__) |
| #define vssseg3e16_v_i16m2 | ( | ... | ) | __riscv_vssseg3e16_v_i16m2(__VA_ARGS__) |
| #define vssseg3e16_v_i16m2_m | ( | ... | ) | __riscv_vssseg3e16_v_i16m2_m(__VA_ARGS__) |
| #define vssseg3e16_v_i16mf2 | ( | ... | ) | __riscv_vssseg3e16_v_i16mf2(__VA_ARGS__) |
| #define vssseg3e16_v_i16mf2_m | ( | ... | ) | __riscv_vssseg3e16_v_i16mf2_m(__VA_ARGS__) |
| #define vssseg3e16_v_i16mf4 | ( | ... | ) | __riscv_vssseg3e16_v_i16mf4(__VA_ARGS__) |
| #define vssseg3e16_v_i16mf4_m | ( | ... | ) | __riscv_vssseg3e16_v_i16mf4_m(__VA_ARGS__) |
| #define vssseg3e16_v_u16m1 | ( | ... | ) | __riscv_vssseg3e16_v_u16m1(__VA_ARGS__) |
| #define vssseg3e16_v_u16m1_m | ( | ... | ) | __riscv_vssseg3e16_v_u16m1_m(__VA_ARGS__) |
| #define vssseg3e16_v_u16m2 | ( | ... | ) | __riscv_vssseg3e16_v_u16m2(__VA_ARGS__) |
| #define vssseg3e16_v_u16m2_m | ( | ... | ) | __riscv_vssseg3e16_v_u16m2_m(__VA_ARGS__) |
| #define vssseg3e16_v_u16mf2 | ( | ... | ) | __riscv_vssseg3e16_v_u16mf2(__VA_ARGS__) |
| #define vssseg3e16_v_u16mf2_m | ( | ... | ) | __riscv_vssseg3e16_v_u16mf2_m(__VA_ARGS__) |
| #define vssseg3e16_v_u16mf4 | ( | ... | ) | __riscv_vssseg3e16_v_u16mf4(__VA_ARGS__) |
| #define vssseg3e16_v_u16mf4_m | ( | ... | ) | __riscv_vssseg3e16_v_u16mf4_m(__VA_ARGS__) |
| #define vssseg3e32_v_f32m1 | ( | ... | ) | __riscv_vssseg3e32_v_f32m1(__VA_ARGS__) |
| #define vssseg3e32_v_f32m1_m | ( | ... | ) | __riscv_vssseg3e32_v_f32m1_m(__VA_ARGS__) |
| #define vssseg3e32_v_f32m2 | ( | ... | ) | __riscv_vssseg3e32_v_f32m2(__VA_ARGS__) |
| #define vssseg3e32_v_f32m2_m | ( | ... | ) | __riscv_vssseg3e32_v_f32m2_m(__VA_ARGS__) |
| #define vssseg3e32_v_f32mf2 | ( | ... | ) | __riscv_vssseg3e32_v_f32mf2(__VA_ARGS__) |
| #define vssseg3e32_v_f32mf2_m | ( | ... | ) | __riscv_vssseg3e32_v_f32mf2_m(__VA_ARGS__) |
| #define vssseg3e32_v_i32m1 | ( | ... | ) | __riscv_vssseg3e32_v_i32m1(__VA_ARGS__) |
| #define vssseg3e32_v_i32m1_m | ( | ... | ) | __riscv_vssseg3e32_v_i32m1_m(__VA_ARGS__) |
| #define vssseg3e32_v_i32m2 | ( | ... | ) | __riscv_vssseg3e32_v_i32m2(__VA_ARGS__) |
| #define vssseg3e32_v_i32m2_m | ( | ... | ) | __riscv_vssseg3e32_v_i32m2_m(__VA_ARGS__) |
| #define vssseg3e32_v_i32mf2 | ( | ... | ) | __riscv_vssseg3e32_v_i32mf2(__VA_ARGS__) |
| #define vssseg3e32_v_i32mf2_m | ( | ... | ) | __riscv_vssseg3e32_v_i32mf2_m(__VA_ARGS__) |
| #define vssseg3e32_v_u32m1 | ( | ... | ) | __riscv_vssseg3e32_v_u32m1(__VA_ARGS__) |
| #define vssseg3e32_v_u32m1_m | ( | ... | ) | __riscv_vssseg3e32_v_u32m1_m(__VA_ARGS__) |
| #define vssseg3e32_v_u32m2 | ( | ... | ) | __riscv_vssseg3e32_v_u32m2(__VA_ARGS__) |
| #define vssseg3e32_v_u32m2_m | ( | ... | ) | __riscv_vssseg3e32_v_u32m2_m(__VA_ARGS__) |
| #define vssseg3e32_v_u32mf2 | ( | ... | ) | __riscv_vssseg3e32_v_u32mf2(__VA_ARGS__) |
| #define vssseg3e32_v_u32mf2_m | ( | ... | ) | __riscv_vssseg3e32_v_u32mf2_m(__VA_ARGS__) |
| #define vssseg3e64_v_f64m1 | ( | ... | ) | __riscv_vssseg3e64_v_f64m1(__VA_ARGS__) |
| #define vssseg3e64_v_f64m1_m | ( | ... | ) | __riscv_vssseg3e64_v_f64m1_m(__VA_ARGS__) |
| #define vssseg3e64_v_f64m2 | ( | ... | ) | __riscv_vssseg3e64_v_f64m2(__VA_ARGS__) |
| #define vssseg3e64_v_f64m2_m | ( | ... | ) | __riscv_vssseg3e64_v_f64m2_m(__VA_ARGS__) |
| #define vssseg3e64_v_i64m1 | ( | ... | ) | __riscv_vssseg3e64_v_i64m1(__VA_ARGS__) |
| #define vssseg3e64_v_i64m1_m | ( | ... | ) | __riscv_vssseg3e64_v_i64m1_m(__VA_ARGS__) |
| #define vssseg3e64_v_i64m2 | ( | ... | ) | __riscv_vssseg3e64_v_i64m2(__VA_ARGS__) |
| #define vssseg3e64_v_i64m2_m | ( | ... | ) | __riscv_vssseg3e64_v_i64m2_m(__VA_ARGS__) |
| #define vssseg3e64_v_u64m1 | ( | ... | ) | __riscv_vssseg3e64_v_u64m1(__VA_ARGS__) |
| #define vssseg3e64_v_u64m1_m | ( | ... | ) | __riscv_vssseg3e64_v_u64m1_m(__VA_ARGS__) |
| #define vssseg3e64_v_u64m2 | ( | ... | ) | __riscv_vssseg3e64_v_u64m2(__VA_ARGS__) |
| #define vssseg3e64_v_u64m2_m | ( | ... | ) | __riscv_vssseg3e64_v_u64m2_m(__VA_ARGS__) |
| #define vssseg3e8_v_i8m1 | ( | ... | ) | __riscv_vssseg3e8_v_i8m1(__VA_ARGS__) |
| #define vssseg3e8_v_i8m1_m | ( | ... | ) | __riscv_vssseg3e8_v_i8m1_m(__VA_ARGS__) |
| #define vssseg3e8_v_i8m2 | ( | ... | ) | __riscv_vssseg3e8_v_i8m2(__VA_ARGS__) |
| #define vssseg3e8_v_i8m2_m | ( | ... | ) | __riscv_vssseg3e8_v_i8m2_m(__VA_ARGS__) |
| #define vssseg3e8_v_i8mf2 | ( | ... | ) | __riscv_vssseg3e8_v_i8mf2(__VA_ARGS__) |
| #define vssseg3e8_v_i8mf2_m | ( | ... | ) | __riscv_vssseg3e8_v_i8mf2_m(__VA_ARGS__) |
| #define vssseg3e8_v_i8mf4 | ( | ... | ) | __riscv_vssseg3e8_v_i8mf4(__VA_ARGS__) |
| #define vssseg3e8_v_i8mf4_m | ( | ... | ) | __riscv_vssseg3e8_v_i8mf4_m(__VA_ARGS__) |
| #define vssseg3e8_v_i8mf8 | ( | ... | ) | __riscv_vssseg3e8_v_i8mf8(__VA_ARGS__) |
| #define vssseg3e8_v_i8mf8_m | ( | ... | ) | __riscv_vssseg3e8_v_i8mf8_m(__VA_ARGS__) |
| #define vssseg3e8_v_u8m1 | ( | ... | ) | __riscv_vssseg3e8_v_u8m1(__VA_ARGS__) |
| #define vssseg3e8_v_u8m1_m | ( | ... | ) | __riscv_vssseg3e8_v_u8m1_m(__VA_ARGS__) |
| #define vssseg3e8_v_u8m2 | ( | ... | ) | __riscv_vssseg3e8_v_u8m2(__VA_ARGS__) |
| #define vssseg3e8_v_u8m2_m | ( | ... | ) | __riscv_vssseg3e8_v_u8m2_m(__VA_ARGS__) |
| #define vssseg3e8_v_u8mf2 | ( | ... | ) | __riscv_vssseg3e8_v_u8mf2(__VA_ARGS__) |
| #define vssseg3e8_v_u8mf2_m | ( | ... | ) | __riscv_vssseg3e8_v_u8mf2_m(__VA_ARGS__) |
| #define vssseg3e8_v_u8mf4 | ( | ... | ) | __riscv_vssseg3e8_v_u8mf4(__VA_ARGS__) |
| #define vssseg3e8_v_u8mf4_m | ( | ... | ) | __riscv_vssseg3e8_v_u8mf4_m(__VA_ARGS__) |
| #define vssseg3e8_v_u8mf8 | ( | ... | ) | __riscv_vssseg3e8_v_u8mf8(__VA_ARGS__) |
| #define vssseg3e8_v_u8mf8_m | ( | ... | ) | __riscv_vssseg3e8_v_u8mf8_m(__VA_ARGS__) |
| #define vssseg4e16_v_f16m1 | ( | ... | ) | __riscv_vssseg4e16_v_f16m1(__VA_ARGS__) |
| #define vssseg4e16_v_f16m1_m | ( | ... | ) | __riscv_vssseg4e16_v_f16m1_m(__VA_ARGS__) |
| #define vssseg4e16_v_f16m2 | ( | ... | ) | __riscv_vssseg4e16_v_f16m2(__VA_ARGS__) |
| #define vssseg4e16_v_f16m2_m | ( | ... | ) | __riscv_vssseg4e16_v_f16m2_m(__VA_ARGS__) |
| #define vssseg4e16_v_f16mf2 | ( | ... | ) | __riscv_vssseg4e16_v_f16mf2(__VA_ARGS__) |
| #define vssseg4e16_v_f16mf2_m | ( | ... | ) | __riscv_vssseg4e16_v_f16mf2_m(__VA_ARGS__) |
| #define vssseg4e16_v_f16mf4 | ( | ... | ) | __riscv_vssseg4e16_v_f16mf4(__VA_ARGS__) |
| #define vssseg4e16_v_f16mf4_m | ( | ... | ) | __riscv_vssseg4e16_v_f16mf4_m(__VA_ARGS__) |
| #define vssseg4e16_v_i16m1 | ( | ... | ) | __riscv_vssseg4e16_v_i16m1(__VA_ARGS__) |
| #define vssseg4e16_v_i16m1_m | ( | ... | ) | __riscv_vssseg4e16_v_i16m1_m(__VA_ARGS__) |
| #define vssseg4e16_v_i16m2 | ( | ... | ) | __riscv_vssseg4e16_v_i16m2(__VA_ARGS__) |
| #define vssseg4e16_v_i16m2_m | ( | ... | ) | __riscv_vssseg4e16_v_i16m2_m(__VA_ARGS__) |
| #define vssseg4e16_v_i16mf2 | ( | ... | ) | __riscv_vssseg4e16_v_i16mf2(__VA_ARGS__) |
| #define vssseg4e16_v_i16mf2_m | ( | ... | ) | __riscv_vssseg4e16_v_i16mf2_m(__VA_ARGS__) |
| #define vssseg4e16_v_i16mf4 | ( | ... | ) | __riscv_vssseg4e16_v_i16mf4(__VA_ARGS__) |
| #define vssseg4e16_v_i16mf4_m | ( | ... | ) | __riscv_vssseg4e16_v_i16mf4_m(__VA_ARGS__) |
| #define vssseg4e16_v_u16m1 | ( | ... | ) | __riscv_vssseg4e16_v_u16m1(__VA_ARGS__) |
| #define vssseg4e16_v_u16m1_m | ( | ... | ) | __riscv_vssseg4e16_v_u16m1_m(__VA_ARGS__) |
| #define vssseg4e16_v_u16m2 | ( | ... | ) | __riscv_vssseg4e16_v_u16m2(__VA_ARGS__) |
| #define vssseg4e16_v_u16m2_m | ( | ... | ) | __riscv_vssseg4e16_v_u16m2_m(__VA_ARGS__) |
| #define vssseg4e16_v_u16mf2 | ( | ... | ) | __riscv_vssseg4e16_v_u16mf2(__VA_ARGS__) |
| #define vssseg4e16_v_u16mf2_m | ( | ... | ) | __riscv_vssseg4e16_v_u16mf2_m(__VA_ARGS__) |
| #define vssseg4e16_v_u16mf4 | ( | ... | ) | __riscv_vssseg4e16_v_u16mf4(__VA_ARGS__) |
| #define vssseg4e16_v_u16mf4_m | ( | ... | ) | __riscv_vssseg4e16_v_u16mf4_m(__VA_ARGS__) |
| #define vssseg4e32_v_f32m1 | ( | ... | ) | __riscv_vssseg4e32_v_f32m1(__VA_ARGS__) |
| #define vssseg4e32_v_f32m1_m | ( | ... | ) | __riscv_vssseg4e32_v_f32m1_m(__VA_ARGS__) |
| #define vssseg4e32_v_f32m2 | ( | ... | ) | __riscv_vssseg4e32_v_f32m2(__VA_ARGS__) |
| #define vssseg4e32_v_f32m2_m | ( | ... | ) | __riscv_vssseg4e32_v_f32m2_m(__VA_ARGS__) |
| #define vssseg4e32_v_f32mf2 | ( | ... | ) | __riscv_vssseg4e32_v_f32mf2(__VA_ARGS__) |
| #define vssseg4e32_v_f32mf2_m | ( | ... | ) | __riscv_vssseg4e32_v_f32mf2_m(__VA_ARGS__) |
| #define vssseg4e32_v_i32m1 | ( | ... | ) | __riscv_vssseg4e32_v_i32m1(__VA_ARGS__) |
| #define vssseg4e32_v_i32m1_m | ( | ... | ) | __riscv_vssseg4e32_v_i32m1_m(__VA_ARGS__) |
| #define vssseg4e32_v_i32m2 | ( | ... | ) | __riscv_vssseg4e32_v_i32m2(__VA_ARGS__) |
| #define vssseg4e32_v_i32m2_m | ( | ... | ) | __riscv_vssseg4e32_v_i32m2_m(__VA_ARGS__) |
| #define vssseg4e32_v_i32mf2 | ( | ... | ) | __riscv_vssseg4e32_v_i32mf2(__VA_ARGS__) |
| #define vssseg4e32_v_i32mf2_m | ( | ... | ) | __riscv_vssseg4e32_v_i32mf2_m(__VA_ARGS__) |
| #define vssseg4e32_v_u32m1 | ( | ... | ) | __riscv_vssseg4e32_v_u32m1(__VA_ARGS__) |
| #define vssseg4e32_v_u32m1_m | ( | ... | ) | __riscv_vssseg4e32_v_u32m1_m(__VA_ARGS__) |
| #define vssseg4e32_v_u32m2 | ( | ... | ) | __riscv_vssseg4e32_v_u32m2(__VA_ARGS__) |
| #define vssseg4e32_v_u32m2_m | ( | ... | ) | __riscv_vssseg4e32_v_u32m2_m(__VA_ARGS__) |
| #define vssseg4e32_v_u32mf2 | ( | ... | ) | __riscv_vssseg4e32_v_u32mf2(__VA_ARGS__) |
| #define vssseg4e32_v_u32mf2_m | ( | ... | ) | __riscv_vssseg4e32_v_u32mf2_m(__VA_ARGS__) |
| #define vssseg4e64_v_f64m1 | ( | ... | ) | __riscv_vssseg4e64_v_f64m1(__VA_ARGS__) |
| #define vssseg4e64_v_f64m1_m | ( | ... | ) | __riscv_vssseg4e64_v_f64m1_m(__VA_ARGS__) |
| #define vssseg4e64_v_f64m2 | ( | ... | ) | __riscv_vssseg4e64_v_f64m2(__VA_ARGS__) |
| #define vssseg4e64_v_f64m2_m | ( | ... | ) | __riscv_vssseg4e64_v_f64m2_m(__VA_ARGS__) |
| #define vssseg4e64_v_i64m1 | ( | ... | ) | __riscv_vssseg4e64_v_i64m1(__VA_ARGS__) |
| #define vssseg4e64_v_i64m1_m | ( | ... | ) | __riscv_vssseg4e64_v_i64m1_m(__VA_ARGS__) |
| #define vssseg4e64_v_i64m2 | ( | ... | ) | __riscv_vssseg4e64_v_i64m2(__VA_ARGS__) |
| #define vssseg4e64_v_i64m2_m | ( | ... | ) | __riscv_vssseg4e64_v_i64m2_m(__VA_ARGS__) |
| #define vssseg4e64_v_u64m1 | ( | ... | ) | __riscv_vssseg4e64_v_u64m1(__VA_ARGS__) |
| #define vssseg4e64_v_u64m1_m | ( | ... | ) | __riscv_vssseg4e64_v_u64m1_m(__VA_ARGS__) |
| #define vssseg4e64_v_u64m2 | ( | ... | ) | __riscv_vssseg4e64_v_u64m2(__VA_ARGS__) |
| #define vssseg4e64_v_u64m2_m | ( | ... | ) | __riscv_vssseg4e64_v_u64m2_m(__VA_ARGS__) |
| #define vssseg4e8_v_i8m1 | ( | ... | ) | __riscv_vssseg4e8_v_i8m1(__VA_ARGS__) |
| #define vssseg4e8_v_i8m1_m | ( | ... | ) | __riscv_vssseg4e8_v_i8m1_m(__VA_ARGS__) |
| #define vssseg4e8_v_i8m2 | ( | ... | ) | __riscv_vssseg4e8_v_i8m2(__VA_ARGS__) |
| #define vssseg4e8_v_i8m2_m | ( | ... | ) | __riscv_vssseg4e8_v_i8m2_m(__VA_ARGS__) |
| #define vssseg4e8_v_i8mf2 | ( | ... | ) | __riscv_vssseg4e8_v_i8mf2(__VA_ARGS__) |
| #define vssseg4e8_v_i8mf2_m | ( | ... | ) | __riscv_vssseg4e8_v_i8mf2_m(__VA_ARGS__) |
| #define vssseg4e8_v_i8mf4 | ( | ... | ) | __riscv_vssseg4e8_v_i8mf4(__VA_ARGS__) |
| #define vssseg4e8_v_i8mf4_m | ( | ... | ) | __riscv_vssseg4e8_v_i8mf4_m(__VA_ARGS__) |
| #define vssseg4e8_v_i8mf8 | ( | ... | ) | __riscv_vssseg4e8_v_i8mf8(__VA_ARGS__) |
| #define vssseg4e8_v_i8mf8_m | ( | ... | ) | __riscv_vssseg4e8_v_i8mf8_m(__VA_ARGS__) |
| #define vssseg4e8_v_u8m1 | ( | ... | ) | __riscv_vssseg4e8_v_u8m1(__VA_ARGS__) |
| #define vssseg4e8_v_u8m1_m | ( | ... | ) | __riscv_vssseg4e8_v_u8m1_m(__VA_ARGS__) |
| #define vssseg4e8_v_u8m2 | ( | ... | ) | __riscv_vssseg4e8_v_u8m2(__VA_ARGS__) |
| #define vssseg4e8_v_u8m2_m | ( | ... | ) | __riscv_vssseg4e8_v_u8m2_m(__VA_ARGS__) |
| #define vssseg4e8_v_u8mf2 | ( | ... | ) | __riscv_vssseg4e8_v_u8mf2(__VA_ARGS__) |
| #define vssseg4e8_v_u8mf2_m | ( | ... | ) | __riscv_vssseg4e8_v_u8mf2_m(__VA_ARGS__) |
| #define vssseg4e8_v_u8mf4 | ( | ... | ) | __riscv_vssseg4e8_v_u8mf4(__VA_ARGS__) |
| #define vssseg4e8_v_u8mf4_m | ( | ... | ) | __riscv_vssseg4e8_v_u8mf4_m(__VA_ARGS__) |
| #define vssseg4e8_v_u8mf8 | ( | ... | ) | __riscv_vssseg4e8_v_u8mf8(__VA_ARGS__) |
| #define vssseg4e8_v_u8mf8_m | ( | ... | ) | __riscv_vssseg4e8_v_u8mf8_m(__VA_ARGS__) |
| #define vssseg5e16_v_f16m1 | ( | ... | ) | __riscv_vssseg5e16_v_f16m1(__VA_ARGS__) |
| #define vssseg5e16_v_f16m1_m | ( | ... | ) | __riscv_vssseg5e16_v_f16m1_m(__VA_ARGS__) |
| #define vssseg5e16_v_f16mf2 | ( | ... | ) | __riscv_vssseg5e16_v_f16mf2(__VA_ARGS__) |
| #define vssseg5e16_v_f16mf2_m | ( | ... | ) | __riscv_vssseg5e16_v_f16mf2_m(__VA_ARGS__) |
| #define vssseg5e16_v_f16mf4 | ( | ... | ) | __riscv_vssseg5e16_v_f16mf4(__VA_ARGS__) |
| #define vssseg5e16_v_f16mf4_m | ( | ... | ) | __riscv_vssseg5e16_v_f16mf4_m(__VA_ARGS__) |
| #define vssseg5e16_v_i16m1 | ( | ... | ) | __riscv_vssseg5e16_v_i16m1(__VA_ARGS__) |
| #define vssseg5e16_v_i16m1_m | ( | ... | ) | __riscv_vssseg5e16_v_i16m1_m(__VA_ARGS__) |
| #define vssseg5e16_v_i16mf2 | ( | ... | ) | __riscv_vssseg5e16_v_i16mf2(__VA_ARGS__) |
| #define vssseg5e16_v_i16mf2_m | ( | ... | ) | __riscv_vssseg5e16_v_i16mf2_m(__VA_ARGS__) |
| #define vssseg5e16_v_i16mf4 | ( | ... | ) | __riscv_vssseg5e16_v_i16mf4(__VA_ARGS__) |
| #define vssseg5e16_v_i16mf4_m | ( | ... | ) | __riscv_vssseg5e16_v_i16mf4_m(__VA_ARGS__) |
| #define vssseg5e16_v_u16m1 | ( | ... | ) | __riscv_vssseg5e16_v_u16m1(__VA_ARGS__) |
| #define vssseg5e16_v_u16m1_m | ( | ... | ) | __riscv_vssseg5e16_v_u16m1_m(__VA_ARGS__) |
| #define vssseg5e16_v_u16mf2 | ( | ... | ) | __riscv_vssseg5e16_v_u16mf2(__VA_ARGS__) |
| #define vssseg5e16_v_u16mf2_m | ( | ... | ) | __riscv_vssseg5e16_v_u16mf2_m(__VA_ARGS__) |
| #define vssseg5e16_v_u16mf4 | ( | ... | ) | __riscv_vssseg5e16_v_u16mf4(__VA_ARGS__) |
| #define vssseg5e16_v_u16mf4_m | ( | ... | ) | __riscv_vssseg5e16_v_u16mf4_m(__VA_ARGS__) |
| #define vssseg5e32_v_f32m1 | ( | ... | ) | __riscv_vssseg5e32_v_f32m1(__VA_ARGS__) |
| #define vssseg5e32_v_f32m1_m | ( | ... | ) | __riscv_vssseg5e32_v_f32m1_m(__VA_ARGS__) |
| #define vssseg5e32_v_f32mf2 | ( | ... | ) | __riscv_vssseg5e32_v_f32mf2(__VA_ARGS__) |
| #define vssseg5e32_v_f32mf2_m | ( | ... | ) | __riscv_vssseg5e32_v_f32mf2_m(__VA_ARGS__) |
| #define vssseg5e32_v_i32m1 | ( | ... | ) | __riscv_vssseg5e32_v_i32m1(__VA_ARGS__) |
| #define vssseg5e32_v_i32m1_m | ( | ... | ) | __riscv_vssseg5e32_v_i32m1_m(__VA_ARGS__) |
| #define vssseg5e32_v_i32mf2 | ( | ... | ) | __riscv_vssseg5e32_v_i32mf2(__VA_ARGS__) |
| #define vssseg5e32_v_i32mf2_m | ( | ... | ) | __riscv_vssseg5e32_v_i32mf2_m(__VA_ARGS__) |
| #define vssseg5e32_v_u32m1 | ( | ... | ) | __riscv_vssseg5e32_v_u32m1(__VA_ARGS__) |
| #define vssseg5e32_v_u32m1_m | ( | ... | ) | __riscv_vssseg5e32_v_u32m1_m(__VA_ARGS__) |
| #define vssseg5e32_v_u32mf2 | ( | ... | ) | __riscv_vssseg5e32_v_u32mf2(__VA_ARGS__) |
| #define vssseg5e32_v_u32mf2_m | ( | ... | ) | __riscv_vssseg5e32_v_u32mf2_m(__VA_ARGS__) |
| #define vssseg5e64_v_f64m1 | ( | ... | ) | __riscv_vssseg5e64_v_f64m1(__VA_ARGS__) |
| #define vssseg5e64_v_f64m1_m | ( | ... | ) | __riscv_vssseg5e64_v_f64m1_m(__VA_ARGS__) |
| #define vssseg5e64_v_i64m1 | ( | ... | ) | __riscv_vssseg5e64_v_i64m1(__VA_ARGS__) |
| #define vssseg5e64_v_i64m1_m | ( | ... | ) | __riscv_vssseg5e64_v_i64m1_m(__VA_ARGS__) |
| #define vssseg5e64_v_u64m1 | ( | ... | ) | __riscv_vssseg5e64_v_u64m1(__VA_ARGS__) |
| #define vssseg5e64_v_u64m1_m | ( | ... | ) | __riscv_vssseg5e64_v_u64m1_m(__VA_ARGS__) |
| #define vssseg5e8_v_i8m1 | ( | ... | ) | __riscv_vssseg5e8_v_i8m1(__VA_ARGS__) |
| #define vssseg5e8_v_i8m1_m | ( | ... | ) | __riscv_vssseg5e8_v_i8m1_m(__VA_ARGS__) |
| #define vssseg5e8_v_i8mf2 | ( | ... | ) | __riscv_vssseg5e8_v_i8mf2(__VA_ARGS__) |
| #define vssseg5e8_v_i8mf2_m | ( | ... | ) | __riscv_vssseg5e8_v_i8mf2_m(__VA_ARGS__) |
| #define vssseg5e8_v_i8mf4 | ( | ... | ) | __riscv_vssseg5e8_v_i8mf4(__VA_ARGS__) |
| #define vssseg5e8_v_i8mf4_m | ( | ... | ) | __riscv_vssseg5e8_v_i8mf4_m(__VA_ARGS__) |
| #define vssseg5e8_v_i8mf8 | ( | ... | ) | __riscv_vssseg5e8_v_i8mf8(__VA_ARGS__) |
| #define vssseg5e8_v_i8mf8_m | ( | ... | ) | __riscv_vssseg5e8_v_i8mf8_m(__VA_ARGS__) |
| #define vssseg5e8_v_u8m1 | ( | ... | ) | __riscv_vssseg5e8_v_u8m1(__VA_ARGS__) |
| #define vssseg5e8_v_u8m1_m | ( | ... | ) | __riscv_vssseg5e8_v_u8m1_m(__VA_ARGS__) |
| #define vssseg5e8_v_u8mf2 | ( | ... | ) | __riscv_vssseg5e8_v_u8mf2(__VA_ARGS__) |
| #define vssseg5e8_v_u8mf2_m | ( | ... | ) | __riscv_vssseg5e8_v_u8mf2_m(__VA_ARGS__) |
| #define vssseg5e8_v_u8mf4 | ( | ... | ) | __riscv_vssseg5e8_v_u8mf4(__VA_ARGS__) |
| #define vssseg5e8_v_u8mf4_m | ( | ... | ) | __riscv_vssseg5e8_v_u8mf4_m(__VA_ARGS__) |
| #define vssseg5e8_v_u8mf8 | ( | ... | ) | __riscv_vssseg5e8_v_u8mf8(__VA_ARGS__) |
| #define vssseg5e8_v_u8mf8_m | ( | ... | ) | __riscv_vssseg5e8_v_u8mf8_m(__VA_ARGS__) |
| #define vssseg6e16_v_f16m1 | ( | ... | ) | __riscv_vssseg6e16_v_f16m1(__VA_ARGS__) |
| #define vssseg6e16_v_f16m1_m | ( | ... | ) | __riscv_vssseg6e16_v_f16m1_m(__VA_ARGS__) |
| #define vssseg6e16_v_f16mf2 | ( | ... | ) | __riscv_vssseg6e16_v_f16mf2(__VA_ARGS__) |
| #define vssseg6e16_v_f16mf2_m | ( | ... | ) | __riscv_vssseg6e16_v_f16mf2_m(__VA_ARGS__) |
| #define vssseg6e16_v_f16mf4 | ( | ... | ) | __riscv_vssseg6e16_v_f16mf4(__VA_ARGS__) |
| #define vssseg6e16_v_f16mf4_m | ( | ... | ) | __riscv_vssseg6e16_v_f16mf4_m(__VA_ARGS__) |
| #define vssseg6e16_v_i16m1 | ( | ... | ) | __riscv_vssseg6e16_v_i16m1(__VA_ARGS__) |
| #define vssseg6e16_v_i16m1_m | ( | ... | ) | __riscv_vssseg6e16_v_i16m1_m(__VA_ARGS__) |
| #define vssseg6e16_v_i16mf2 | ( | ... | ) | __riscv_vssseg6e16_v_i16mf2(__VA_ARGS__) |
| #define vssseg6e16_v_i16mf2_m | ( | ... | ) | __riscv_vssseg6e16_v_i16mf2_m(__VA_ARGS__) |
| #define vssseg6e16_v_i16mf4 | ( | ... | ) | __riscv_vssseg6e16_v_i16mf4(__VA_ARGS__) |
| #define vssseg6e16_v_i16mf4_m | ( | ... | ) | __riscv_vssseg6e16_v_i16mf4_m(__VA_ARGS__) |
| #define vssseg6e16_v_u16m1 | ( | ... | ) | __riscv_vssseg6e16_v_u16m1(__VA_ARGS__) |
| #define vssseg6e16_v_u16m1_m | ( | ... | ) | __riscv_vssseg6e16_v_u16m1_m(__VA_ARGS__) |
| #define vssseg6e16_v_u16mf2 | ( | ... | ) | __riscv_vssseg6e16_v_u16mf2(__VA_ARGS__) |
| #define vssseg6e16_v_u16mf2_m | ( | ... | ) | __riscv_vssseg6e16_v_u16mf2_m(__VA_ARGS__) |
| #define vssseg6e16_v_u16mf4 | ( | ... | ) | __riscv_vssseg6e16_v_u16mf4(__VA_ARGS__) |
| #define vssseg6e16_v_u16mf4_m | ( | ... | ) | __riscv_vssseg6e16_v_u16mf4_m(__VA_ARGS__) |
| #define vssseg6e32_v_f32m1 | ( | ... | ) | __riscv_vssseg6e32_v_f32m1(__VA_ARGS__) |
| #define vssseg6e32_v_f32m1_m | ( | ... | ) | __riscv_vssseg6e32_v_f32m1_m(__VA_ARGS__) |
| #define vssseg6e32_v_f32mf2 | ( | ... | ) | __riscv_vssseg6e32_v_f32mf2(__VA_ARGS__) |
| #define vssseg6e32_v_f32mf2_m | ( | ... | ) | __riscv_vssseg6e32_v_f32mf2_m(__VA_ARGS__) |
| #define vssseg6e32_v_i32m1 | ( | ... | ) | __riscv_vssseg6e32_v_i32m1(__VA_ARGS__) |
| #define vssseg6e32_v_i32m1_m | ( | ... | ) | __riscv_vssseg6e32_v_i32m1_m(__VA_ARGS__) |
| #define vssseg6e32_v_i32mf2 | ( | ... | ) | __riscv_vssseg6e32_v_i32mf2(__VA_ARGS__) |
| #define vssseg6e32_v_i32mf2_m | ( | ... | ) | __riscv_vssseg6e32_v_i32mf2_m(__VA_ARGS__) |
| #define vssseg6e32_v_u32m1 | ( | ... | ) | __riscv_vssseg6e32_v_u32m1(__VA_ARGS__) |
| #define vssseg6e32_v_u32m1_m | ( | ... | ) | __riscv_vssseg6e32_v_u32m1_m(__VA_ARGS__) |
| #define vssseg6e32_v_u32mf2 | ( | ... | ) | __riscv_vssseg6e32_v_u32mf2(__VA_ARGS__) |
| #define vssseg6e32_v_u32mf2_m | ( | ... | ) | __riscv_vssseg6e32_v_u32mf2_m(__VA_ARGS__) |
| #define vssseg6e64_v_f64m1 | ( | ... | ) | __riscv_vssseg6e64_v_f64m1(__VA_ARGS__) |
| #define vssseg6e64_v_f64m1_m | ( | ... | ) | __riscv_vssseg6e64_v_f64m1_m(__VA_ARGS__) |
| #define vssseg6e64_v_i64m1 | ( | ... | ) | __riscv_vssseg6e64_v_i64m1(__VA_ARGS__) |
| #define vssseg6e64_v_i64m1_m | ( | ... | ) | __riscv_vssseg6e64_v_i64m1_m(__VA_ARGS__) |
| #define vssseg6e64_v_u64m1 | ( | ... | ) | __riscv_vssseg6e64_v_u64m1(__VA_ARGS__) |
| #define vssseg6e64_v_u64m1_m | ( | ... | ) | __riscv_vssseg6e64_v_u64m1_m(__VA_ARGS__) |
| #define vssseg6e8_v_i8m1 | ( | ... | ) | __riscv_vssseg6e8_v_i8m1(__VA_ARGS__) |
| #define vssseg6e8_v_i8m1_m | ( | ... | ) | __riscv_vssseg6e8_v_i8m1_m(__VA_ARGS__) |
| #define vssseg6e8_v_i8mf2 | ( | ... | ) | __riscv_vssseg6e8_v_i8mf2(__VA_ARGS__) |
| #define vssseg6e8_v_i8mf2_m | ( | ... | ) | __riscv_vssseg6e8_v_i8mf2_m(__VA_ARGS__) |
| #define vssseg6e8_v_i8mf4 | ( | ... | ) | __riscv_vssseg6e8_v_i8mf4(__VA_ARGS__) |
| #define vssseg6e8_v_i8mf4_m | ( | ... | ) | __riscv_vssseg6e8_v_i8mf4_m(__VA_ARGS__) |
| #define vssseg6e8_v_i8mf8 | ( | ... | ) | __riscv_vssseg6e8_v_i8mf8(__VA_ARGS__) |
| #define vssseg6e8_v_i8mf8_m | ( | ... | ) | __riscv_vssseg6e8_v_i8mf8_m(__VA_ARGS__) |
| #define vssseg6e8_v_u8m1 | ( | ... | ) | __riscv_vssseg6e8_v_u8m1(__VA_ARGS__) |
| #define vssseg6e8_v_u8m1_m | ( | ... | ) | __riscv_vssseg6e8_v_u8m1_m(__VA_ARGS__) |
| #define vssseg6e8_v_u8mf2 | ( | ... | ) | __riscv_vssseg6e8_v_u8mf2(__VA_ARGS__) |
| #define vssseg6e8_v_u8mf2_m | ( | ... | ) | __riscv_vssseg6e8_v_u8mf2_m(__VA_ARGS__) |
| #define vssseg6e8_v_u8mf4 | ( | ... | ) | __riscv_vssseg6e8_v_u8mf4(__VA_ARGS__) |
| #define vssseg6e8_v_u8mf4_m | ( | ... | ) | __riscv_vssseg6e8_v_u8mf4_m(__VA_ARGS__) |
| #define vssseg6e8_v_u8mf8 | ( | ... | ) | __riscv_vssseg6e8_v_u8mf8(__VA_ARGS__) |
| #define vssseg6e8_v_u8mf8_m | ( | ... | ) | __riscv_vssseg6e8_v_u8mf8_m(__VA_ARGS__) |
| #define vssseg7e16_v_f16m1 | ( | ... | ) | __riscv_vssseg7e16_v_f16m1(__VA_ARGS__) |
| #define vssseg7e16_v_f16m1_m | ( | ... | ) | __riscv_vssseg7e16_v_f16m1_m(__VA_ARGS__) |
| #define vssseg7e16_v_f16mf2 | ( | ... | ) | __riscv_vssseg7e16_v_f16mf2(__VA_ARGS__) |
| #define vssseg7e16_v_f16mf2_m | ( | ... | ) | __riscv_vssseg7e16_v_f16mf2_m(__VA_ARGS__) |
| #define vssseg7e16_v_f16mf4 | ( | ... | ) | __riscv_vssseg7e16_v_f16mf4(__VA_ARGS__) |
| #define vssseg7e16_v_f16mf4_m | ( | ... | ) | __riscv_vssseg7e16_v_f16mf4_m(__VA_ARGS__) |
| #define vssseg7e16_v_i16m1 | ( | ... | ) | __riscv_vssseg7e16_v_i16m1(__VA_ARGS__) |
| #define vssseg7e16_v_i16m1_m | ( | ... | ) | __riscv_vssseg7e16_v_i16m1_m(__VA_ARGS__) |
| #define vssseg7e16_v_i16mf2 | ( | ... | ) | __riscv_vssseg7e16_v_i16mf2(__VA_ARGS__) |
| #define vssseg7e16_v_i16mf2_m | ( | ... | ) | __riscv_vssseg7e16_v_i16mf2_m(__VA_ARGS__) |
| #define vssseg7e16_v_i16mf4 | ( | ... | ) | __riscv_vssseg7e16_v_i16mf4(__VA_ARGS__) |
| #define vssseg7e16_v_i16mf4_m | ( | ... | ) | __riscv_vssseg7e16_v_i16mf4_m(__VA_ARGS__) |
| #define vssseg7e16_v_u16m1 | ( | ... | ) | __riscv_vssseg7e16_v_u16m1(__VA_ARGS__) |
| #define vssseg7e16_v_u16m1_m | ( | ... | ) | __riscv_vssseg7e16_v_u16m1_m(__VA_ARGS__) |
| #define vssseg7e16_v_u16mf2 | ( | ... | ) | __riscv_vssseg7e16_v_u16mf2(__VA_ARGS__) |
| #define vssseg7e16_v_u16mf2_m | ( | ... | ) | __riscv_vssseg7e16_v_u16mf2_m(__VA_ARGS__) |
| #define vssseg7e16_v_u16mf4 | ( | ... | ) | __riscv_vssseg7e16_v_u16mf4(__VA_ARGS__) |
| #define vssseg7e16_v_u16mf4_m | ( | ... | ) | __riscv_vssseg7e16_v_u16mf4_m(__VA_ARGS__) |
| #define vssseg7e32_v_f32m1 | ( | ... | ) | __riscv_vssseg7e32_v_f32m1(__VA_ARGS__) |
| #define vssseg7e32_v_f32m1_m | ( | ... | ) | __riscv_vssseg7e32_v_f32m1_m(__VA_ARGS__) |
| #define vssseg7e32_v_f32mf2 | ( | ... | ) | __riscv_vssseg7e32_v_f32mf2(__VA_ARGS__) |
| #define vssseg7e32_v_f32mf2_m | ( | ... | ) | __riscv_vssseg7e32_v_f32mf2_m(__VA_ARGS__) |
| #define vssseg7e32_v_i32m1 | ( | ... | ) | __riscv_vssseg7e32_v_i32m1(__VA_ARGS__) |
| #define vssseg7e32_v_i32m1_m | ( | ... | ) | __riscv_vssseg7e32_v_i32m1_m(__VA_ARGS__) |
| #define vssseg7e32_v_i32mf2 | ( | ... | ) | __riscv_vssseg7e32_v_i32mf2(__VA_ARGS__) |
| #define vssseg7e32_v_i32mf2_m | ( | ... | ) | __riscv_vssseg7e32_v_i32mf2_m(__VA_ARGS__) |
| #define vssseg7e32_v_u32m1 | ( | ... | ) | __riscv_vssseg7e32_v_u32m1(__VA_ARGS__) |
| #define vssseg7e32_v_u32m1_m | ( | ... | ) | __riscv_vssseg7e32_v_u32m1_m(__VA_ARGS__) |
| #define vssseg7e32_v_u32mf2 | ( | ... | ) | __riscv_vssseg7e32_v_u32mf2(__VA_ARGS__) |
| #define vssseg7e32_v_u32mf2_m | ( | ... | ) | __riscv_vssseg7e32_v_u32mf2_m(__VA_ARGS__) |
| #define vssseg7e64_v_f64m1 | ( | ... | ) | __riscv_vssseg7e64_v_f64m1(__VA_ARGS__) |
| #define vssseg7e64_v_f64m1_m | ( | ... | ) | __riscv_vssseg7e64_v_f64m1_m(__VA_ARGS__) |
| #define vssseg7e64_v_i64m1 | ( | ... | ) | __riscv_vssseg7e64_v_i64m1(__VA_ARGS__) |
| #define vssseg7e64_v_i64m1_m | ( | ... | ) | __riscv_vssseg7e64_v_i64m1_m(__VA_ARGS__) |
| #define vssseg7e64_v_u64m1 | ( | ... | ) | __riscv_vssseg7e64_v_u64m1(__VA_ARGS__) |
| #define vssseg7e64_v_u64m1_m | ( | ... | ) | __riscv_vssseg7e64_v_u64m1_m(__VA_ARGS__) |
| #define vssseg7e8_v_i8m1 | ( | ... | ) | __riscv_vssseg7e8_v_i8m1(__VA_ARGS__) |
| #define vssseg7e8_v_i8m1_m | ( | ... | ) | __riscv_vssseg7e8_v_i8m1_m(__VA_ARGS__) |
| #define vssseg7e8_v_i8mf2 | ( | ... | ) | __riscv_vssseg7e8_v_i8mf2(__VA_ARGS__) |
| #define vssseg7e8_v_i8mf2_m | ( | ... | ) | __riscv_vssseg7e8_v_i8mf2_m(__VA_ARGS__) |
| #define vssseg7e8_v_i8mf4 | ( | ... | ) | __riscv_vssseg7e8_v_i8mf4(__VA_ARGS__) |
| #define vssseg7e8_v_i8mf4_m | ( | ... | ) | __riscv_vssseg7e8_v_i8mf4_m(__VA_ARGS__) |
| #define vssseg7e8_v_i8mf8 | ( | ... | ) | __riscv_vssseg7e8_v_i8mf8(__VA_ARGS__) |
| #define vssseg7e8_v_i8mf8_m | ( | ... | ) | __riscv_vssseg7e8_v_i8mf8_m(__VA_ARGS__) |
| #define vssseg7e8_v_u8m1 | ( | ... | ) | __riscv_vssseg7e8_v_u8m1(__VA_ARGS__) |
| #define vssseg7e8_v_u8m1_m | ( | ... | ) | __riscv_vssseg7e8_v_u8m1_m(__VA_ARGS__) |
| #define vssseg7e8_v_u8mf2 | ( | ... | ) | __riscv_vssseg7e8_v_u8mf2(__VA_ARGS__) |
| #define vssseg7e8_v_u8mf2_m | ( | ... | ) | __riscv_vssseg7e8_v_u8mf2_m(__VA_ARGS__) |
| #define vssseg7e8_v_u8mf4 | ( | ... | ) | __riscv_vssseg7e8_v_u8mf4(__VA_ARGS__) |
| #define vssseg7e8_v_u8mf4_m | ( | ... | ) | __riscv_vssseg7e8_v_u8mf4_m(__VA_ARGS__) |
| #define vssseg7e8_v_u8mf8 | ( | ... | ) | __riscv_vssseg7e8_v_u8mf8(__VA_ARGS__) |
| #define vssseg7e8_v_u8mf8_m | ( | ... | ) | __riscv_vssseg7e8_v_u8mf8_m(__VA_ARGS__) |
| #define vssseg8e16_v_f16m1 | ( | ... | ) | __riscv_vssseg8e16_v_f16m1(__VA_ARGS__) |
| #define vssseg8e16_v_f16m1_m | ( | ... | ) | __riscv_vssseg8e16_v_f16m1_m(__VA_ARGS__) |
| #define vssseg8e16_v_f16mf2 | ( | ... | ) | __riscv_vssseg8e16_v_f16mf2(__VA_ARGS__) |
| #define vssseg8e16_v_f16mf2_m | ( | ... | ) | __riscv_vssseg8e16_v_f16mf2_m(__VA_ARGS__) |
| #define vssseg8e16_v_f16mf4 | ( | ... | ) | __riscv_vssseg8e16_v_f16mf4(__VA_ARGS__) |
| #define vssseg8e16_v_f16mf4_m | ( | ... | ) | __riscv_vssseg8e16_v_f16mf4_m(__VA_ARGS__) |
| #define vssseg8e16_v_i16m1 | ( | ... | ) | __riscv_vssseg8e16_v_i16m1(__VA_ARGS__) |
| #define vssseg8e16_v_i16m1_m | ( | ... | ) | __riscv_vssseg8e16_v_i16m1_m(__VA_ARGS__) |
| #define vssseg8e16_v_i16mf2 | ( | ... | ) | __riscv_vssseg8e16_v_i16mf2(__VA_ARGS__) |
| #define vssseg8e16_v_i16mf2_m | ( | ... | ) | __riscv_vssseg8e16_v_i16mf2_m(__VA_ARGS__) |
| #define vssseg8e16_v_i16mf4 | ( | ... | ) | __riscv_vssseg8e16_v_i16mf4(__VA_ARGS__) |
| #define vssseg8e16_v_i16mf4_m | ( | ... | ) | __riscv_vssseg8e16_v_i16mf4_m(__VA_ARGS__) |
| #define vssseg8e16_v_u16m1 | ( | ... | ) | __riscv_vssseg8e16_v_u16m1(__VA_ARGS__) |
| #define vssseg8e16_v_u16m1_m | ( | ... | ) | __riscv_vssseg8e16_v_u16m1_m(__VA_ARGS__) |
| #define vssseg8e16_v_u16mf2 | ( | ... | ) | __riscv_vssseg8e16_v_u16mf2(__VA_ARGS__) |
| #define vssseg8e16_v_u16mf2_m | ( | ... | ) | __riscv_vssseg8e16_v_u16mf2_m(__VA_ARGS__) |
| #define vssseg8e16_v_u16mf4 | ( | ... | ) | __riscv_vssseg8e16_v_u16mf4(__VA_ARGS__) |
| #define vssseg8e16_v_u16mf4_m | ( | ... | ) | __riscv_vssseg8e16_v_u16mf4_m(__VA_ARGS__) |
| #define vssseg8e32_v_f32m1 | ( | ... | ) | __riscv_vssseg8e32_v_f32m1(__VA_ARGS__) |
| #define vssseg8e32_v_f32m1_m | ( | ... | ) | __riscv_vssseg8e32_v_f32m1_m(__VA_ARGS__) |
| #define vssseg8e32_v_f32mf2 | ( | ... | ) | __riscv_vssseg8e32_v_f32mf2(__VA_ARGS__) |
| #define vssseg8e32_v_f32mf2_m | ( | ... | ) | __riscv_vssseg8e32_v_f32mf2_m(__VA_ARGS__) |
| #define vssseg8e32_v_i32m1 | ( | ... | ) | __riscv_vssseg8e32_v_i32m1(__VA_ARGS__) |
| #define vssseg8e32_v_i32m1_m | ( | ... | ) | __riscv_vssseg8e32_v_i32m1_m(__VA_ARGS__) |
| #define vssseg8e32_v_i32mf2 | ( | ... | ) | __riscv_vssseg8e32_v_i32mf2(__VA_ARGS__) |
| #define vssseg8e32_v_i32mf2_m | ( | ... | ) | __riscv_vssseg8e32_v_i32mf2_m(__VA_ARGS__) |
| #define vssseg8e32_v_u32m1 | ( | ... | ) | __riscv_vssseg8e32_v_u32m1(__VA_ARGS__) |
| #define vssseg8e32_v_u32m1_m | ( | ... | ) | __riscv_vssseg8e32_v_u32m1_m(__VA_ARGS__) |
| #define vssseg8e32_v_u32mf2 | ( | ... | ) | __riscv_vssseg8e32_v_u32mf2(__VA_ARGS__) |
| #define vssseg8e32_v_u32mf2_m | ( | ... | ) | __riscv_vssseg8e32_v_u32mf2_m(__VA_ARGS__) |
| #define vssseg8e64_v_f64m1 | ( | ... | ) | __riscv_vssseg8e64_v_f64m1(__VA_ARGS__) |
| #define vssseg8e64_v_f64m1_m | ( | ... | ) | __riscv_vssseg8e64_v_f64m1_m(__VA_ARGS__) |
| #define vssseg8e64_v_i64m1 | ( | ... | ) | __riscv_vssseg8e64_v_i64m1(__VA_ARGS__) |
| #define vssseg8e64_v_i64m1_m | ( | ... | ) | __riscv_vssseg8e64_v_i64m1_m(__VA_ARGS__) |
| #define vssseg8e64_v_u64m1 | ( | ... | ) | __riscv_vssseg8e64_v_u64m1(__VA_ARGS__) |
| #define vssseg8e64_v_u64m1_m | ( | ... | ) | __riscv_vssseg8e64_v_u64m1_m(__VA_ARGS__) |
| #define vssseg8e8_v_i8m1 | ( | ... | ) | __riscv_vssseg8e8_v_i8m1(__VA_ARGS__) |
| #define vssseg8e8_v_i8m1_m | ( | ... | ) | __riscv_vssseg8e8_v_i8m1_m(__VA_ARGS__) |
| #define vssseg8e8_v_i8mf2 | ( | ... | ) | __riscv_vssseg8e8_v_i8mf2(__VA_ARGS__) |
| #define vssseg8e8_v_i8mf2_m | ( | ... | ) | __riscv_vssseg8e8_v_i8mf2_m(__VA_ARGS__) |
| #define vssseg8e8_v_i8mf4 | ( | ... | ) | __riscv_vssseg8e8_v_i8mf4(__VA_ARGS__) |
| #define vssseg8e8_v_i8mf4_m | ( | ... | ) | __riscv_vssseg8e8_v_i8mf4_m(__VA_ARGS__) |
| #define vssseg8e8_v_i8mf8 | ( | ... | ) | __riscv_vssseg8e8_v_i8mf8(__VA_ARGS__) |
| #define vssseg8e8_v_i8mf8_m | ( | ... | ) | __riscv_vssseg8e8_v_i8mf8_m(__VA_ARGS__) |
| #define vssseg8e8_v_u8m1 | ( | ... | ) | __riscv_vssseg8e8_v_u8m1(__VA_ARGS__) |
| #define vssseg8e8_v_u8m1_m | ( | ... | ) | __riscv_vssseg8e8_v_u8m1_m(__VA_ARGS__) |
| #define vssseg8e8_v_u8mf2 | ( | ... | ) | __riscv_vssseg8e8_v_u8mf2(__VA_ARGS__) |
| #define vssseg8e8_v_u8mf2_m | ( | ... | ) | __riscv_vssseg8e8_v_u8mf2_m(__VA_ARGS__) |
| #define vssseg8e8_v_u8mf4 | ( | ... | ) | __riscv_vssseg8e8_v_u8mf4(__VA_ARGS__) |
| #define vssseg8e8_v_u8mf4_m | ( | ... | ) | __riscv_vssseg8e8_v_u8mf4_m(__VA_ARGS__) |
| #define vssseg8e8_v_u8mf8 | ( | ... | ) | __riscv_vssseg8e8_v_u8mf8(__VA_ARGS__) |
| #define vssseg8e8_v_u8mf8_m | ( | ... | ) | __riscv_vssseg8e8_v_u8mf8_m(__VA_ARGS__) |
| #define vssub_vv_i16m1 | ( | ... | ) | __riscv_vssub_vv_i16m1(__VA_ARGS__) |
| #define vssub_vv_i16m1_m | ( | ... | ) | __riscv_vssub_vv_i16m1_tumu(__VA_ARGS__) |
| #define vssub_vv_i16m2 | ( | ... | ) | __riscv_vssub_vv_i16m2(__VA_ARGS__) |
| #define vssub_vv_i16m2_m | ( | ... | ) | __riscv_vssub_vv_i16m2_tumu(__VA_ARGS__) |
| #define vssub_vv_i16m4 | ( | ... | ) | __riscv_vssub_vv_i16m4(__VA_ARGS__) |
| #define vssub_vv_i16m4_m | ( | ... | ) | __riscv_vssub_vv_i16m4_tumu(__VA_ARGS__) |
| #define vssub_vv_i16m8 | ( | ... | ) | __riscv_vssub_vv_i16m8(__VA_ARGS__) |
| #define vssub_vv_i16m8_m | ( | ... | ) | __riscv_vssub_vv_i16m8_tumu(__VA_ARGS__) |
| #define vssub_vv_i16mf2 | ( | ... | ) | __riscv_vssub_vv_i16mf2(__VA_ARGS__) |
| #define vssub_vv_i16mf2_m | ( | ... | ) | __riscv_vssub_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vssub_vv_i16mf4 | ( | ... | ) | __riscv_vssub_vv_i16mf4(__VA_ARGS__) |
| #define vssub_vv_i16mf4_m | ( | ... | ) | __riscv_vssub_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vssub_vv_i32m1 | ( | ... | ) | __riscv_vssub_vv_i32m1(__VA_ARGS__) |
| #define vssub_vv_i32m1_m | ( | ... | ) | __riscv_vssub_vv_i32m1_tumu(__VA_ARGS__) |
| #define vssub_vv_i32m2 | ( | ... | ) | __riscv_vssub_vv_i32m2(__VA_ARGS__) |
| #define vssub_vv_i32m2_m | ( | ... | ) | __riscv_vssub_vv_i32m2_tumu(__VA_ARGS__) |
| #define vssub_vv_i32m4 | ( | ... | ) | __riscv_vssub_vv_i32m4(__VA_ARGS__) |
| #define vssub_vv_i32m4_m | ( | ... | ) | __riscv_vssub_vv_i32m4_tumu(__VA_ARGS__) |
| #define vssub_vv_i32m8 | ( | ... | ) | __riscv_vssub_vv_i32m8(__VA_ARGS__) |
| #define vssub_vv_i32m8_m | ( | ... | ) | __riscv_vssub_vv_i32m8_tumu(__VA_ARGS__) |
| #define vssub_vv_i32mf2 | ( | ... | ) | __riscv_vssub_vv_i32mf2(__VA_ARGS__) |
| #define vssub_vv_i32mf2_m | ( | ... | ) | __riscv_vssub_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vssub_vv_i64m1 | ( | ... | ) | __riscv_vssub_vv_i64m1(__VA_ARGS__) |
| #define vssub_vv_i64m1_m | ( | ... | ) | __riscv_vssub_vv_i64m1_tumu(__VA_ARGS__) |
| #define vssub_vv_i64m2 | ( | ... | ) | __riscv_vssub_vv_i64m2(__VA_ARGS__) |
| #define vssub_vv_i64m2_m | ( | ... | ) | __riscv_vssub_vv_i64m2_tumu(__VA_ARGS__) |
| #define vssub_vv_i64m4 | ( | ... | ) | __riscv_vssub_vv_i64m4(__VA_ARGS__) |
| #define vssub_vv_i64m4_m | ( | ... | ) | __riscv_vssub_vv_i64m4_tumu(__VA_ARGS__) |
| #define vssub_vv_i64m8 | ( | ... | ) | __riscv_vssub_vv_i64m8(__VA_ARGS__) |
| #define vssub_vv_i64m8_m | ( | ... | ) | __riscv_vssub_vv_i64m8_tumu(__VA_ARGS__) |
| #define vssub_vv_i8m1 | ( | ... | ) | __riscv_vssub_vv_i8m1(__VA_ARGS__) |
| #define vssub_vv_i8m1_m | ( | ... | ) | __riscv_vssub_vv_i8m1_tumu(__VA_ARGS__) |
| #define vssub_vv_i8m2 | ( | ... | ) | __riscv_vssub_vv_i8m2(__VA_ARGS__) |
| #define vssub_vv_i8m2_m | ( | ... | ) | __riscv_vssub_vv_i8m2_tumu(__VA_ARGS__) |
| #define vssub_vv_i8m4 | ( | ... | ) | __riscv_vssub_vv_i8m4(__VA_ARGS__) |
| #define vssub_vv_i8m4_m | ( | ... | ) | __riscv_vssub_vv_i8m4_tumu(__VA_ARGS__) |
| #define vssub_vv_i8m8 | ( | ... | ) | __riscv_vssub_vv_i8m8(__VA_ARGS__) |
| #define vssub_vv_i8m8_m | ( | ... | ) | __riscv_vssub_vv_i8m8_tumu(__VA_ARGS__) |
| #define vssub_vv_i8mf2 | ( | ... | ) | __riscv_vssub_vv_i8mf2(__VA_ARGS__) |
| #define vssub_vv_i8mf2_m | ( | ... | ) | __riscv_vssub_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vssub_vv_i8mf4 | ( | ... | ) | __riscv_vssub_vv_i8mf4(__VA_ARGS__) |
| #define vssub_vv_i8mf4_m | ( | ... | ) | __riscv_vssub_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vssub_vv_i8mf8 | ( | ... | ) | __riscv_vssub_vv_i8mf8(__VA_ARGS__) |
| #define vssub_vv_i8mf8_m | ( | ... | ) | __riscv_vssub_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vssub_vx_i16m1 | ( | ... | ) | __riscv_vssub_vx_i16m1(__VA_ARGS__) |
| #define vssub_vx_i16m1_m | ( | ... | ) | __riscv_vssub_vx_i16m1_tumu(__VA_ARGS__) |
| #define vssub_vx_i16m2 | ( | ... | ) | __riscv_vssub_vx_i16m2(__VA_ARGS__) |
| #define vssub_vx_i16m2_m | ( | ... | ) | __riscv_vssub_vx_i16m2_tumu(__VA_ARGS__) |
| #define vssub_vx_i16m4 | ( | ... | ) | __riscv_vssub_vx_i16m4(__VA_ARGS__) |
| #define vssub_vx_i16m4_m | ( | ... | ) | __riscv_vssub_vx_i16m4_tumu(__VA_ARGS__) |
| #define vssub_vx_i16m8 | ( | ... | ) | __riscv_vssub_vx_i16m8(__VA_ARGS__) |
| #define vssub_vx_i16m8_m | ( | ... | ) | __riscv_vssub_vx_i16m8_tumu(__VA_ARGS__) |
| #define vssub_vx_i16mf2 | ( | ... | ) | __riscv_vssub_vx_i16mf2(__VA_ARGS__) |
| #define vssub_vx_i16mf2_m | ( | ... | ) | __riscv_vssub_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vssub_vx_i16mf4 | ( | ... | ) | __riscv_vssub_vx_i16mf4(__VA_ARGS__) |
| #define vssub_vx_i16mf4_m | ( | ... | ) | __riscv_vssub_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vssub_vx_i32m1 | ( | ... | ) | __riscv_vssub_vx_i32m1(__VA_ARGS__) |
| #define vssub_vx_i32m1_m | ( | ... | ) | __riscv_vssub_vx_i32m1_tumu(__VA_ARGS__) |
| #define vssub_vx_i32m2 | ( | ... | ) | __riscv_vssub_vx_i32m2(__VA_ARGS__) |
| #define vssub_vx_i32m2_m | ( | ... | ) | __riscv_vssub_vx_i32m2_tumu(__VA_ARGS__) |
| #define vssub_vx_i32m4 | ( | ... | ) | __riscv_vssub_vx_i32m4(__VA_ARGS__) |
| #define vssub_vx_i32m4_m | ( | ... | ) | __riscv_vssub_vx_i32m4_tumu(__VA_ARGS__) |
| #define vssub_vx_i32m8 | ( | ... | ) | __riscv_vssub_vx_i32m8(__VA_ARGS__) |
| #define vssub_vx_i32m8_m | ( | ... | ) | __riscv_vssub_vx_i32m8_tumu(__VA_ARGS__) |
| #define vssub_vx_i32mf2 | ( | ... | ) | __riscv_vssub_vx_i32mf2(__VA_ARGS__) |
| #define vssub_vx_i32mf2_m | ( | ... | ) | __riscv_vssub_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vssub_vx_i64m1 | ( | ... | ) | __riscv_vssub_vx_i64m1(__VA_ARGS__) |
| #define vssub_vx_i64m1_m | ( | ... | ) | __riscv_vssub_vx_i64m1_tumu(__VA_ARGS__) |
| #define vssub_vx_i64m2 | ( | ... | ) | __riscv_vssub_vx_i64m2(__VA_ARGS__) |
| #define vssub_vx_i64m2_m | ( | ... | ) | __riscv_vssub_vx_i64m2_tumu(__VA_ARGS__) |
| #define vssub_vx_i64m4 | ( | ... | ) | __riscv_vssub_vx_i64m4(__VA_ARGS__) |
| #define vssub_vx_i64m4_m | ( | ... | ) | __riscv_vssub_vx_i64m4_tumu(__VA_ARGS__) |
| #define vssub_vx_i64m8 | ( | ... | ) | __riscv_vssub_vx_i64m8(__VA_ARGS__) |
| #define vssub_vx_i64m8_m | ( | ... | ) | __riscv_vssub_vx_i64m8_tumu(__VA_ARGS__) |
| #define vssub_vx_i8m1 | ( | ... | ) | __riscv_vssub_vx_i8m1(__VA_ARGS__) |
| #define vssub_vx_i8m1_m | ( | ... | ) | __riscv_vssub_vx_i8m1_tumu(__VA_ARGS__) |
| #define vssub_vx_i8m2 | ( | ... | ) | __riscv_vssub_vx_i8m2(__VA_ARGS__) |
| #define vssub_vx_i8m2_m | ( | ... | ) | __riscv_vssub_vx_i8m2_tumu(__VA_ARGS__) |
| #define vssub_vx_i8m4 | ( | ... | ) | __riscv_vssub_vx_i8m4(__VA_ARGS__) |
| #define vssub_vx_i8m4_m | ( | ... | ) | __riscv_vssub_vx_i8m4_tumu(__VA_ARGS__) |
| #define vssub_vx_i8m8 | ( | ... | ) | __riscv_vssub_vx_i8m8(__VA_ARGS__) |
| #define vssub_vx_i8m8_m | ( | ... | ) | __riscv_vssub_vx_i8m8_tumu(__VA_ARGS__) |
| #define vssub_vx_i8mf2 | ( | ... | ) | __riscv_vssub_vx_i8mf2(__VA_ARGS__) |
| #define vssub_vx_i8mf2_m | ( | ... | ) | __riscv_vssub_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vssub_vx_i8mf4 | ( | ... | ) | __riscv_vssub_vx_i8mf4(__VA_ARGS__) |
| #define vssub_vx_i8mf4_m | ( | ... | ) | __riscv_vssub_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vssub_vx_i8mf8 | ( | ... | ) | __riscv_vssub_vx_i8mf8(__VA_ARGS__) |
| #define vssub_vx_i8mf8_m | ( | ... | ) | __riscv_vssub_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vssubu_vv_u16m1 | ( | ... | ) | __riscv_vssubu_vv_u16m1(__VA_ARGS__) |
| #define vssubu_vv_u16m1_m | ( | ... | ) | __riscv_vssubu_vv_u16m1_tumu(__VA_ARGS__) |
| #define vssubu_vv_u16m2 | ( | ... | ) | __riscv_vssubu_vv_u16m2(__VA_ARGS__) |
| #define vssubu_vv_u16m2_m | ( | ... | ) | __riscv_vssubu_vv_u16m2_tumu(__VA_ARGS__) |
| #define vssubu_vv_u16m4 | ( | ... | ) | __riscv_vssubu_vv_u16m4(__VA_ARGS__) |
| #define vssubu_vv_u16m4_m | ( | ... | ) | __riscv_vssubu_vv_u16m4_tumu(__VA_ARGS__) |
| #define vssubu_vv_u16m8 | ( | ... | ) | __riscv_vssubu_vv_u16m8(__VA_ARGS__) |
| #define vssubu_vv_u16m8_m | ( | ... | ) | __riscv_vssubu_vv_u16m8_tumu(__VA_ARGS__) |
| #define vssubu_vv_u16mf2 | ( | ... | ) | __riscv_vssubu_vv_u16mf2(__VA_ARGS__) |
| #define vssubu_vv_u16mf2_m | ( | ... | ) | __riscv_vssubu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vssubu_vv_u16mf4 | ( | ... | ) | __riscv_vssubu_vv_u16mf4(__VA_ARGS__) |
| #define vssubu_vv_u16mf4_m | ( | ... | ) | __riscv_vssubu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vssubu_vv_u32m1 | ( | ... | ) | __riscv_vssubu_vv_u32m1(__VA_ARGS__) |
| #define vssubu_vv_u32m1_m | ( | ... | ) | __riscv_vssubu_vv_u32m1_tumu(__VA_ARGS__) |
| #define vssubu_vv_u32m2 | ( | ... | ) | __riscv_vssubu_vv_u32m2(__VA_ARGS__) |
| #define vssubu_vv_u32m2_m | ( | ... | ) | __riscv_vssubu_vv_u32m2_tumu(__VA_ARGS__) |
| #define vssubu_vv_u32m4 | ( | ... | ) | __riscv_vssubu_vv_u32m4(__VA_ARGS__) |
| #define vssubu_vv_u32m4_m | ( | ... | ) | __riscv_vssubu_vv_u32m4_tumu(__VA_ARGS__) |
| #define vssubu_vv_u32m8 | ( | ... | ) | __riscv_vssubu_vv_u32m8(__VA_ARGS__) |
| #define vssubu_vv_u32m8_m | ( | ... | ) | __riscv_vssubu_vv_u32m8_tumu(__VA_ARGS__) |
| #define vssubu_vv_u32mf2 | ( | ... | ) | __riscv_vssubu_vv_u32mf2(__VA_ARGS__) |
| #define vssubu_vv_u32mf2_m | ( | ... | ) | __riscv_vssubu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vssubu_vv_u64m1 | ( | ... | ) | __riscv_vssubu_vv_u64m1(__VA_ARGS__) |
| #define vssubu_vv_u64m1_m | ( | ... | ) | __riscv_vssubu_vv_u64m1_tumu(__VA_ARGS__) |
| #define vssubu_vv_u64m2 | ( | ... | ) | __riscv_vssubu_vv_u64m2(__VA_ARGS__) |
| #define vssubu_vv_u64m2_m | ( | ... | ) | __riscv_vssubu_vv_u64m2_tumu(__VA_ARGS__) |
| #define vssubu_vv_u64m4 | ( | ... | ) | __riscv_vssubu_vv_u64m4(__VA_ARGS__) |
| #define vssubu_vv_u64m4_m | ( | ... | ) | __riscv_vssubu_vv_u64m4_tumu(__VA_ARGS__) |
| #define vssubu_vv_u64m8 | ( | ... | ) | __riscv_vssubu_vv_u64m8(__VA_ARGS__) |
| #define vssubu_vv_u64m8_m | ( | ... | ) | __riscv_vssubu_vv_u64m8_tumu(__VA_ARGS__) |
| #define vssubu_vv_u8m1 | ( | ... | ) | __riscv_vssubu_vv_u8m1(__VA_ARGS__) |
| #define vssubu_vv_u8m1_m | ( | ... | ) | __riscv_vssubu_vv_u8m1_tumu(__VA_ARGS__) |
| #define vssubu_vv_u8m2 | ( | ... | ) | __riscv_vssubu_vv_u8m2(__VA_ARGS__) |
| #define vssubu_vv_u8m2_m | ( | ... | ) | __riscv_vssubu_vv_u8m2_tumu(__VA_ARGS__) |
| #define vssubu_vv_u8m4 | ( | ... | ) | __riscv_vssubu_vv_u8m4(__VA_ARGS__) |
| #define vssubu_vv_u8m4_m | ( | ... | ) | __riscv_vssubu_vv_u8m4_tumu(__VA_ARGS__) |
| #define vssubu_vv_u8m8 | ( | ... | ) | __riscv_vssubu_vv_u8m8(__VA_ARGS__) |
| #define vssubu_vv_u8m8_m | ( | ... | ) | __riscv_vssubu_vv_u8m8_tumu(__VA_ARGS__) |
| #define vssubu_vv_u8mf2 | ( | ... | ) | __riscv_vssubu_vv_u8mf2(__VA_ARGS__) |
| #define vssubu_vv_u8mf2_m | ( | ... | ) | __riscv_vssubu_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vssubu_vv_u8mf4 | ( | ... | ) | __riscv_vssubu_vv_u8mf4(__VA_ARGS__) |
| #define vssubu_vv_u8mf4_m | ( | ... | ) | __riscv_vssubu_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vssubu_vv_u8mf8 | ( | ... | ) | __riscv_vssubu_vv_u8mf8(__VA_ARGS__) |
| #define vssubu_vv_u8mf8_m | ( | ... | ) | __riscv_vssubu_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vssubu_vx_u16m1 | ( | ... | ) | __riscv_vssubu_vx_u16m1(__VA_ARGS__) |
| #define vssubu_vx_u16m1_m | ( | ... | ) | __riscv_vssubu_vx_u16m1_tumu(__VA_ARGS__) |
| #define vssubu_vx_u16m2 | ( | ... | ) | __riscv_vssubu_vx_u16m2(__VA_ARGS__) |
| #define vssubu_vx_u16m2_m | ( | ... | ) | __riscv_vssubu_vx_u16m2_tumu(__VA_ARGS__) |
| #define vssubu_vx_u16m4 | ( | ... | ) | __riscv_vssubu_vx_u16m4(__VA_ARGS__) |
| #define vssubu_vx_u16m4_m | ( | ... | ) | __riscv_vssubu_vx_u16m4_tumu(__VA_ARGS__) |
| #define vssubu_vx_u16m8 | ( | ... | ) | __riscv_vssubu_vx_u16m8(__VA_ARGS__) |
| #define vssubu_vx_u16m8_m | ( | ... | ) | __riscv_vssubu_vx_u16m8_tumu(__VA_ARGS__) |
| #define vssubu_vx_u16mf2 | ( | ... | ) | __riscv_vssubu_vx_u16mf2(__VA_ARGS__) |
| #define vssubu_vx_u16mf2_m | ( | ... | ) | __riscv_vssubu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vssubu_vx_u16mf4 | ( | ... | ) | __riscv_vssubu_vx_u16mf4(__VA_ARGS__) |
| #define vssubu_vx_u16mf4_m | ( | ... | ) | __riscv_vssubu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vssubu_vx_u32m1 | ( | ... | ) | __riscv_vssubu_vx_u32m1(__VA_ARGS__) |
| #define vssubu_vx_u32m1_m | ( | ... | ) | __riscv_vssubu_vx_u32m1_tumu(__VA_ARGS__) |
| #define vssubu_vx_u32m2 | ( | ... | ) | __riscv_vssubu_vx_u32m2(__VA_ARGS__) |
| #define vssubu_vx_u32m2_m | ( | ... | ) | __riscv_vssubu_vx_u32m2_tumu(__VA_ARGS__) |
| #define vssubu_vx_u32m4 | ( | ... | ) | __riscv_vssubu_vx_u32m4(__VA_ARGS__) |
| #define vssubu_vx_u32m4_m | ( | ... | ) | __riscv_vssubu_vx_u32m4_tumu(__VA_ARGS__) |
| #define vssubu_vx_u32m8 | ( | ... | ) | __riscv_vssubu_vx_u32m8(__VA_ARGS__) |
| #define vssubu_vx_u32m8_m | ( | ... | ) | __riscv_vssubu_vx_u32m8_tumu(__VA_ARGS__) |
| #define vssubu_vx_u32mf2 | ( | ... | ) | __riscv_vssubu_vx_u32mf2(__VA_ARGS__) |
| #define vssubu_vx_u32mf2_m | ( | ... | ) | __riscv_vssubu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vssubu_vx_u64m1 | ( | ... | ) | __riscv_vssubu_vx_u64m1(__VA_ARGS__) |
| #define vssubu_vx_u64m1_m | ( | ... | ) | __riscv_vssubu_vx_u64m1_tumu(__VA_ARGS__) |
| #define vssubu_vx_u64m2 | ( | ... | ) | __riscv_vssubu_vx_u64m2(__VA_ARGS__) |
| #define vssubu_vx_u64m2_m | ( | ... | ) | __riscv_vssubu_vx_u64m2_tumu(__VA_ARGS__) |
| #define vssubu_vx_u64m4 | ( | ... | ) | __riscv_vssubu_vx_u64m4(__VA_ARGS__) |
| #define vssubu_vx_u64m4_m | ( | ... | ) | __riscv_vssubu_vx_u64m4_tumu(__VA_ARGS__) |
| #define vssubu_vx_u64m8 | ( | ... | ) | __riscv_vssubu_vx_u64m8(__VA_ARGS__) |
| #define vssubu_vx_u64m8_m | ( | ... | ) | __riscv_vssubu_vx_u64m8_tumu(__VA_ARGS__) |
| #define vssubu_vx_u8m1 | ( | ... | ) | __riscv_vssubu_vx_u8m1(__VA_ARGS__) |
| #define vssubu_vx_u8m1_m | ( | ... | ) | __riscv_vssubu_vx_u8m1_tumu(__VA_ARGS__) |
| #define vssubu_vx_u8m2 | ( | ... | ) | __riscv_vssubu_vx_u8m2(__VA_ARGS__) |
| #define vssubu_vx_u8m2_m | ( | ... | ) | __riscv_vssubu_vx_u8m2_tumu(__VA_ARGS__) |
| #define vssubu_vx_u8m4 | ( | ... | ) | __riscv_vssubu_vx_u8m4(__VA_ARGS__) |
| #define vssubu_vx_u8m4_m | ( | ... | ) | __riscv_vssubu_vx_u8m4_tumu(__VA_ARGS__) |
| #define vssubu_vx_u8m8 | ( | ... | ) | __riscv_vssubu_vx_u8m8(__VA_ARGS__) |
| #define vssubu_vx_u8m8_m | ( | ... | ) | __riscv_vssubu_vx_u8m8_tumu(__VA_ARGS__) |
| #define vssubu_vx_u8mf2 | ( | ... | ) | __riscv_vssubu_vx_u8mf2(__VA_ARGS__) |
| #define vssubu_vx_u8mf2_m | ( | ... | ) | __riscv_vssubu_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vssubu_vx_u8mf4 | ( | ... | ) | __riscv_vssubu_vx_u8mf4(__VA_ARGS__) |
| #define vssubu_vx_u8mf4_m | ( | ... | ) | __riscv_vssubu_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vssubu_vx_u8mf8 | ( | ... | ) | __riscv_vssubu_vx_u8mf8(__VA_ARGS__) |
| #define vssubu_vx_u8mf8_m | ( | ... | ) | __riscv_vssubu_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vsub_vv_i16m1 | ( | ... | ) | __riscv_vsub_vv_i16m1(__VA_ARGS__) |
| #define vsub_vv_i16m1_m | ( | ... | ) | __riscv_vsub_vv_i16m1_tumu(__VA_ARGS__) |
| #define vsub_vv_i16m2 | ( | ... | ) | __riscv_vsub_vv_i16m2(__VA_ARGS__) |
| #define vsub_vv_i16m2_m | ( | ... | ) | __riscv_vsub_vv_i16m2_tumu(__VA_ARGS__) |
| #define vsub_vv_i16m4 | ( | ... | ) | __riscv_vsub_vv_i16m4(__VA_ARGS__) |
| #define vsub_vv_i16m4_m | ( | ... | ) | __riscv_vsub_vv_i16m4_tumu(__VA_ARGS__) |
| #define vsub_vv_i16m8 | ( | ... | ) | __riscv_vsub_vv_i16m8(__VA_ARGS__) |
| #define vsub_vv_i16m8_m | ( | ... | ) | __riscv_vsub_vv_i16m8_tumu(__VA_ARGS__) |
| #define vsub_vv_i16mf2 | ( | ... | ) | __riscv_vsub_vv_i16mf2(__VA_ARGS__) |
| #define vsub_vv_i16mf2_m | ( | ... | ) | __riscv_vsub_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vsub_vv_i16mf4 | ( | ... | ) | __riscv_vsub_vv_i16mf4(__VA_ARGS__) |
| #define vsub_vv_i16mf4_m | ( | ... | ) | __riscv_vsub_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vsub_vv_i32m1 | ( | ... | ) | __riscv_vsub_vv_i32m1(__VA_ARGS__) |
| #define vsub_vv_i32m1_m | ( | ... | ) | __riscv_vsub_vv_i32m1_tumu(__VA_ARGS__) |
| #define vsub_vv_i32m2 | ( | ... | ) | __riscv_vsub_vv_i32m2(__VA_ARGS__) |
| #define vsub_vv_i32m2_m | ( | ... | ) | __riscv_vsub_vv_i32m2_tumu(__VA_ARGS__) |
| #define vsub_vv_i32m4 | ( | ... | ) | __riscv_vsub_vv_i32m4(__VA_ARGS__) |
| #define vsub_vv_i32m4_m | ( | ... | ) | __riscv_vsub_vv_i32m4_tumu(__VA_ARGS__) |
| #define vsub_vv_i32m8 | ( | ... | ) | __riscv_vsub_vv_i32m8(__VA_ARGS__) |
| #define vsub_vv_i32m8_m | ( | ... | ) | __riscv_vsub_vv_i32m8_tumu(__VA_ARGS__) |
| #define vsub_vv_i32mf2 | ( | ... | ) | __riscv_vsub_vv_i32mf2(__VA_ARGS__) |
| #define vsub_vv_i32mf2_m | ( | ... | ) | __riscv_vsub_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vsub_vv_i64m1 | ( | ... | ) | __riscv_vsub_vv_i64m1(__VA_ARGS__) |
| #define vsub_vv_i64m1_m | ( | ... | ) | __riscv_vsub_vv_i64m1_tumu(__VA_ARGS__) |
| #define vsub_vv_i64m2 | ( | ... | ) | __riscv_vsub_vv_i64m2(__VA_ARGS__) |
| #define vsub_vv_i64m2_m | ( | ... | ) | __riscv_vsub_vv_i64m2_tumu(__VA_ARGS__) |
| #define vsub_vv_i64m4 | ( | ... | ) | __riscv_vsub_vv_i64m4(__VA_ARGS__) |
| #define vsub_vv_i64m4_m | ( | ... | ) | __riscv_vsub_vv_i64m4_tumu(__VA_ARGS__) |
| #define vsub_vv_i64m8 | ( | ... | ) | __riscv_vsub_vv_i64m8(__VA_ARGS__) |
| #define vsub_vv_i64m8_m | ( | ... | ) | __riscv_vsub_vv_i64m8_tumu(__VA_ARGS__) |
| #define vsub_vv_i8m1 | ( | ... | ) | __riscv_vsub_vv_i8m1(__VA_ARGS__) |
| #define vsub_vv_i8m1_m | ( | ... | ) | __riscv_vsub_vv_i8m1_tumu(__VA_ARGS__) |
| #define vsub_vv_i8m2 | ( | ... | ) | __riscv_vsub_vv_i8m2(__VA_ARGS__) |
| #define vsub_vv_i8m2_m | ( | ... | ) | __riscv_vsub_vv_i8m2_tumu(__VA_ARGS__) |
| #define vsub_vv_i8m4 | ( | ... | ) | __riscv_vsub_vv_i8m4(__VA_ARGS__) |
| #define vsub_vv_i8m4_m | ( | ... | ) | __riscv_vsub_vv_i8m4_tumu(__VA_ARGS__) |
| #define vsub_vv_i8m8 | ( | ... | ) | __riscv_vsub_vv_i8m8(__VA_ARGS__) |
| #define vsub_vv_i8m8_m | ( | ... | ) | __riscv_vsub_vv_i8m8_tumu(__VA_ARGS__) |
| #define vsub_vv_i8mf2 | ( | ... | ) | __riscv_vsub_vv_i8mf2(__VA_ARGS__) |
| #define vsub_vv_i8mf2_m | ( | ... | ) | __riscv_vsub_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vsub_vv_i8mf4 | ( | ... | ) | __riscv_vsub_vv_i8mf4(__VA_ARGS__) |
| #define vsub_vv_i8mf4_m | ( | ... | ) | __riscv_vsub_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vsub_vv_i8mf8 | ( | ... | ) | __riscv_vsub_vv_i8mf8(__VA_ARGS__) |
| #define vsub_vv_i8mf8_m | ( | ... | ) | __riscv_vsub_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vsub_vv_u16m1 | ( | ... | ) | __riscv_vsub_vv_u16m1(__VA_ARGS__) |
| #define vsub_vv_u16m1_m | ( | ... | ) | __riscv_vsub_vv_u16m1_tumu(__VA_ARGS__) |
| #define vsub_vv_u16m2 | ( | ... | ) | __riscv_vsub_vv_u16m2(__VA_ARGS__) |
| #define vsub_vv_u16m2_m | ( | ... | ) | __riscv_vsub_vv_u16m2_tumu(__VA_ARGS__) |
| #define vsub_vv_u16m4 | ( | ... | ) | __riscv_vsub_vv_u16m4(__VA_ARGS__) |
| #define vsub_vv_u16m4_m | ( | ... | ) | __riscv_vsub_vv_u16m4_tumu(__VA_ARGS__) |
| #define vsub_vv_u16m8 | ( | ... | ) | __riscv_vsub_vv_u16m8(__VA_ARGS__) |
| #define vsub_vv_u16m8_m | ( | ... | ) | __riscv_vsub_vv_u16m8_tumu(__VA_ARGS__) |
| #define vsub_vv_u16mf2 | ( | ... | ) | __riscv_vsub_vv_u16mf2(__VA_ARGS__) |
| #define vsub_vv_u16mf2_m | ( | ... | ) | __riscv_vsub_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vsub_vv_u16mf4 | ( | ... | ) | __riscv_vsub_vv_u16mf4(__VA_ARGS__) |
| #define vsub_vv_u16mf4_m | ( | ... | ) | __riscv_vsub_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vsub_vv_u32m1 | ( | ... | ) | __riscv_vsub_vv_u32m1(__VA_ARGS__) |
| #define vsub_vv_u32m1_m | ( | ... | ) | __riscv_vsub_vv_u32m1_tumu(__VA_ARGS__) |
| #define vsub_vv_u32m2 | ( | ... | ) | __riscv_vsub_vv_u32m2(__VA_ARGS__) |
| #define vsub_vv_u32m2_m | ( | ... | ) | __riscv_vsub_vv_u32m2_tumu(__VA_ARGS__) |
| #define vsub_vv_u32m4 | ( | ... | ) | __riscv_vsub_vv_u32m4(__VA_ARGS__) |
| #define vsub_vv_u32m4_m | ( | ... | ) | __riscv_vsub_vv_u32m4_tumu(__VA_ARGS__) |
| #define vsub_vv_u32m8 | ( | ... | ) | __riscv_vsub_vv_u32m8(__VA_ARGS__) |
| #define vsub_vv_u32m8_m | ( | ... | ) | __riscv_vsub_vv_u32m8_tumu(__VA_ARGS__) |
| #define vsub_vv_u32mf2 | ( | ... | ) | __riscv_vsub_vv_u32mf2(__VA_ARGS__) |
| #define vsub_vv_u32mf2_m | ( | ... | ) | __riscv_vsub_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vsub_vv_u64m1 | ( | ... | ) | __riscv_vsub_vv_u64m1(__VA_ARGS__) |
| #define vsub_vv_u64m1_m | ( | ... | ) | __riscv_vsub_vv_u64m1_tumu(__VA_ARGS__) |
| #define vsub_vv_u64m2 | ( | ... | ) | __riscv_vsub_vv_u64m2(__VA_ARGS__) |
| #define vsub_vv_u64m2_m | ( | ... | ) | __riscv_vsub_vv_u64m2_tumu(__VA_ARGS__) |
| #define vsub_vv_u64m4 | ( | ... | ) | __riscv_vsub_vv_u64m4(__VA_ARGS__) |
| #define vsub_vv_u64m4_m | ( | ... | ) | __riscv_vsub_vv_u64m4_tumu(__VA_ARGS__) |
| #define vsub_vv_u64m8 | ( | ... | ) | __riscv_vsub_vv_u64m8(__VA_ARGS__) |
| #define vsub_vv_u64m8_m | ( | ... | ) | __riscv_vsub_vv_u64m8_tumu(__VA_ARGS__) |
| #define vsub_vv_u8m1 | ( | ... | ) | __riscv_vsub_vv_u8m1(__VA_ARGS__) |
| #define vsub_vv_u8m1_m | ( | ... | ) | __riscv_vsub_vv_u8m1_tumu(__VA_ARGS__) |
| #define vsub_vv_u8m2 | ( | ... | ) | __riscv_vsub_vv_u8m2(__VA_ARGS__) |
| #define vsub_vv_u8m2_m | ( | ... | ) | __riscv_vsub_vv_u8m2_tumu(__VA_ARGS__) |
| #define vsub_vv_u8m4 | ( | ... | ) | __riscv_vsub_vv_u8m4(__VA_ARGS__) |
| #define vsub_vv_u8m4_m | ( | ... | ) | __riscv_vsub_vv_u8m4_tumu(__VA_ARGS__) |
| #define vsub_vv_u8m8 | ( | ... | ) | __riscv_vsub_vv_u8m8(__VA_ARGS__) |
| #define vsub_vv_u8m8_m | ( | ... | ) | __riscv_vsub_vv_u8m8_tumu(__VA_ARGS__) |
| #define vsub_vv_u8mf2 | ( | ... | ) | __riscv_vsub_vv_u8mf2(__VA_ARGS__) |
| #define vsub_vv_u8mf2_m | ( | ... | ) | __riscv_vsub_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vsub_vv_u8mf4 | ( | ... | ) | __riscv_vsub_vv_u8mf4(__VA_ARGS__) |
| #define vsub_vv_u8mf4_m | ( | ... | ) | __riscv_vsub_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vsub_vv_u8mf8 | ( | ... | ) | __riscv_vsub_vv_u8mf8(__VA_ARGS__) |
| #define vsub_vv_u8mf8_m | ( | ... | ) | __riscv_vsub_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vsub_vx_i16m1 | ( | ... | ) | __riscv_vsub_vx_i16m1(__VA_ARGS__) |
| #define vsub_vx_i16m1_m | ( | ... | ) | __riscv_vsub_vx_i16m1_tumu(__VA_ARGS__) |
| #define vsub_vx_i16m2 | ( | ... | ) | __riscv_vsub_vx_i16m2(__VA_ARGS__) |
| #define vsub_vx_i16m2_m | ( | ... | ) | __riscv_vsub_vx_i16m2_tumu(__VA_ARGS__) |
| #define vsub_vx_i16m4 | ( | ... | ) | __riscv_vsub_vx_i16m4(__VA_ARGS__) |
| #define vsub_vx_i16m4_m | ( | ... | ) | __riscv_vsub_vx_i16m4_tumu(__VA_ARGS__) |
| #define vsub_vx_i16m8 | ( | ... | ) | __riscv_vsub_vx_i16m8(__VA_ARGS__) |
| #define vsub_vx_i16m8_m | ( | ... | ) | __riscv_vsub_vx_i16m8_tumu(__VA_ARGS__) |
| #define vsub_vx_i16mf2 | ( | ... | ) | __riscv_vsub_vx_i16mf2(__VA_ARGS__) |
| #define vsub_vx_i16mf2_m | ( | ... | ) | __riscv_vsub_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vsub_vx_i16mf4 | ( | ... | ) | __riscv_vsub_vx_i16mf4(__VA_ARGS__) |
| #define vsub_vx_i16mf4_m | ( | ... | ) | __riscv_vsub_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vsub_vx_i32m1 | ( | ... | ) | __riscv_vsub_vx_i32m1(__VA_ARGS__) |
| #define vsub_vx_i32m1_m | ( | ... | ) | __riscv_vsub_vx_i32m1_tumu(__VA_ARGS__) |
| #define vsub_vx_i32m2 | ( | ... | ) | __riscv_vsub_vx_i32m2(__VA_ARGS__) |
| #define vsub_vx_i32m2_m | ( | ... | ) | __riscv_vsub_vx_i32m2_tumu(__VA_ARGS__) |
| #define vsub_vx_i32m4 | ( | ... | ) | __riscv_vsub_vx_i32m4(__VA_ARGS__) |
| #define vsub_vx_i32m4_m | ( | ... | ) | __riscv_vsub_vx_i32m4_tumu(__VA_ARGS__) |
| #define vsub_vx_i32m8 | ( | ... | ) | __riscv_vsub_vx_i32m8(__VA_ARGS__) |
| #define vsub_vx_i32m8_m | ( | ... | ) | __riscv_vsub_vx_i32m8_tumu(__VA_ARGS__) |
| #define vsub_vx_i32mf2 | ( | ... | ) | __riscv_vsub_vx_i32mf2(__VA_ARGS__) |
| #define vsub_vx_i32mf2_m | ( | ... | ) | __riscv_vsub_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vsub_vx_i64m1 | ( | ... | ) | __riscv_vsub_vx_i64m1(__VA_ARGS__) |
| #define vsub_vx_i64m1_m | ( | ... | ) | __riscv_vsub_vx_i64m1_tumu(__VA_ARGS__) |
| #define vsub_vx_i64m2 | ( | ... | ) | __riscv_vsub_vx_i64m2(__VA_ARGS__) |
| #define vsub_vx_i64m2_m | ( | ... | ) | __riscv_vsub_vx_i64m2_tumu(__VA_ARGS__) |
| #define vsub_vx_i64m4 | ( | ... | ) | __riscv_vsub_vx_i64m4(__VA_ARGS__) |
| #define vsub_vx_i64m4_m | ( | ... | ) | __riscv_vsub_vx_i64m4_tumu(__VA_ARGS__) |
| #define vsub_vx_i64m8 | ( | ... | ) | __riscv_vsub_vx_i64m8(__VA_ARGS__) |
| #define vsub_vx_i64m8_m | ( | ... | ) | __riscv_vsub_vx_i64m8_tumu(__VA_ARGS__) |
| #define vsub_vx_i8m1 | ( | ... | ) | __riscv_vsub_vx_i8m1(__VA_ARGS__) |
| #define vsub_vx_i8m1_m | ( | ... | ) | __riscv_vsub_vx_i8m1_tumu(__VA_ARGS__) |
| #define vsub_vx_i8m2 | ( | ... | ) | __riscv_vsub_vx_i8m2(__VA_ARGS__) |
| #define vsub_vx_i8m2_m | ( | ... | ) | __riscv_vsub_vx_i8m2_tumu(__VA_ARGS__) |
| #define vsub_vx_i8m4 | ( | ... | ) | __riscv_vsub_vx_i8m4(__VA_ARGS__) |
| #define vsub_vx_i8m4_m | ( | ... | ) | __riscv_vsub_vx_i8m4_tumu(__VA_ARGS__) |
| #define vsub_vx_i8m8 | ( | ... | ) | __riscv_vsub_vx_i8m8(__VA_ARGS__) |
| #define vsub_vx_i8m8_m | ( | ... | ) | __riscv_vsub_vx_i8m8_tumu(__VA_ARGS__) |
| #define vsub_vx_i8mf2 | ( | ... | ) | __riscv_vsub_vx_i8mf2(__VA_ARGS__) |
| #define vsub_vx_i8mf2_m | ( | ... | ) | __riscv_vsub_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vsub_vx_i8mf4 | ( | ... | ) | __riscv_vsub_vx_i8mf4(__VA_ARGS__) |
| #define vsub_vx_i8mf4_m | ( | ... | ) | __riscv_vsub_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vsub_vx_i8mf8 | ( | ... | ) | __riscv_vsub_vx_i8mf8(__VA_ARGS__) |
| #define vsub_vx_i8mf8_m | ( | ... | ) | __riscv_vsub_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vsub_vx_u16m1 | ( | ... | ) | __riscv_vsub_vx_u16m1(__VA_ARGS__) |
| #define vsub_vx_u16m1_m | ( | ... | ) | __riscv_vsub_vx_u16m1_tumu(__VA_ARGS__) |
| #define vsub_vx_u16m2 | ( | ... | ) | __riscv_vsub_vx_u16m2(__VA_ARGS__) |
| #define vsub_vx_u16m2_m | ( | ... | ) | __riscv_vsub_vx_u16m2_tumu(__VA_ARGS__) |
| #define vsub_vx_u16m4 | ( | ... | ) | __riscv_vsub_vx_u16m4(__VA_ARGS__) |
| #define vsub_vx_u16m4_m | ( | ... | ) | __riscv_vsub_vx_u16m4_tumu(__VA_ARGS__) |
| #define vsub_vx_u16m8 | ( | ... | ) | __riscv_vsub_vx_u16m8(__VA_ARGS__) |
| #define vsub_vx_u16m8_m | ( | ... | ) | __riscv_vsub_vx_u16m8_tumu(__VA_ARGS__) |
| #define vsub_vx_u16mf2 | ( | ... | ) | __riscv_vsub_vx_u16mf2(__VA_ARGS__) |
| #define vsub_vx_u16mf2_m | ( | ... | ) | __riscv_vsub_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vsub_vx_u16mf4 | ( | ... | ) | __riscv_vsub_vx_u16mf4(__VA_ARGS__) |
| #define vsub_vx_u16mf4_m | ( | ... | ) | __riscv_vsub_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vsub_vx_u32m1 | ( | ... | ) | __riscv_vsub_vx_u32m1(__VA_ARGS__) |
| #define vsub_vx_u32m1_m | ( | ... | ) | __riscv_vsub_vx_u32m1_tumu(__VA_ARGS__) |
| #define vsub_vx_u32m2 | ( | ... | ) | __riscv_vsub_vx_u32m2(__VA_ARGS__) |
| #define vsub_vx_u32m2_m | ( | ... | ) | __riscv_vsub_vx_u32m2_tumu(__VA_ARGS__) |
| #define vsub_vx_u32m4 | ( | ... | ) | __riscv_vsub_vx_u32m4(__VA_ARGS__) |
| #define vsub_vx_u32m4_m | ( | ... | ) | __riscv_vsub_vx_u32m4_tumu(__VA_ARGS__) |
| #define vsub_vx_u32m8 | ( | ... | ) | __riscv_vsub_vx_u32m8(__VA_ARGS__) |
| #define vsub_vx_u32m8_m | ( | ... | ) | __riscv_vsub_vx_u32m8_tumu(__VA_ARGS__) |
| #define vsub_vx_u32mf2 | ( | ... | ) | __riscv_vsub_vx_u32mf2(__VA_ARGS__) |
| #define vsub_vx_u32mf2_m | ( | ... | ) | __riscv_vsub_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vsub_vx_u64m1 | ( | ... | ) | __riscv_vsub_vx_u64m1(__VA_ARGS__) |
| #define vsub_vx_u64m1_m | ( | ... | ) | __riscv_vsub_vx_u64m1_tumu(__VA_ARGS__) |
| #define vsub_vx_u64m2 | ( | ... | ) | __riscv_vsub_vx_u64m2(__VA_ARGS__) |
| #define vsub_vx_u64m2_m | ( | ... | ) | __riscv_vsub_vx_u64m2_tumu(__VA_ARGS__) |
| #define vsub_vx_u64m4 | ( | ... | ) | __riscv_vsub_vx_u64m4(__VA_ARGS__) |
| #define vsub_vx_u64m4_m | ( | ... | ) | __riscv_vsub_vx_u64m4_tumu(__VA_ARGS__) |
| #define vsub_vx_u64m8 | ( | ... | ) | __riscv_vsub_vx_u64m8(__VA_ARGS__) |
| #define vsub_vx_u64m8_m | ( | ... | ) | __riscv_vsub_vx_u64m8_tumu(__VA_ARGS__) |
| #define vsub_vx_u8m1 | ( | ... | ) | __riscv_vsub_vx_u8m1(__VA_ARGS__) |
| #define vsub_vx_u8m1_m | ( | ... | ) | __riscv_vsub_vx_u8m1_tumu(__VA_ARGS__) |
| #define vsub_vx_u8m2 | ( | ... | ) | __riscv_vsub_vx_u8m2(__VA_ARGS__) |
| #define vsub_vx_u8m2_m | ( | ... | ) | __riscv_vsub_vx_u8m2_tumu(__VA_ARGS__) |
| #define vsub_vx_u8m4 | ( | ... | ) | __riscv_vsub_vx_u8m4(__VA_ARGS__) |
| #define vsub_vx_u8m4_m | ( | ... | ) | __riscv_vsub_vx_u8m4_tumu(__VA_ARGS__) |
| #define vsub_vx_u8m8 | ( | ... | ) | __riscv_vsub_vx_u8m8(__VA_ARGS__) |
| #define vsub_vx_u8m8_m | ( | ... | ) | __riscv_vsub_vx_u8m8_tumu(__VA_ARGS__) |
| #define vsub_vx_u8mf2 | ( | ... | ) | __riscv_vsub_vx_u8mf2(__VA_ARGS__) |
| #define vsub_vx_u8mf2_m | ( | ... | ) | __riscv_vsub_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vsub_vx_u8mf4 | ( | ... | ) | __riscv_vsub_vx_u8mf4(__VA_ARGS__) |
| #define vsub_vx_u8mf4_m | ( | ... | ) | __riscv_vsub_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vsub_vx_u8mf8 | ( | ... | ) | __riscv_vsub_vx_u8mf8(__VA_ARGS__) |
| #define vsub_vx_u8mf8_m | ( | ... | ) | __riscv_vsub_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vsuxei16_v_f16m1 | ( | ... | ) | __riscv_vsuxei16_v_f16m1(__VA_ARGS__) |
| #define vsuxei16_v_f16m1_m | ( | ... | ) | __riscv_vsuxei16_v_f16m1_m(__VA_ARGS__) |
| #define vsuxei16_v_f16m2 | ( | ... | ) | __riscv_vsuxei16_v_f16m2(__VA_ARGS__) |
| #define vsuxei16_v_f16m2_m | ( | ... | ) | __riscv_vsuxei16_v_f16m2_m(__VA_ARGS__) |
| #define vsuxei16_v_f16m4 | ( | ... | ) | __riscv_vsuxei16_v_f16m4(__VA_ARGS__) |
| #define vsuxei16_v_f16m4_m | ( | ... | ) | __riscv_vsuxei16_v_f16m4_m(__VA_ARGS__) |
| #define vsuxei16_v_f16m8 | ( | ... | ) | __riscv_vsuxei16_v_f16m8(__VA_ARGS__) |
| #define vsuxei16_v_f16m8_m | ( | ... | ) | __riscv_vsuxei16_v_f16m8_m(__VA_ARGS__) |
| #define vsuxei16_v_f16mf2 | ( | ... | ) | __riscv_vsuxei16_v_f16mf2(__VA_ARGS__) |
| #define vsuxei16_v_f16mf2_m | ( | ... | ) | __riscv_vsuxei16_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxei16_v_f16mf4 | ( | ... | ) | __riscv_vsuxei16_v_f16mf4(__VA_ARGS__) |
| #define vsuxei16_v_f16mf4_m | ( | ... | ) | __riscv_vsuxei16_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxei16_v_f32m1 | ( | ... | ) | __riscv_vsuxei16_v_f32m1(__VA_ARGS__) |
| #define vsuxei16_v_f32m1_m | ( | ... | ) | __riscv_vsuxei16_v_f32m1_m(__VA_ARGS__) |
| #define vsuxei16_v_f32m2 | ( | ... | ) | __riscv_vsuxei16_v_f32m2(__VA_ARGS__) |
| #define vsuxei16_v_f32m2_m | ( | ... | ) | __riscv_vsuxei16_v_f32m2_m(__VA_ARGS__) |
| #define vsuxei16_v_f32m4 | ( | ... | ) | __riscv_vsuxei16_v_f32m4(__VA_ARGS__) |
| #define vsuxei16_v_f32m4_m | ( | ... | ) | __riscv_vsuxei16_v_f32m4_m(__VA_ARGS__) |
| #define vsuxei16_v_f32m8 | ( | ... | ) | __riscv_vsuxei16_v_f32m8(__VA_ARGS__) |
| #define vsuxei16_v_f32m8_m | ( | ... | ) | __riscv_vsuxei16_v_f32m8_m(__VA_ARGS__) |
| #define vsuxei16_v_f32mf2 | ( | ... | ) | __riscv_vsuxei16_v_f32mf2(__VA_ARGS__) |
| #define vsuxei16_v_f32mf2_m | ( | ... | ) | __riscv_vsuxei16_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxei16_v_f64m1 | ( | ... | ) | __riscv_vsuxei16_v_f64m1(__VA_ARGS__) |
| #define vsuxei16_v_f64m1_m | ( | ... | ) | __riscv_vsuxei16_v_f64m1_m(__VA_ARGS__) |
| #define vsuxei16_v_f64m2 | ( | ... | ) | __riscv_vsuxei16_v_f64m2(__VA_ARGS__) |
| #define vsuxei16_v_f64m2_m | ( | ... | ) | __riscv_vsuxei16_v_f64m2_m(__VA_ARGS__) |
| #define vsuxei16_v_f64m4 | ( | ... | ) | __riscv_vsuxei16_v_f64m4(__VA_ARGS__) |
| #define vsuxei16_v_f64m4_m | ( | ... | ) | __riscv_vsuxei16_v_f64m4_m(__VA_ARGS__) |
| #define vsuxei16_v_f64m8 | ( | ... | ) | __riscv_vsuxei16_v_f64m8(__VA_ARGS__) |
| #define vsuxei16_v_f64m8_m | ( | ... | ) | __riscv_vsuxei16_v_f64m8_m(__VA_ARGS__) |
| #define vsuxei16_v_i16m1 | ( | ... | ) | __riscv_vsuxei16_v_i16m1(__VA_ARGS__) |
| #define vsuxei16_v_i16m1_m | ( | ... | ) | __riscv_vsuxei16_v_i16m1_m(__VA_ARGS__) |
| #define vsuxei16_v_i16m2 | ( | ... | ) | __riscv_vsuxei16_v_i16m2(__VA_ARGS__) |
| #define vsuxei16_v_i16m2_m | ( | ... | ) | __riscv_vsuxei16_v_i16m2_m(__VA_ARGS__) |
| #define vsuxei16_v_i16m4 | ( | ... | ) | __riscv_vsuxei16_v_i16m4(__VA_ARGS__) |
| #define vsuxei16_v_i16m4_m | ( | ... | ) | __riscv_vsuxei16_v_i16m4_m(__VA_ARGS__) |
| #define vsuxei16_v_i16m8 | ( | ... | ) | __riscv_vsuxei16_v_i16m8(__VA_ARGS__) |
| #define vsuxei16_v_i16m8_m | ( | ... | ) | __riscv_vsuxei16_v_i16m8_m(__VA_ARGS__) |
| #define vsuxei16_v_i16mf2 | ( | ... | ) | __riscv_vsuxei16_v_i16mf2(__VA_ARGS__) |
| #define vsuxei16_v_i16mf2_m | ( | ... | ) | __riscv_vsuxei16_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxei16_v_i16mf4 | ( | ... | ) | __riscv_vsuxei16_v_i16mf4(__VA_ARGS__) |
| #define vsuxei16_v_i16mf4_m | ( | ... | ) | __riscv_vsuxei16_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxei16_v_i32m1 | ( | ... | ) | __riscv_vsuxei16_v_i32m1(__VA_ARGS__) |
| #define vsuxei16_v_i32m1_m | ( | ... | ) | __riscv_vsuxei16_v_i32m1_m(__VA_ARGS__) |
| #define vsuxei16_v_i32m2 | ( | ... | ) | __riscv_vsuxei16_v_i32m2(__VA_ARGS__) |
| #define vsuxei16_v_i32m2_m | ( | ... | ) | __riscv_vsuxei16_v_i32m2_m(__VA_ARGS__) |
| #define vsuxei16_v_i32m4 | ( | ... | ) | __riscv_vsuxei16_v_i32m4(__VA_ARGS__) |
| #define vsuxei16_v_i32m4_m | ( | ... | ) | __riscv_vsuxei16_v_i32m4_m(__VA_ARGS__) |
| #define vsuxei16_v_i32m8 | ( | ... | ) | __riscv_vsuxei16_v_i32m8(__VA_ARGS__) |
| #define vsuxei16_v_i32m8_m | ( | ... | ) | __riscv_vsuxei16_v_i32m8_m(__VA_ARGS__) |
| #define vsuxei16_v_i32mf2 | ( | ... | ) | __riscv_vsuxei16_v_i32mf2(__VA_ARGS__) |
| #define vsuxei16_v_i32mf2_m | ( | ... | ) | __riscv_vsuxei16_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxei16_v_i64m1 | ( | ... | ) | __riscv_vsuxei16_v_i64m1(__VA_ARGS__) |
| #define vsuxei16_v_i64m1_m | ( | ... | ) | __riscv_vsuxei16_v_i64m1_m(__VA_ARGS__) |
| #define vsuxei16_v_i64m2 | ( | ... | ) | __riscv_vsuxei16_v_i64m2(__VA_ARGS__) |
| #define vsuxei16_v_i64m2_m | ( | ... | ) | __riscv_vsuxei16_v_i64m2_m(__VA_ARGS__) |
| #define vsuxei16_v_i64m4 | ( | ... | ) | __riscv_vsuxei16_v_i64m4(__VA_ARGS__) |
| #define vsuxei16_v_i64m4_m | ( | ... | ) | __riscv_vsuxei16_v_i64m4_m(__VA_ARGS__) |
| #define vsuxei16_v_i64m8 | ( | ... | ) | __riscv_vsuxei16_v_i64m8(__VA_ARGS__) |
| #define vsuxei16_v_i64m8_m | ( | ... | ) | __riscv_vsuxei16_v_i64m8_m(__VA_ARGS__) |
| #define vsuxei16_v_i8m1 | ( | ... | ) | __riscv_vsuxei16_v_i8m1(__VA_ARGS__) |
| #define vsuxei16_v_i8m1_m | ( | ... | ) | __riscv_vsuxei16_v_i8m1_m(__VA_ARGS__) |
| #define vsuxei16_v_i8m2 | ( | ... | ) | __riscv_vsuxei16_v_i8m2(__VA_ARGS__) |
| #define vsuxei16_v_i8m2_m | ( | ... | ) | __riscv_vsuxei16_v_i8m2_m(__VA_ARGS__) |
| #define vsuxei16_v_i8m4 | ( | ... | ) | __riscv_vsuxei16_v_i8m4(__VA_ARGS__) |
| #define vsuxei16_v_i8m4_m | ( | ... | ) | __riscv_vsuxei16_v_i8m4_m(__VA_ARGS__) |
| #define vsuxei16_v_i8mf2 | ( | ... | ) | __riscv_vsuxei16_v_i8mf2(__VA_ARGS__) |
| #define vsuxei16_v_i8mf2_m | ( | ... | ) | __riscv_vsuxei16_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxei16_v_i8mf4 | ( | ... | ) | __riscv_vsuxei16_v_i8mf4(__VA_ARGS__) |
| #define vsuxei16_v_i8mf4_m | ( | ... | ) | __riscv_vsuxei16_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxei16_v_i8mf8 | ( | ... | ) | __riscv_vsuxei16_v_i8mf8(__VA_ARGS__) |
| #define vsuxei16_v_i8mf8_m | ( | ... | ) | __riscv_vsuxei16_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxei16_v_u16m1 | ( | ... | ) | __riscv_vsuxei16_v_u16m1(__VA_ARGS__) |
| #define vsuxei16_v_u16m1_m | ( | ... | ) | __riscv_vsuxei16_v_u16m1_m(__VA_ARGS__) |
| #define vsuxei16_v_u16m2 | ( | ... | ) | __riscv_vsuxei16_v_u16m2(__VA_ARGS__) |
| #define vsuxei16_v_u16m2_m | ( | ... | ) | __riscv_vsuxei16_v_u16m2_m(__VA_ARGS__) |
| #define vsuxei16_v_u16m4 | ( | ... | ) | __riscv_vsuxei16_v_u16m4(__VA_ARGS__) |
| #define vsuxei16_v_u16m4_m | ( | ... | ) | __riscv_vsuxei16_v_u16m4_m(__VA_ARGS__) |
| #define vsuxei16_v_u16m8 | ( | ... | ) | __riscv_vsuxei16_v_u16m8(__VA_ARGS__) |
| #define vsuxei16_v_u16m8_m | ( | ... | ) | __riscv_vsuxei16_v_u16m8_m(__VA_ARGS__) |
| #define vsuxei16_v_u16mf2 | ( | ... | ) | __riscv_vsuxei16_v_u16mf2(__VA_ARGS__) |
| #define vsuxei16_v_u16mf2_m | ( | ... | ) | __riscv_vsuxei16_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxei16_v_u16mf4 | ( | ... | ) | __riscv_vsuxei16_v_u16mf4(__VA_ARGS__) |
| #define vsuxei16_v_u16mf4_m | ( | ... | ) | __riscv_vsuxei16_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxei16_v_u32m1 | ( | ... | ) | __riscv_vsuxei16_v_u32m1(__VA_ARGS__) |
| #define vsuxei16_v_u32m1_m | ( | ... | ) | __riscv_vsuxei16_v_u32m1_m(__VA_ARGS__) |
| #define vsuxei16_v_u32m2 | ( | ... | ) | __riscv_vsuxei16_v_u32m2(__VA_ARGS__) |
| #define vsuxei16_v_u32m2_m | ( | ... | ) | __riscv_vsuxei16_v_u32m2_m(__VA_ARGS__) |
| #define vsuxei16_v_u32m4 | ( | ... | ) | __riscv_vsuxei16_v_u32m4(__VA_ARGS__) |
| #define vsuxei16_v_u32m4_m | ( | ... | ) | __riscv_vsuxei16_v_u32m4_m(__VA_ARGS__) |
| #define vsuxei16_v_u32m8 | ( | ... | ) | __riscv_vsuxei16_v_u32m8(__VA_ARGS__) |
| #define vsuxei16_v_u32m8_m | ( | ... | ) | __riscv_vsuxei16_v_u32m8_m(__VA_ARGS__) |
| #define vsuxei16_v_u32mf2 | ( | ... | ) | __riscv_vsuxei16_v_u32mf2(__VA_ARGS__) |
| #define vsuxei16_v_u32mf2_m | ( | ... | ) | __riscv_vsuxei16_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxei16_v_u64m1 | ( | ... | ) | __riscv_vsuxei16_v_u64m1(__VA_ARGS__) |
| #define vsuxei16_v_u64m1_m | ( | ... | ) | __riscv_vsuxei16_v_u64m1_m(__VA_ARGS__) |
| #define vsuxei16_v_u64m2 | ( | ... | ) | __riscv_vsuxei16_v_u64m2(__VA_ARGS__) |
| #define vsuxei16_v_u64m2_m | ( | ... | ) | __riscv_vsuxei16_v_u64m2_m(__VA_ARGS__) |
| #define vsuxei16_v_u64m4 | ( | ... | ) | __riscv_vsuxei16_v_u64m4(__VA_ARGS__) |
| #define vsuxei16_v_u64m4_m | ( | ... | ) | __riscv_vsuxei16_v_u64m4_m(__VA_ARGS__) |
| #define vsuxei16_v_u64m8 | ( | ... | ) | __riscv_vsuxei16_v_u64m8(__VA_ARGS__) |
| #define vsuxei16_v_u64m8_m | ( | ... | ) | __riscv_vsuxei16_v_u64m8_m(__VA_ARGS__) |
| #define vsuxei16_v_u8m1 | ( | ... | ) | __riscv_vsuxei16_v_u8m1(__VA_ARGS__) |
| #define vsuxei16_v_u8m1_m | ( | ... | ) | __riscv_vsuxei16_v_u8m1_m(__VA_ARGS__) |
| #define vsuxei16_v_u8m2 | ( | ... | ) | __riscv_vsuxei16_v_u8m2(__VA_ARGS__) |
| #define vsuxei16_v_u8m2_m | ( | ... | ) | __riscv_vsuxei16_v_u8m2_m(__VA_ARGS__) |
| #define vsuxei16_v_u8m4 | ( | ... | ) | __riscv_vsuxei16_v_u8m4(__VA_ARGS__) |
| #define vsuxei16_v_u8m4_m | ( | ... | ) | __riscv_vsuxei16_v_u8m4_m(__VA_ARGS__) |
| #define vsuxei16_v_u8mf2 | ( | ... | ) | __riscv_vsuxei16_v_u8mf2(__VA_ARGS__) |
| #define vsuxei16_v_u8mf2_m | ( | ... | ) | __riscv_vsuxei16_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxei16_v_u8mf4 | ( | ... | ) | __riscv_vsuxei16_v_u8mf4(__VA_ARGS__) |
| #define vsuxei16_v_u8mf4_m | ( | ... | ) | __riscv_vsuxei16_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxei16_v_u8mf8 | ( | ... | ) | __riscv_vsuxei16_v_u8mf8(__VA_ARGS__) |
| #define vsuxei16_v_u8mf8_m | ( | ... | ) | __riscv_vsuxei16_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxei32_v_f16m1 | ( | ... | ) | __riscv_vsuxei32_v_f16m1(__VA_ARGS__) |
| #define vsuxei32_v_f16m1_m | ( | ... | ) | __riscv_vsuxei32_v_f16m1_m(__VA_ARGS__) |
| #define vsuxei32_v_f16m2 | ( | ... | ) | __riscv_vsuxei32_v_f16m2(__VA_ARGS__) |
| #define vsuxei32_v_f16m2_m | ( | ... | ) | __riscv_vsuxei32_v_f16m2_m(__VA_ARGS__) |
| #define vsuxei32_v_f16m4 | ( | ... | ) | __riscv_vsuxei32_v_f16m4(__VA_ARGS__) |
| #define vsuxei32_v_f16m4_m | ( | ... | ) | __riscv_vsuxei32_v_f16m4_m(__VA_ARGS__) |
| #define vsuxei32_v_f16mf2 | ( | ... | ) | __riscv_vsuxei32_v_f16mf2(__VA_ARGS__) |
| #define vsuxei32_v_f16mf2_m | ( | ... | ) | __riscv_vsuxei32_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxei32_v_f16mf4 | ( | ... | ) | __riscv_vsuxei32_v_f16mf4(__VA_ARGS__) |
| #define vsuxei32_v_f16mf4_m | ( | ... | ) | __riscv_vsuxei32_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxei32_v_f32m1 | ( | ... | ) | __riscv_vsuxei32_v_f32m1(__VA_ARGS__) |
| #define vsuxei32_v_f32m1_m | ( | ... | ) | __riscv_vsuxei32_v_f32m1_m(__VA_ARGS__) |
| #define vsuxei32_v_f32m2 | ( | ... | ) | __riscv_vsuxei32_v_f32m2(__VA_ARGS__) |
| #define vsuxei32_v_f32m2_m | ( | ... | ) | __riscv_vsuxei32_v_f32m2_m(__VA_ARGS__) |
| #define vsuxei32_v_f32m4 | ( | ... | ) | __riscv_vsuxei32_v_f32m4(__VA_ARGS__) |
| #define vsuxei32_v_f32m4_m | ( | ... | ) | __riscv_vsuxei32_v_f32m4_m(__VA_ARGS__) |
| #define vsuxei32_v_f32m8 | ( | ... | ) | __riscv_vsuxei32_v_f32m8(__VA_ARGS__) |
| #define vsuxei32_v_f32m8_m | ( | ... | ) | __riscv_vsuxei32_v_f32m8_m(__VA_ARGS__) |
| #define vsuxei32_v_f32mf2 | ( | ... | ) | __riscv_vsuxei32_v_f32mf2(__VA_ARGS__) |
| #define vsuxei32_v_f32mf2_m | ( | ... | ) | __riscv_vsuxei32_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxei32_v_f64m1 | ( | ... | ) | __riscv_vsuxei32_v_f64m1(__VA_ARGS__) |
| #define vsuxei32_v_f64m1_m | ( | ... | ) | __riscv_vsuxei32_v_f64m1_m(__VA_ARGS__) |
| #define vsuxei32_v_f64m2 | ( | ... | ) | __riscv_vsuxei32_v_f64m2(__VA_ARGS__) |
| #define vsuxei32_v_f64m2_m | ( | ... | ) | __riscv_vsuxei32_v_f64m2_m(__VA_ARGS__) |
| #define vsuxei32_v_f64m4 | ( | ... | ) | __riscv_vsuxei32_v_f64m4(__VA_ARGS__) |
| #define vsuxei32_v_f64m4_m | ( | ... | ) | __riscv_vsuxei32_v_f64m4_m(__VA_ARGS__) |
| #define vsuxei32_v_f64m8 | ( | ... | ) | __riscv_vsuxei32_v_f64m8(__VA_ARGS__) |
| #define vsuxei32_v_f64m8_m | ( | ... | ) | __riscv_vsuxei32_v_f64m8_m(__VA_ARGS__) |
| #define vsuxei32_v_i16m1 | ( | ... | ) | __riscv_vsuxei32_v_i16m1(__VA_ARGS__) |
| #define vsuxei32_v_i16m1_m | ( | ... | ) | __riscv_vsuxei32_v_i16m1_m(__VA_ARGS__) |
| #define vsuxei32_v_i16m2 | ( | ... | ) | __riscv_vsuxei32_v_i16m2(__VA_ARGS__) |
| #define vsuxei32_v_i16m2_m | ( | ... | ) | __riscv_vsuxei32_v_i16m2_m(__VA_ARGS__) |
| #define vsuxei32_v_i16m4 | ( | ... | ) | __riscv_vsuxei32_v_i16m4(__VA_ARGS__) |
| #define vsuxei32_v_i16m4_m | ( | ... | ) | __riscv_vsuxei32_v_i16m4_m(__VA_ARGS__) |
| #define vsuxei32_v_i16mf2 | ( | ... | ) | __riscv_vsuxei32_v_i16mf2(__VA_ARGS__) |
| #define vsuxei32_v_i16mf2_m | ( | ... | ) | __riscv_vsuxei32_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxei32_v_i16mf4 | ( | ... | ) | __riscv_vsuxei32_v_i16mf4(__VA_ARGS__) |
| #define vsuxei32_v_i16mf4_m | ( | ... | ) | __riscv_vsuxei32_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxei32_v_i32m1 | ( | ... | ) | __riscv_vsuxei32_v_i32m1(__VA_ARGS__) |
| #define vsuxei32_v_i32m1_m | ( | ... | ) | __riscv_vsuxei32_v_i32m1_m(__VA_ARGS__) |
| #define vsuxei32_v_i32m2 | ( | ... | ) | __riscv_vsuxei32_v_i32m2(__VA_ARGS__) |
| #define vsuxei32_v_i32m2_m | ( | ... | ) | __riscv_vsuxei32_v_i32m2_m(__VA_ARGS__) |
| #define vsuxei32_v_i32m4 | ( | ... | ) | __riscv_vsuxei32_v_i32m4(__VA_ARGS__) |
| #define vsuxei32_v_i32m4_m | ( | ... | ) | __riscv_vsuxei32_v_i32m4_m(__VA_ARGS__) |
| #define vsuxei32_v_i32m8 | ( | ... | ) | __riscv_vsuxei32_v_i32m8(__VA_ARGS__) |
| #define vsuxei32_v_i32m8_m | ( | ... | ) | __riscv_vsuxei32_v_i32m8_m(__VA_ARGS__) |
| #define vsuxei32_v_i32mf2 | ( | ... | ) | __riscv_vsuxei32_v_i32mf2(__VA_ARGS__) |
| #define vsuxei32_v_i32mf2_m | ( | ... | ) | __riscv_vsuxei32_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxei32_v_i64m1 | ( | ... | ) | __riscv_vsuxei32_v_i64m1(__VA_ARGS__) |
| #define vsuxei32_v_i64m1_m | ( | ... | ) | __riscv_vsuxei32_v_i64m1_m(__VA_ARGS__) |
| #define vsuxei32_v_i64m2 | ( | ... | ) | __riscv_vsuxei32_v_i64m2(__VA_ARGS__) |
| #define vsuxei32_v_i64m2_m | ( | ... | ) | __riscv_vsuxei32_v_i64m2_m(__VA_ARGS__) |
| #define vsuxei32_v_i64m4 | ( | ... | ) | __riscv_vsuxei32_v_i64m4(__VA_ARGS__) |
| #define vsuxei32_v_i64m4_m | ( | ... | ) | __riscv_vsuxei32_v_i64m4_m(__VA_ARGS__) |
| #define vsuxei32_v_i64m8 | ( | ... | ) | __riscv_vsuxei32_v_i64m8(__VA_ARGS__) |
| #define vsuxei32_v_i64m8_m | ( | ... | ) | __riscv_vsuxei32_v_i64m8_m(__VA_ARGS__) |
| #define vsuxei32_v_i8m1 | ( | ... | ) | __riscv_vsuxei32_v_i8m1(__VA_ARGS__) |
| #define vsuxei32_v_i8m1_m | ( | ... | ) | __riscv_vsuxei32_v_i8m1_m(__VA_ARGS__) |
| #define vsuxei32_v_i8m2 | ( | ... | ) | __riscv_vsuxei32_v_i8m2(__VA_ARGS__) |
| #define vsuxei32_v_i8m2_m | ( | ... | ) | __riscv_vsuxei32_v_i8m2_m(__VA_ARGS__) |
| #define vsuxei32_v_i8mf2 | ( | ... | ) | __riscv_vsuxei32_v_i8mf2(__VA_ARGS__) |
| #define vsuxei32_v_i8mf2_m | ( | ... | ) | __riscv_vsuxei32_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxei32_v_i8mf4 | ( | ... | ) | __riscv_vsuxei32_v_i8mf4(__VA_ARGS__) |
| #define vsuxei32_v_i8mf4_m | ( | ... | ) | __riscv_vsuxei32_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxei32_v_i8mf8 | ( | ... | ) | __riscv_vsuxei32_v_i8mf8(__VA_ARGS__) |
| #define vsuxei32_v_i8mf8_m | ( | ... | ) | __riscv_vsuxei32_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxei32_v_u16m1 | ( | ... | ) | __riscv_vsuxei32_v_u16m1(__VA_ARGS__) |
| #define vsuxei32_v_u16m1_m | ( | ... | ) | __riscv_vsuxei32_v_u16m1_m(__VA_ARGS__) |
| #define vsuxei32_v_u16m2 | ( | ... | ) | __riscv_vsuxei32_v_u16m2(__VA_ARGS__) |
| #define vsuxei32_v_u16m2_m | ( | ... | ) | __riscv_vsuxei32_v_u16m2_m(__VA_ARGS__) |
| #define vsuxei32_v_u16m4 | ( | ... | ) | __riscv_vsuxei32_v_u16m4(__VA_ARGS__) |
| #define vsuxei32_v_u16m4_m | ( | ... | ) | __riscv_vsuxei32_v_u16m4_m(__VA_ARGS__) |
| #define vsuxei32_v_u16mf2 | ( | ... | ) | __riscv_vsuxei32_v_u16mf2(__VA_ARGS__) |
| #define vsuxei32_v_u16mf2_m | ( | ... | ) | __riscv_vsuxei32_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxei32_v_u16mf4 | ( | ... | ) | __riscv_vsuxei32_v_u16mf4(__VA_ARGS__) |
| #define vsuxei32_v_u16mf4_m | ( | ... | ) | __riscv_vsuxei32_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxei32_v_u32m1 | ( | ... | ) | __riscv_vsuxei32_v_u32m1(__VA_ARGS__) |
| #define vsuxei32_v_u32m1_m | ( | ... | ) | __riscv_vsuxei32_v_u32m1_m(__VA_ARGS__) |
| #define vsuxei32_v_u32m2 | ( | ... | ) | __riscv_vsuxei32_v_u32m2(__VA_ARGS__) |
| #define vsuxei32_v_u32m2_m | ( | ... | ) | __riscv_vsuxei32_v_u32m2_m(__VA_ARGS__) |
| #define vsuxei32_v_u32m4 | ( | ... | ) | __riscv_vsuxei32_v_u32m4(__VA_ARGS__) |
| #define vsuxei32_v_u32m4_m | ( | ... | ) | __riscv_vsuxei32_v_u32m4_m(__VA_ARGS__) |
| #define vsuxei32_v_u32m8 | ( | ... | ) | __riscv_vsuxei32_v_u32m8(__VA_ARGS__) |
| #define vsuxei32_v_u32m8_m | ( | ... | ) | __riscv_vsuxei32_v_u32m8_m(__VA_ARGS__) |
| #define vsuxei32_v_u32mf2 | ( | ... | ) | __riscv_vsuxei32_v_u32mf2(__VA_ARGS__) |
| #define vsuxei32_v_u32mf2_m | ( | ... | ) | __riscv_vsuxei32_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxei32_v_u64m1 | ( | ... | ) | __riscv_vsuxei32_v_u64m1(__VA_ARGS__) |
| #define vsuxei32_v_u64m1_m | ( | ... | ) | __riscv_vsuxei32_v_u64m1_m(__VA_ARGS__) |
| #define vsuxei32_v_u64m2 | ( | ... | ) | __riscv_vsuxei32_v_u64m2(__VA_ARGS__) |
| #define vsuxei32_v_u64m2_m | ( | ... | ) | __riscv_vsuxei32_v_u64m2_m(__VA_ARGS__) |
| #define vsuxei32_v_u64m4 | ( | ... | ) | __riscv_vsuxei32_v_u64m4(__VA_ARGS__) |
| #define vsuxei32_v_u64m4_m | ( | ... | ) | __riscv_vsuxei32_v_u64m4_m(__VA_ARGS__) |
| #define vsuxei32_v_u64m8 | ( | ... | ) | __riscv_vsuxei32_v_u64m8(__VA_ARGS__) |
| #define vsuxei32_v_u64m8_m | ( | ... | ) | __riscv_vsuxei32_v_u64m8_m(__VA_ARGS__) |
| #define vsuxei32_v_u8m1 | ( | ... | ) | __riscv_vsuxei32_v_u8m1(__VA_ARGS__) |
| #define vsuxei32_v_u8m1_m | ( | ... | ) | __riscv_vsuxei32_v_u8m1_m(__VA_ARGS__) |
| #define vsuxei32_v_u8m2 | ( | ... | ) | __riscv_vsuxei32_v_u8m2(__VA_ARGS__) |
| #define vsuxei32_v_u8m2_m | ( | ... | ) | __riscv_vsuxei32_v_u8m2_m(__VA_ARGS__) |
| #define vsuxei32_v_u8mf2 | ( | ... | ) | __riscv_vsuxei32_v_u8mf2(__VA_ARGS__) |
| #define vsuxei32_v_u8mf2_m | ( | ... | ) | __riscv_vsuxei32_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxei32_v_u8mf4 | ( | ... | ) | __riscv_vsuxei32_v_u8mf4(__VA_ARGS__) |
| #define vsuxei32_v_u8mf4_m | ( | ... | ) | __riscv_vsuxei32_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxei32_v_u8mf8 | ( | ... | ) | __riscv_vsuxei32_v_u8mf8(__VA_ARGS__) |
| #define vsuxei32_v_u8mf8_m | ( | ... | ) | __riscv_vsuxei32_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxei64_v_f16m1 | ( | ... | ) | __riscv_vsuxei64_v_f16m1(__VA_ARGS__) |
| #define vsuxei64_v_f16m1_m | ( | ... | ) | __riscv_vsuxei64_v_f16m1_m(__VA_ARGS__) |
| #define vsuxei64_v_f16m2 | ( | ... | ) | __riscv_vsuxei64_v_f16m2(__VA_ARGS__) |
| #define vsuxei64_v_f16m2_m | ( | ... | ) | __riscv_vsuxei64_v_f16m2_m(__VA_ARGS__) |
| #define vsuxei64_v_f16mf2 | ( | ... | ) | __riscv_vsuxei64_v_f16mf2(__VA_ARGS__) |
| #define vsuxei64_v_f16mf2_m | ( | ... | ) | __riscv_vsuxei64_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxei64_v_f16mf4 | ( | ... | ) | __riscv_vsuxei64_v_f16mf4(__VA_ARGS__) |
| #define vsuxei64_v_f16mf4_m | ( | ... | ) | __riscv_vsuxei64_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxei64_v_f32m1 | ( | ... | ) | __riscv_vsuxei64_v_f32m1(__VA_ARGS__) |
| #define vsuxei64_v_f32m1_m | ( | ... | ) | __riscv_vsuxei64_v_f32m1_m(__VA_ARGS__) |
| #define vsuxei64_v_f32m2 | ( | ... | ) | __riscv_vsuxei64_v_f32m2(__VA_ARGS__) |
| #define vsuxei64_v_f32m2_m | ( | ... | ) | __riscv_vsuxei64_v_f32m2_m(__VA_ARGS__) |
| #define vsuxei64_v_f32m4 | ( | ... | ) | __riscv_vsuxei64_v_f32m4(__VA_ARGS__) |
| #define vsuxei64_v_f32m4_m | ( | ... | ) | __riscv_vsuxei64_v_f32m4_m(__VA_ARGS__) |
| #define vsuxei64_v_f32mf2 | ( | ... | ) | __riscv_vsuxei64_v_f32mf2(__VA_ARGS__) |
| #define vsuxei64_v_f32mf2_m | ( | ... | ) | __riscv_vsuxei64_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxei64_v_f64m1 | ( | ... | ) | __riscv_vsuxei64_v_f64m1(__VA_ARGS__) |
| #define vsuxei64_v_f64m1_m | ( | ... | ) | __riscv_vsuxei64_v_f64m1_m(__VA_ARGS__) |
| #define vsuxei64_v_f64m2 | ( | ... | ) | __riscv_vsuxei64_v_f64m2(__VA_ARGS__) |
| #define vsuxei64_v_f64m2_m | ( | ... | ) | __riscv_vsuxei64_v_f64m2_m(__VA_ARGS__) |
| #define vsuxei64_v_f64m4 | ( | ... | ) | __riscv_vsuxei64_v_f64m4(__VA_ARGS__) |
| #define vsuxei64_v_f64m4_m | ( | ... | ) | __riscv_vsuxei64_v_f64m4_m(__VA_ARGS__) |
| #define vsuxei64_v_f64m8 | ( | ... | ) | __riscv_vsuxei64_v_f64m8(__VA_ARGS__) |
| #define vsuxei64_v_f64m8_m | ( | ... | ) | __riscv_vsuxei64_v_f64m8_m(__VA_ARGS__) |
| #define vsuxei64_v_i16m1 | ( | ... | ) | __riscv_vsuxei64_v_i16m1(__VA_ARGS__) |
| #define vsuxei64_v_i16m1_m | ( | ... | ) | __riscv_vsuxei64_v_i16m1_m(__VA_ARGS__) |
| #define vsuxei64_v_i16m2 | ( | ... | ) | __riscv_vsuxei64_v_i16m2(__VA_ARGS__) |
| #define vsuxei64_v_i16m2_m | ( | ... | ) | __riscv_vsuxei64_v_i16m2_m(__VA_ARGS__) |
| #define vsuxei64_v_i16mf2 | ( | ... | ) | __riscv_vsuxei64_v_i16mf2(__VA_ARGS__) |
| #define vsuxei64_v_i16mf2_m | ( | ... | ) | __riscv_vsuxei64_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxei64_v_i16mf4 | ( | ... | ) | __riscv_vsuxei64_v_i16mf4(__VA_ARGS__) |
| #define vsuxei64_v_i16mf4_m | ( | ... | ) | __riscv_vsuxei64_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxei64_v_i32m1 | ( | ... | ) | __riscv_vsuxei64_v_i32m1(__VA_ARGS__) |
| #define vsuxei64_v_i32m1_m | ( | ... | ) | __riscv_vsuxei64_v_i32m1_m(__VA_ARGS__) |
| #define vsuxei64_v_i32m2 | ( | ... | ) | __riscv_vsuxei64_v_i32m2(__VA_ARGS__) |
| #define vsuxei64_v_i32m2_m | ( | ... | ) | __riscv_vsuxei64_v_i32m2_m(__VA_ARGS__) |
| #define vsuxei64_v_i32m4 | ( | ... | ) | __riscv_vsuxei64_v_i32m4(__VA_ARGS__) |
| #define vsuxei64_v_i32m4_m | ( | ... | ) | __riscv_vsuxei64_v_i32m4_m(__VA_ARGS__) |
| #define vsuxei64_v_i32mf2 | ( | ... | ) | __riscv_vsuxei64_v_i32mf2(__VA_ARGS__) |
| #define vsuxei64_v_i32mf2_m | ( | ... | ) | __riscv_vsuxei64_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxei64_v_i64m1 | ( | ... | ) | __riscv_vsuxei64_v_i64m1(__VA_ARGS__) |
| #define vsuxei64_v_i64m1_m | ( | ... | ) | __riscv_vsuxei64_v_i64m1_m(__VA_ARGS__) |
| #define vsuxei64_v_i64m2 | ( | ... | ) | __riscv_vsuxei64_v_i64m2(__VA_ARGS__) |
| #define vsuxei64_v_i64m2_m | ( | ... | ) | __riscv_vsuxei64_v_i64m2_m(__VA_ARGS__) |
| #define vsuxei64_v_i64m4 | ( | ... | ) | __riscv_vsuxei64_v_i64m4(__VA_ARGS__) |
| #define vsuxei64_v_i64m4_m | ( | ... | ) | __riscv_vsuxei64_v_i64m4_m(__VA_ARGS__) |
| #define vsuxei64_v_i64m8 | ( | ... | ) | __riscv_vsuxei64_v_i64m8(__VA_ARGS__) |
| #define vsuxei64_v_i64m8_m | ( | ... | ) | __riscv_vsuxei64_v_i64m8_m(__VA_ARGS__) |
| #define vsuxei64_v_i8m1 | ( | ... | ) | __riscv_vsuxei64_v_i8m1(__VA_ARGS__) |
| #define vsuxei64_v_i8m1_m | ( | ... | ) | __riscv_vsuxei64_v_i8m1_m(__VA_ARGS__) |
| #define vsuxei64_v_i8mf2 | ( | ... | ) | __riscv_vsuxei64_v_i8mf2(__VA_ARGS__) |
| #define vsuxei64_v_i8mf2_m | ( | ... | ) | __riscv_vsuxei64_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxei64_v_i8mf4 | ( | ... | ) | __riscv_vsuxei64_v_i8mf4(__VA_ARGS__) |
| #define vsuxei64_v_i8mf4_m | ( | ... | ) | __riscv_vsuxei64_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxei64_v_i8mf8 | ( | ... | ) | __riscv_vsuxei64_v_i8mf8(__VA_ARGS__) |
| #define vsuxei64_v_i8mf8_m | ( | ... | ) | __riscv_vsuxei64_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxei64_v_u16m1 | ( | ... | ) | __riscv_vsuxei64_v_u16m1(__VA_ARGS__) |
| #define vsuxei64_v_u16m1_m | ( | ... | ) | __riscv_vsuxei64_v_u16m1_m(__VA_ARGS__) |
| #define vsuxei64_v_u16m2 | ( | ... | ) | __riscv_vsuxei64_v_u16m2(__VA_ARGS__) |
| #define vsuxei64_v_u16m2_m | ( | ... | ) | __riscv_vsuxei64_v_u16m2_m(__VA_ARGS__) |
| #define vsuxei64_v_u16mf2 | ( | ... | ) | __riscv_vsuxei64_v_u16mf2(__VA_ARGS__) |
| #define vsuxei64_v_u16mf2_m | ( | ... | ) | __riscv_vsuxei64_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxei64_v_u16mf4 | ( | ... | ) | __riscv_vsuxei64_v_u16mf4(__VA_ARGS__) |
| #define vsuxei64_v_u16mf4_m | ( | ... | ) | __riscv_vsuxei64_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxei64_v_u32m1 | ( | ... | ) | __riscv_vsuxei64_v_u32m1(__VA_ARGS__) |
| #define vsuxei64_v_u32m1_m | ( | ... | ) | __riscv_vsuxei64_v_u32m1_m(__VA_ARGS__) |
| #define vsuxei64_v_u32m2 | ( | ... | ) | __riscv_vsuxei64_v_u32m2(__VA_ARGS__) |
| #define vsuxei64_v_u32m2_m | ( | ... | ) | __riscv_vsuxei64_v_u32m2_m(__VA_ARGS__) |
| #define vsuxei64_v_u32m4 | ( | ... | ) | __riscv_vsuxei64_v_u32m4(__VA_ARGS__) |
| #define vsuxei64_v_u32m4_m | ( | ... | ) | __riscv_vsuxei64_v_u32m4_m(__VA_ARGS__) |
| #define vsuxei64_v_u32mf2 | ( | ... | ) | __riscv_vsuxei64_v_u32mf2(__VA_ARGS__) |
| #define vsuxei64_v_u32mf2_m | ( | ... | ) | __riscv_vsuxei64_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxei64_v_u64m1 | ( | ... | ) | __riscv_vsuxei64_v_u64m1(__VA_ARGS__) |
| #define vsuxei64_v_u64m1_m | ( | ... | ) | __riscv_vsuxei64_v_u64m1_m(__VA_ARGS__) |
| #define vsuxei64_v_u64m2 | ( | ... | ) | __riscv_vsuxei64_v_u64m2(__VA_ARGS__) |
| #define vsuxei64_v_u64m2_m | ( | ... | ) | __riscv_vsuxei64_v_u64m2_m(__VA_ARGS__) |
| #define vsuxei64_v_u64m4 | ( | ... | ) | __riscv_vsuxei64_v_u64m4(__VA_ARGS__) |
| #define vsuxei64_v_u64m4_m | ( | ... | ) | __riscv_vsuxei64_v_u64m4_m(__VA_ARGS__) |
| #define vsuxei64_v_u64m8 | ( | ... | ) | __riscv_vsuxei64_v_u64m8(__VA_ARGS__) |
| #define vsuxei64_v_u64m8_m | ( | ... | ) | __riscv_vsuxei64_v_u64m8_m(__VA_ARGS__) |
| #define vsuxei64_v_u8m1 | ( | ... | ) | __riscv_vsuxei64_v_u8m1(__VA_ARGS__) |
| #define vsuxei64_v_u8m1_m | ( | ... | ) | __riscv_vsuxei64_v_u8m1_m(__VA_ARGS__) |
| #define vsuxei64_v_u8mf2 | ( | ... | ) | __riscv_vsuxei64_v_u8mf2(__VA_ARGS__) |
| #define vsuxei64_v_u8mf2_m | ( | ... | ) | __riscv_vsuxei64_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxei64_v_u8mf4 | ( | ... | ) | __riscv_vsuxei64_v_u8mf4(__VA_ARGS__) |
| #define vsuxei64_v_u8mf4_m | ( | ... | ) | __riscv_vsuxei64_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxei64_v_u8mf8 | ( | ... | ) | __riscv_vsuxei64_v_u8mf8(__VA_ARGS__) |
| #define vsuxei64_v_u8mf8_m | ( | ... | ) | __riscv_vsuxei64_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxei8_v_f16m1 | ( | ... | ) | __riscv_vsuxei8_v_f16m1(__VA_ARGS__) |
| #define vsuxei8_v_f16m1_m | ( | ... | ) | __riscv_vsuxei8_v_f16m1_m(__VA_ARGS__) |
| #define vsuxei8_v_f16m2 | ( | ... | ) | __riscv_vsuxei8_v_f16m2(__VA_ARGS__) |
| #define vsuxei8_v_f16m2_m | ( | ... | ) | __riscv_vsuxei8_v_f16m2_m(__VA_ARGS__) |
| #define vsuxei8_v_f16m4 | ( | ... | ) | __riscv_vsuxei8_v_f16m4(__VA_ARGS__) |
| #define vsuxei8_v_f16m4_m | ( | ... | ) | __riscv_vsuxei8_v_f16m4_m(__VA_ARGS__) |
| #define vsuxei8_v_f16m8 | ( | ... | ) | __riscv_vsuxei8_v_f16m8(__VA_ARGS__) |
| #define vsuxei8_v_f16m8_m | ( | ... | ) | __riscv_vsuxei8_v_f16m8_m(__VA_ARGS__) |
| #define vsuxei8_v_f16mf2 | ( | ... | ) | __riscv_vsuxei8_v_f16mf2(__VA_ARGS__) |
| #define vsuxei8_v_f16mf2_m | ( | ... | ) | __riscv_vsuxei8_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxei8_v_f16mf4 | ( | ... | ) | __riscv_vsuxei8_v_f16mf4(__VA_ARGS__) |
| #define vsuxei8_v_f16mf4_m | ( | ... | ) | __riscv_vsuxei8_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxei8_v_f32m1 | ( | ... | ) | __riscv_vsuxei8_v_f32m1(__VA_ARGS__) |
| #define vsuxei8_v_f32m1_m | ( | ... | ) | __riscv_vsuxei8_v_f32m1_m(__VA_ARGS__) |
| #define vsuxei8_v_f32m2 | ( | ... | ) | __riscv_vsuxei8_v_f32m2(__VA_ARGS__) |
| #define vsuxei8_v_f32m2_m | ( | ... | ) | __riscv_vsuxei8_v_f32m2_m(__VA_ARGS__) |
| #define vsuxei8_v_f32m4 | ( | ... | ) | __riscv_vsuxei8_v_f32m4(__VA_ARGS__) |
| #define vsuxei8_v_f32m4_m | ( | ... | ) | __riscv_vsuxei8_v_f32m4_m(__VA_ARGS__) |
| #define vsuxei8_v_f32m8 | ( | ... | ) | __riscv_vsuxei8_v_f32m8(__VA_ARGS__) |
| #define vsuxei8_v_f32m8_m | ( | ... | ) | __riscv_vsuxei8_v_f32m8_m(__VA_ARGS__) |
| #define vsuxei8_v_f32mf2 | ( | ... | ) | __riscv_vsuxei8_v_f32mf2(__VA_ARGS__) |
| #define vsuxei8_v_f32mf2_m | ( | ... | ) | __riscv_vsuxei8_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxei8_v_f64m1 | ( | ... | ) | __riscv_vsuxei8_v_f64m1(__VA_ARGS__) |
| #define vsuxei8_v_f64m1_m | ( | ... | ) | __riscv_vsuxei8_v_f64m1_m(__VA_ARGS__) |
| #define vsuxei8_v_f64m2 | ( | ... | ) | __riscv_vsuxei8_v_f64m2(__VA_ARGS__) |
| #define vsuxei8_v_f64m2_m | ( | ... | ) | __riscv_vsuxei8_v_f64m2_m(__VA_ARGS__) |
| #define vsuxei8_v_f64m4 | ( | ... | ) | __riscv_vsuxei8_v_f64m4(__VA_ARGS__) |
| #define vsuxei8_v_f64m4_m | ( | ... | ) | __riscv_vsuxei8_v_f64m4_m(__VA_ARGS__) |
| #define vsuxei8_v_f64m8 | ( | ... | ) | __riscv_vsuxei8_v_f64m8(__VA_ARGS__) |
| #define vsuxei8_v_f64m8_m | ( | ... | ) | __riscv_vsuxei8_v_f64m8_m(__VA_ARGS__) |
| #define vsuxei8_v_i16m1 | ( | ... | ) | __riscv_vsuxei8_v_i16m1(__VA_ARGS__) |
| #define vsuxei8_v_i16m1_m | ( | ... | ) | __riscv_vsuxei8_v_i16m1_m(__VA_ARGS__) |
| #define vsuxei8_v_i16m2 | ( | ... | ) | __riscv_vsuxei8_v_i16m2(__VA_ARGS__) |
| #define vsuxei8_v_i16m2_m | ( | ... | ) | __riscv_vsuxei8_v_i16m2_m(__VA_ARGS__) |
| #define vsuxei8_v_i16m4 | ( | ... | ) | __riscv_vsuxei8_v_i16m4(__VA_ARGS__) |
| #define vsuxei8_v_i16m4_m | ( | ... | ) | __riscv_vsuxei8_v_i16m4_m(__VA_ARGS__) |
| #define vsuxei8_v_i16m8 | ( | ... | ) | __riscv_vsuxei8_v_i16m8(__VA_ARGS__) |
| #define vsuxei8_v_i16m8_m | ( | ... | ) | __riscv_vsuxei8_v_i16m8_m(__VA_ARGS__) |
| #define vsuxei8_v_i16mf2 | ( | ... | ) | __riscv_vsuxei8_v_i16mf2(__VA_ARGS__) |
| #define vsuxei8_v_i16mf2_m | ( | ... | ) | __riscv_vsuxei8_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxei8_v_i16mf4 | ( | ... | ) | __riscv_vsuxei8_v_i16mf4(__VA_ARGS__) |
| #define vsuxei8_v_i16mf4_m | ( | ... | ) | __riscv_vsuxei8_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxei8_v_i32m1 | ( | ... | ) | __riscv_vsuxei8_v_i32m1(__VA_ARGS__) |
| #define vsuxei8_v_i32m1_m | ( | ... | ) | __riscv_vsuxei8_v_i32m1_m(__VA_ARGS__) |
| #define vsuxei8_v_i32m2 | ( | ... | ) | __riscv_vsuxei8_v_i32m2(__VA_ARGS__) |
| #define vsuxei8_v_i32m2_m | ( | ... | ) | __riscv_vsuxei8_v_i32m2_m(__VA_ARGS__) |
| #define vsuxei8_v_i32m4 | ( | ... | ) | __riscv_vsuxei8_v_i32m4(__VA_ARGS__) |
| #define vsuxei8_v_i32m4_m | ( | ... | ) | __riscv_vsuxei8_v_i32m4_m(__VA_ARGS__) |
| #define vsuxei8_v_i32m8 | ( | ... | ) | __riscv_vsuxei8_v_i32m8(__VA_ARGS__) |
| #define vsuxei8_v_i32m8_m | ( | ... | ) | __riscv_vsuxei8_v_i32m8_m(__VA_ARGS__) |
| #define vsuxei8_v_i32mf2 | ( | ... | ) | __riscv_vsuxei8_v_i32mf2(__VA_ARGS__) |
| #define vsuxei8_v_i32mf2_m | ( | ... | ) | __riscv_vsuxei8_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxei8_v_i64m1 | ( | ... | ) | __riscv_vsuxei8_v_i64m1(__VA_ARGS__) |
| #define vsuxei8_v_i64m1_m | ( | ... | ) | __riscv_vsuxei8_v_i64m1_m(__VA_ARGS__) |
| #define vsuxei8_v_i64m2 | ( | ... | ) | __riscv_vsuxei8_v_i64m2(__VA_ARGS__) |
| #define vsuxei8_v_i64m2_m | ( | ... | ) | __riscv_vsuxei8_v_i64m2_m(__VA_ARGS__) |
| #define vsuxei8_v_i64m4 | ( | ... | ) | __riscv_vsuxei8_v_i64m4(__VA_ARGS__) |
| #define vsuxei8_v_i64m4_m | ( | ... | ) | __riscv_vsuxei8_v_i64m4_m(__VA_ARGS__) |
| #define vsuxei8_v_i64m8 | ( | ... | ) | __riscv_vsuxei8_v_i64m8(__VA_ARGS__) |
| #define vsuxei8_v_i64m8_m | ( | ... | ) | __riscv_vsuxei8_v_i64m8_m(__VA_ARGS__) |
| #define vsuxei8_v_i8m1 | ( | ... | ) | __riscv_vsuxei8_v_i8m1(__VA_ARGS__) |
| #define vsuxei8_v_i8m1_m | ( | ... | ) | __riscv_vsuxei8_v_i8m1_m(__VA_ARGS__) |
| #define vsuxei8_v_i8m2 | ( | ... | ) | __riscv_vsuxei8_v_i8m2(__VA_ARGS__) |
| #define vsuxei8_v_i8m2_m | ( | ... | ) | __riscv_vsuxei8_v_i8m2_m(__VA_ARGS__) |
| #define vsuxei8_v_i8m4 | ( | ... | ) | __riscv_vsuxei8_v_i8m4(__VA_ARGS__) |
| #define vsuxei8_v_i8m4_m | ( | ... | ) | __riscv_vsuxei8_v_i8m4_m(__VA_ARGS__) |
| #define vsuxei8_v_i8m8 | ( | ... | ) | __riscv_vsuxei8_v_i8m8(__VA_ARGS__) |
| #define vsuxei8_v_i8m8_m | ( | ... | ) | __riscv_vsuxei8_v_i8m8_m(__VA_ARGS__) |
| #define vsuxei8_v_i8mf2 | ( | ... | ) | __riscv_vsuxei8_v_i8mf2(__VA_ARGS__) |
| #define vsuxei8_v_i8mf2_m | ( | ... | ) | __riscv_vsuxei8_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxei8_v_i8mf4 | ( | ... | ) | __riscv_vsuxei8_v_i8mf4(__VA_ARGS__) |
| #define vsuxei8_v_i8mf4_m | ( | ... | ) | __riscv_vsuxei8_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxei8_v_i8mf8 | ( | ... | ) | __riscv_vsuxei8_v_i8mf8(__VA_ARGS__) |
| #define vsuxei8_v_i8mf8_m | ( | ... | ) | __riscv_vsuxei8_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxei8_v_u16m1 | ( | ... | ) | __riscv_vsuxei8_v_u16m1(__VA_ARGS__) |
| #define vsuxei8_v_u16m1_m | ( | ... | ) | __riscv_vsuxei8_v_u16m1_m(__VA_ARGS__) |
| #define vsuxei8_v_u16m2 | ( | ... | ) | __riscv_vsuxei8_v_u16m2(__VA_ARGS__) |
| #define vsuxei8_v_u16m2_m | ( | ... | ) | __riscv_vsuxei8_v_u16m2_m(__VA_ARGS__) |
| #define vsuxei8_v_u16m4 | ( | ... | ) | __riscv_vsuxei8_v_u16m4(__VA_ARGS__) |
| #define vsuxei8_v_u16m4_m | ( | ... | ) | __riscv_vsuxei8_v_u16m4_m(__VA_ARGS__) |
| #define vsuxei8_v_u16m8 | ( | ... | ) | __riscv_vsuxei8_v_u16m8(__VA_ARGS__) |
| #define vsuxei8_v_u16m8_m | ( | ... | ) | __riscv_vsuxei8_v_u16m8_m(__VA_ARGS__) |
| #define vsuxei8_v_u16mf2 | ( | ... | ) | __riscv_vsuxei8_v_u16mf2(__VA_ARGS__) |
| #define vsuxei8_v_u16mf2_m | ( | ... | ) | __riscv_vsuxei8_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxei8_v_u16mf4 | ( | ... | ) | __riscv_vsuxei8_v_u16mf4(__VA_ARGS__) |
| #define vsuxei8_v_u16mf4_m | ( | ... | ) | __riscv_vsuxei8_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxei8_v_u32m1 | ( | ... | ) | __riscv_vsuxei8_v_u32m1(__VA_ARGS__) |
| #define vsuxei8_v_u32m1_m | ( | ... | ) | __riscv_vsuxei8_v_u32m1_m(__VA_ARGS__) |
| #define vsuxei8_v_u32m2 | ( | ... | ) | __riscv_vsuxei8_v_u32m2(__VA_ARGS__) |
| #define vsuxei8_v_u32m2_m | ( | ... | ) | __riscv_vsuxei8_v_u32m2_m(__VA_ARGS__) |
| #define vsuxei8_v_u32m4 | ( | ... | ) | __riscv_vsuxei8_v_u32m4(__VA_ARGS__) |
| #define vsuxei8_v_u32m4_m | ( | ... | ) | __riscv_vsuxei8_v_u32m4_m(__VA_ARGS__) |
| #define vsuxei8_v_u32m8 | ( | ... | ) | __riscv_vsuxei8_v_u32m8(__VA_ARGS__) |
| #define vsuxei8_v_u32m8_m | ( | ... | ) | __riscv_vsuxei8_v_u32m8_m(__VA_ARGS__) |
| #define vsuxei8_v_u32mf2 | ( | ... | ) | __riscv_vsuxei8_v_u32mf2(__VA_ARGS__) |
| #define vsuxei8_v_u32mf2_m | ( | ... | ) | __riscv_vsuxei8_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxei8_v_u64m1 | ( | ... | ) | __riscv_vsuxei8_v_u64m1(__VA_ARGS__) |
| #define vsuxei8_v_u64m1_m | ( | ... | ) | __riscv_vsuxei8_v_u64m1_m(__VA_ARGS__) |
| #define vsuxei8_v_u64m2 | ( | ... | ) | __riscv_vsuxei8_v_u64m2(__VA_ARGS__) |
| #define vsuxei8_v_u64m2_m | ( | ... | ) | __riscv_vsuxei8_v_u64m2_m(__VA_ARGS__) |
| #define vsuxei8_v_u64m4 | ( | ... | ) | __riscv_vsuxei8_v_u64m4(__VA_ARGS__) |
| #define vsuxei8_v_u64m4_m | ( | ... | ) | __riscv_vsuxei8_v_u64m4_m(__VA_ARGS__) |
| #define vsuxei8_v_u64m8 | ( | ... | ) | __riscv_vsuxei8_v_u64m8(__VA_ARGS__) |
| #define vsuxei8_v_u64m8_m | ( | ... | ) | __riscv_vsuxei8_v_u64m8_m(__VA_ARGS__) |
| #define vsuxei8_v_u8m1 | ( | ... | ) | __riscv_vsuxei8_v_u8m1(__VA_ARGS__) |
| #define vsuxei8_v_u8m1_m | ( | ... | ) | __riscv_vsuxei8_v_u8m1_m(__VA_ARGS__) |
| #define vsuxei8_v_u8m2 | ( | ... | ) | __riscv_vsuxei8_v_u8m2(__VA_ARGS__) |
| #define vsuxei8_v_u8m2_m | ( | ... | ) | __riscv_vsuxei8_v_u8m2_m(__VA_ARGS__) |
| #define vsuxei8_v_u8m4 | ( | ... | ) | __riscv_vsuxei8_v_u8m4(__VA_ARGS__) |
| #define vsuxei8_v_u8m4_m | ( | ... | ) | __riscv_vsuxei8_v_u8m4_m(__VA_ARGS__) |
| #define vsuxei8_v_u8m8 | ( | ... | ) | __riscv_vsuxei8_v_u8m8(__VA_ARGS__) |
| #define vsuxei8_v_u8m8_m | ( | ... | ) | __riscv_vsuxei8_v_u8m8_m(__VA_ARGS__) |
| #define vsuxei8_v_u8mf2 | ( | ... | ) | __riscv_vsuxei8_v_u8mf2(__VA_ARGS__) |
| #define vsuxei8_v_u8mf2_m | ( | ... | ) | __riscv_vsuxei8_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxei8_v_u8mf4 | ( | ... | ) | __riscv_vsuxei8_v_u8mf4(__VA_ARGS__) |
| #define vsuxei8_v_u8mf4_m | ( | ... | ) | __riscv_vsuxei8_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxei8_v_u8mf8 | ( | ... | ) | __riscv_vsuxei8_v_u8mf8(__VA_ARGS__) |
| #define vsuxei8_v_u8mf8_m | ( | ... | ) | __riscv_vsuxei8_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f16m1 | ( | ... | ) | __riscv_vsuxseg2ei16_v_f16m1(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f16m2 | ( | ... | ) | __riscv_vsuxseg2ei16_v_f16m2(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f16m2_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_f16m2_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f16m4 | ( | ... | ) | __riscv_vsuxseg2ei16_v_f16m4(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f16m4_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_f16m4_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg2ei16_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg2ei16_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f32m1 | ( | ... | ) | __riscv_vsuxseg2ei16_v_f32m1(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f32m2 | ( | ... | ) | __riscv_vsuxseg2ei16_v_f32m2(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f32m2_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_f32m2_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f32m4 | ( | ... | ) | __riscv_vsuxseg2ei16_v_f32m4(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f32m4_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_f32m4_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg2ei16_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f64m1 | ( | ... | ) | __riscv_vsuxseg2ei16_v_f64m1(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f64m2 | ( | ... | ) | __riscv_vsuxseg2ei16_v_f64m2(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f64m2_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_f64m2_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f64m4 | ( | ... | ) | __riscv_vsuxseg2ei16_v_f64m4(__VA_ARGS__) |
| #define vsuxseg2ei16_v_f64m4_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_f64m4_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i16m1 | ( | ... | ) | __riscv_vsuxseg2ei16_v_i16m1(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i16m2 | ( | ... | ) | __riscv_vsuxseg2ei16_v_i16m2(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i16m2_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_i16m2_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i16m4 | ( | ... | ) | __riscv_vsuxseg2ei16_v_i16m4(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i16m4_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_i16m4_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg2ei16_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg2ei16_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i32m1 | ( | ... | ) | __riscv_vsuxseg2ei16_v_i32m1(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i32m2 | ( | ... | ) | __riscv_vsuxseg2ei16_v_i32m2(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i32m2_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_i32m2_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i32m4 | ( | ... | ) | __riscv_vsuxseg2ei16_v_i32m4(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i32m4_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_i32m4_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg2ei16_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i64m1 | ( | ... | ) | __riscv_vsuxseg2ei16_v_i64m1(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i64m2 | ( | ... | ) | __riscv_vsuxseg2ei16_v_i64m2(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i64m2_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_i64m2_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i64m4 | ( | ... | ) | __riscv_vsuxseg2ei16_v_i64m4(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i64m4_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_i64m4_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i8m1 | ( | ... | ) | __riscv_vsuxseg2ei16_v_i8m1(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i8m2 | ( | ... | ) | __riscv_vsuxseg2ei16_v_i8m2(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i8m2_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_i8m2_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i8m4 | ( | ... | ) | __riscv_vsuxseg2ei16_v_i8m4(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i8m4_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_i8m4_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg2ei16_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg2ei16_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg2ei16_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg2ei16_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u16m1 | ( | ... | ) | __riscv_vsuxseg2ei16_v_u16m1(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u16m2 | ( | ... | ) | __riscv_vsuxseg2ei16_v_u16m2(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u16m2_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_u16m2_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u16m4 | ( | ... | ) | __riscv_vsuxseg2ei16_v_u16m4(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u16m4_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_u16m4_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg2ei16_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg2ei16_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u32m1 | ( | ... | ) | __riscv_vsuxseg2ei16_v_u32m1(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u32m2 | ( | ... | ) | __riscv_vsuxseg2ei16_v_u32m2(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u32m2_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_u32m2_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u32m4 | ( | ... | ) | __riscv_vsuxseg2ei16_v_u32m4(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u32m4_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_u32m4_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg2ei16_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u64m1 | ( | ... | ) | __riscv_vsuxseg2ei16_v_u64m1(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u64m2 | ( | ... | ) | __riscv_vsuxseg2ei16_v_u64m2(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u64m2_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_u64m2_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u64m4 | ( | ... | ) | __riscv_vsuxseg2ei16_v_u64m4(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u64m4_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_u64m4_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u8m1 | ( | ... | ) | __riscv_vsuxseg2ei16_v_u8m1(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u8m2 | ( | ... | ) | __riscv_vsuxseg2ei16_v_u8m2(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u8m2_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_u8m2_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u8m4 | ( | ... | ) | __riscv_vsuxseg2ei16_v_u8m4(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u8m4_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_u8m4_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg2ei16_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg2ei16_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg2ei16_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg2ei16_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg2ei16_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f16m1 | ( | ... | ) | __riscv_vsuxseg2ei32_v_f16m1(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f16m2 | ( | ... | ) | __riscv_vsuxseg2ei32_v_f16m2(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f16m2_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_f16m2_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f16m4 | ( | ... | ) | __riscv_vsuxseg2ei32_v_f16m4(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f16m4_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_f16m4_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg2ei32_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg2ei32_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f32m1 | ( | ... | ) | __riscv_vsuxseg2ei32_v_f32m1(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f32m2 | ( | ... | ) | __riscv_vsuxseg2ei32_v_f32m2(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f32m2_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_f32m2_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f32m4 | ( | ... | ) | __riscv_vsuxseg2ei32_v_f32m4(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f32m4_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_f32m4_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg2ei32_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f64m1 | ( | ... | ) | __riscv_vsuxseg2ei32_v_f64m1(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f64m2 | ( | ... | ) | __riscv_vsuxseg2ei32_v_f64m2(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f64m2_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_f64m2_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f64m4 | ( | ... | ) | __riscv_vsuxseg2ei32_v_f64m4(__VA_ARGS__) |
| #define vsuxseg2ei32_v_f64m4_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_f64m4_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i16m1 | ( | ... | ) | __riscv_vsuxseg2ei32_v_i16m1(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i16m2 | ( | ... | ) | __riscv_vsuxseg2ei32_v_i16m2(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i16m2_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_i16m2_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i16m4 | ( | ... | ) | __riscv_vsuxseg2ei32_v_i16m4(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i16m4_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_i16m4_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg2ei32_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg2ei32_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i32m1 | ( | ... | ) | __riscv_vsuxseg2ei32_v_i32m1(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i32m2 | ( | ... | ) | __riscv_vsuxseg2ei32_v_i32m2(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i32m2_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_i32m2_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i32m4 | ( | ... | ) | __riscv_vsuxseg2ei32_v_i32m4(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i32m4_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_i32m4_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg2ei32_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i64m1 | ( | ... | ) | __riscv_vsuxseg2ei32_v_i64m1(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i64m2 | ( | ... | ) | __riscv_vsuxseg2ei32_v_i64m2(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i64m2_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_i64m2_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i64m4 | ( | ... | ) | __riscv_vsuxseg2ei32_v_i64m4(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i64m4_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_i64m4_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i8m1 | ( | ... | ) | __riscv_vsuxseg2ei32_v_i8m1(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i8m2 | ( | ... | ) | __riscv_vsuxseg2ei32_v_i8m2(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i8m2_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_i8m2_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg2ei32_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg2ei32_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg2ei32_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg2ei32_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u16m1 | ( | ... | ) | __riscv_vsuxseg2ei32_v_u16m1(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u16m2 | ( | ... | ) | __riscv_vsuxseg2ei32_v_u16m2(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u16m2_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_u16m2_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u16m4 | ( | ... | ) | __riscv_vsuxseg2ei32_v_u16m4(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u16m4_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_u16m4_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg2ei32_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg2ei32_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u32m1 | ( | ... | ) | __riscv_vsuxseg2ei32_v_u32m1(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u32m2 | ( | ... | ) | __riscv_vsuxseg2ei32_v_u32m2(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u32m2_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_u32m2_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u32m4 | ( | ... | ) | __riscv_vsuxseg2ei32_v_u32m4(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u32m4_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_u32m4_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg2ei32_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u64m1 | ( | ... | ) | __riscv_vsuxseg2ei32_v_u64m1(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u64m2 | ( | ... | ) | __riscv_vsuxseg2ei32_v_u64m2(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u64m2_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_u64m2_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u64m4 | ( | ... | ) | __riscv_vsuxseg2ei32_v_u64m4(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u64m4_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_u64m4_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u8m1 | ( | ... | ) | __riscv_vsuxseg2ei32_v_u8m1(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u8m2 | ( | ... | ) | __riscv_vsuxseg2ei32_v_u8m2(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u8m2_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_u8m2_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg2ei32_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg2ei32_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg2ei32_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg2ei32_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg2ei32_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f16m1 | ( | ... | ) | __riscv_vsuxseg2ei64_v_f16m1(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f16m2 | ( | ... | ) | __riscv_vsuxseg2ei64_v_f16m2(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f16m2_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_f16m2_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg2ei64_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg2ei64_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f32m1 | ( | ... | ) | __riscv_vsuxseg2ei64_v_f32m1(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f32m2 | ( | ... | ) | __riscv_vsuxseg2ei64_v_f32m2(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f32m2_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_f32m2_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f32m4 | ( | ... | ) | __riscv_vsuxseg2ei64_v_f32m4(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f32m4_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_f32m4_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg2ei64_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f64m1 | ( | ... | ) | __riscv_vsuxseg2ei64_v_f64m1(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f64m2 | ( | ... | ) | __riscv_vsuxseg2ei64_v_f64m2(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f64m2_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_f64m2_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f64m4 | ( | ... | ) | __riscv_vsuxseg2ei64_v_f64m4(__VA_ARGS__) |
| #define vsuxseg2ei64_v_f64m4_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_f64m4_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i16m1 | ( | ... | ) | __riscv_vsuxseg2ei64_v_i16m1(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i16m2 | ( | ... | ) | __riscv_vsuxseg2ei64_v_i16m2(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i16m2_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_i16m2_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg2ei64_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg2ei64_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i32m1 | ( | ... | ) | __riscv_vsuxseg2ei64_v_i32m1(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i32m2 | ( | ... | ) | __riscv_vsuxseg2ei64_v_i32m2(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i32m2_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_i32m2_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i32m4 | ( | ... | ) | __riscv_vsuxseg2ei64_v_i32m4(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i32m4_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_i32m4_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg2ei64_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i64m1 | ( | ... | ) | __riscv_vsuxseg2ei64_v_i64m1(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i64m2 | ( | ... | ) | __riscv_vsuxseg2ei64_v_i64m2(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i64m2_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_i64m2_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i64m4 | ( | ... | ) | __riscv_vsuxseg2ei64_v_i64m4(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i64m4_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_i64m4_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i8m1 | ( | ... | ) | __riscv_vsuxseg2ei64_v_i8m1(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg2ei64_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg2ei64_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg2ei64_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg2ei64_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u16m1 | ( | ... | ) | __riscv_vsuxseg2ei64_v_u16m1(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u16m2 | ( | ... | ) | __riscv_vsuxseg2ei64_v_u16m2(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u16m2_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_u16m2_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg2ei64_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg2ei64_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u32m1 | ( | ... | ) | __riscv_vsuxseg2ei64_v_u32m1(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u32m2 | ( | ... | ) | __riscv_vsuxseg2ei64_v_u32m2(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u32m2_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_u32m2_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u32m4 | ( | ... | ) | __riscv_vsuxseg2ei64_v_u32m4(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u32m4_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_u32m4_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg2ei64_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u64m1 | ( | ... | ) | __riscv_vsuxseg2ei64_v_u64m1(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u64m2 | ( | ... | ) | __riscv_vsuxseg2ei64_v_u64m2(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u64m2_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_u64m2_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u64m4 | ( | ... | ) | __riscv_vsuxseg2ei64_v_u64m4(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u64m4_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_u64m4_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u8m1 | ( | ... | ) | __riscv_vsuxseg2ei64_v_u8m1(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg2ei64_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg2ei64_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg2ei64_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg2ei64_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg2ei64_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f16m1 | ( | ... | ) | __riscv_vsuxseg2ei8_v_f16m1(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f16m2 | ( | ... | ) | __riscv_vsuxseg2ei8_v_f16m2(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f16m2_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_f16m2_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f16m4 | ( | ... | ) | __riscv_vsuxseg2ei8_v_f16m4(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f16m4_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_f16m4_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg2ei8_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg2ei8_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f32m1 | ( | ... | ) | __riscv_vsuxseg2ei8_v_f32m1(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f32m2 | ( | ... | ) | __riscv_vsuxseg2ei8_v_f32m2(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f32m2_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_f32m2_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f32m4 | ( | ... | ) | __riscv_vsuxseg2ei8_v_f32m4(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f32m4_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_f32m4_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg2ei8_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f64m1 | ( | ... | ) | __riscv_vsuxseg2ei8_v_f64m1(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f64m2 | ( | ... | ) | __riscv_vsuxseg2ei8_v_f64m2(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f64m2_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_f64m2_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f64m4 | ( | ... | ) | __riscv_vsuxseg2ei8_v_f64m4(__VA_ARGS__) |
| #define vsuxseg2ei8_v_f64m4_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_f64m4_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i16m1 | ( | ... | ) | __riscv_vsuxseg2ei8_v_i16m1(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i16m2 | ( | ... | ) | __riscv_vsuxseg2ei8_v_i16m2(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i16m2_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_i16m2_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i16m4 | ( | ... | ) | __riscv_vsuxseg2ei8_v_i16m4(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i16m4_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_i16m4_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg2ei8_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg2ei8_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i32m1 | ( | ... | ) | __riscv_vsuxseg2ei8_v_i32m1(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i32m2 | ( | ... | ) | __riscv_vsuxseg2ei8_v_i32m2(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i32m2_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_i32m2_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i32m4 | ( | ... | ) | __riscv_vsuxseg2ei8_v_i32m4(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i32m4_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_i32m4_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg2ei8_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i64m1 | ( | ... | ) | __riscv_vsuxseg2ei8_v_i64m1(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i64m2 | ( | ... | ) | __riscv_vsuxseg2ei8_v_i64m2(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i64m2_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_i64m2_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i64m4 | ( | ... | ) | __riscv_vsuxseg2ei8_v_i64m4(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i64m4_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_i64m4_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i8m1 | ( | ... | ) | __riscv_vsuxseg2ei8_v_i8m1(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i8m2 | ( | ... | ) | __riscv_vsuxseg2ei8_v_i8m2(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i8m2_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_i8m2_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i8m4 | ( | ... | ) | __riscv_vsuxseg2ei8_v_i8m4(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i8m4_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_i8m4_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg2ei8_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg2ei8_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg2ei8_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg2ei8_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u16m1 | ( | ... | ) | __riscv_vsuxseg2ei8_v_u16m1(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u16m2 | ( | ... | ) | __riscv_vsuxseg2ei8_v_u16m2(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u16m2_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_u16m2_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u16m4 | ( | ... | ) | __riscv_vsuxseg2ei8_v_u16m4(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u16m4_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_u16m4_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg2ei8_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg2ei8_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u32m1 | ( | ... | ) | __riscv_vsuxseg2ei8_v_u32m1(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u32m2 | ( | ... | ) | __riscv_vsuxseg2ei8_v_u32m2(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u32m2_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_u32m2_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u32m4 | ( | ... | ) | __riscv_vsuxseg2ei8_v_u32m4(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u32m4_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_u32m4_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg2ei8_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u64m1 | ( | ... | ) | __riscv_vsuxseg2ei8_v_u64m1(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u64m2 | ( | ... | ) | __riscv_vsuxseg2ei8_v_u64m2(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u64m2_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_u64m2_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u64m4 | ( | ... | ) | __riscv_vsuxseg2ei8_v_u64m4(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u64m4_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_u64m4_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u8m1 | ( | ... | ) | __riscv_vsuxseg2ei8_v_u8m1(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u8m2 | ( | ... | ) | __riscv_vsuxseg2ei8_v_u8m2(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u8m2_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_u8m2_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u8m4 | ( | ... | ) | __riscv_vsuxseg2ei8_v_u8m4(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u8m4_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_u8m4_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg2ei8_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg2ei8_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg2ei8_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg2ei8_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg2ei8_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_f16m1 | ( | ... | ) | __riscv_vsuxseg3ei16_v_f16m1(__VA_ARGS__) |
| #define vsuxseg3ei16_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_f16m2 | ( | ... | ) | __riscv_vsuxseg3ei16_v_f16m2(__VA_ARGS__) |
| #define vsuxseg3ei16_v_f16m2_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_f16m2_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg3ei16_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg3ei16_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg3ei16_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg3ei16_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_f32m1 | ( | ... | ) | __riscv_vsuxseg3ei16_v_f32m1(__VA_ARGS__) |
| #define vsuxseg3ei16_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_f32m2 | ( | ... | ) | __riscv_vsuxseg3ei16_v_f32m2(__VA_ARGS__) |
| #define vsuxseg3ei16_v_f32m2_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_f32m2_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg3ei16_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg3ei16_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_f64m1 | ( | ... | ) | __riscv_vsuxseg3ei16_v_f64m1(__VA_ARGS__) |
| #define vsuxseg3ei16_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_f64m2 | ( | ... | ) | __riscv_vsuxseg3ei16_v_f64m2(__VA_ARGS__) |
| #define vsuxseg3ei16_v_f64m2_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_f64m2_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i16m1 | ( | ... | ) | __riscv_vsuxseg3ei16_v_i16m1(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i16m2 | ( | ... | ) | __riscv_vsuxseg3ei16_v_i16m2(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i16m2_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_i16m2_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg3ei16_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg3ei16_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i32m1 | ( | ... | ) | __riscv_vsuxseg3ei16_v_i32m1(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i32m2 | ( | ... | ) | __riscv_vsuxseg3ei16_v_i32m2(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i32m2_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_i32m2_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg3ei16_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i64m1 | ( | ... | ) | __riscv_vsuxseg3ei16_v_i64m1(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i64m2 | ( | ... | ) | __riscv_vsuxseg3ei16_v_i64m2(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i64m2_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_i64m2_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i8m1 | ( | ... | ) | __riscv_vsuxseg3ei16_v_i8m1(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i8m2 | ( | ... | ) | __riscv_vsuxseg3ei16_v_i8m2(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i8m2_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_i8m2_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg3ei16_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg3ei16_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg3ei16_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg3ei16_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u16m1 | ( | ... | ) | __riscv_vsuxseg3ei16_v_u16m1(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u16m2 | ( | ... | ) | __riscv_vsuxseg3ei16_v_u16m2(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u16m2_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_u16m2_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg3ei16_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg3ei16_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u32m1 | ( | ... | ) | __riscv_vsuxseg3ei16_v_u32m1(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u32m2 | ( | ... | ) | __riscv_vsuxseg3ei16_v_u32m2(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u32m2_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_u32m2_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg3ei16_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u64m1 | ( | ... | ) | __riscv_vsuxseg3ei16_v_u64m1(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u64m2 | ( | ... | ) | __riscv_vsuxseg3ei16_v_u64m2(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u64m2_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_u64m2_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u8m1 | ( | ... | ) | __riscv_vsuxseg3ei16_v_u8m1(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u8m2 | ( | ... | ) | __riscv_vsuxseg3ei16_v_u8m2(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u8m2_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_u8m2_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg3ei16_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg3ei16_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg3ei16_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg3ei16_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg3ei16_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_f16m1 | ( | ... | ) | __riscv_vsuxseg3ei32_v_f16m1(__VA_ARGS__) |
| #define vsuxseg3ei32_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_f16m2 | ( | ... | ) | __riscv_vsuxseg3ei32_v_f16m2(__VA_ARGS__) |
| #define vsuxseg3ei32_v_f16m2_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_f16m2_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg3ei32_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg3ei32_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg3ei32_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg3ei32_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_f32m1 | ( | ... | ) | __riscv_vsuxseg3ei32_v_f32m1(__VA_ARGS__) |
| #define vsuxseg3ei32_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_f32m2 | ( | ... | ) | __riscv_vsuxseg3ei32_v_f32m2(__VA_ARGS__) |
| #define vsuxseg3ei32_v_f32m2_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_f32m2_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg3ei32_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg3ei32_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_f64m1 | ( | ... | ) | __riscv_vsuxseg3ei32_v_f64m1(__VA_ARGS__) |
| #define vsuxseg3ei32_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_f64m2 | ( | ... | ) | __riscv_vsuxseg3ei32_v_f64m2(__VA_ARGS__) |
| #define vsuxseg3ei32_v_f64m2_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_f64m2_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i16m1 | ( | ... | ) | __riscv_vsuxseg3ei32_v_i16m1(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i16m2 | ( | ... | ) | __riscv_vsuxseg3ei32_v_i16m2(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i16m2_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_i16m2_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg3ei32_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg3ei32_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i32m1 | ( | ... | ) | __riscv_vsuxseg3ei32_v_i32m1(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i32m2 | ( | ... | ) | __riscv_vsuxseg3ei32_v_i32m2(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i32m2_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_i32m2_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg3ei32_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i64m1 | ( | ... | ) | __riscv_vsuxseg3ei32_v_i64m1(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i64m2 | ( | ... | ) | __riscv_vsuxseg3ei32_v_i64m2(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i64m2_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_i64m2_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i8m1 | ( | ... | ) | __riscv_vsuxseg3ei32_v_i8m1(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i8m2 | ( | ... | ) | __riscv_vsuxseg3ei32_v_i8m2(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i8m2_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_i8m2_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg3ei32_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg3ei32_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg3ei32_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg3ei32_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u16m1 | ( | ... | ) | __riscv_vsuxseg3ei32_v_u16m1(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u16m2 | ( | ... | ) | __riscv_vsuxseg3ei32_v_u16m2(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u16m2_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_u16m2_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg3ei32_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg3ei32_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u32m1 | ( | ... | ) | __riscv_vsuxseg3ei32_v_u32m1(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u32m2 | ( | ... | ) | __riscv_vsuxseg3ei32_v_u32m2(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u32m2_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_u32m2_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg3ei32_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u64m1 | ( | ... | ) | __riscv_vsuxseg3ei32_v_u64m1(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u64m2 | ( | ... | ) | __riscv_vsuxseg3ei32_v_u64m2(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u64m2_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_u64m2_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u8m1 | ( | ... | ) | __riscv_vsuxseg3ei32_v_u8m1(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u8m2 | ( | ... | ) | __riscv_vsuxseg3ei32_v_u8m2(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u8m2_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_u8m2_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg3ei32_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg3ei32_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg3ei32_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg3ei32_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg3ei32_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_f16m1 | ( | ... | ) | __riscv_vsuxseg3ei64_v_f16m1(__VA_ARGS__) |
| #define vsuxseg3ei64_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_f16m2 | ( | ... | ) | __riscv_vsuxseg3ei64_v_f16m2(__VA_ARGS__) |
| #define vsuxseg3ei64_v_f16m2_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_f16m2_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg3ei64_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg3ei64_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg3ei64_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg3ei64_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_f32m1 | ( | ... | ) | __riscv_vsuxseg3ei64_v_f32m1(__VA_ARGS__) |
| #define vsuxseg3ei64_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_f32m2 | ( | ... | ) | __riscv_vsuxseg3ei64_v_f32m2(__VA_ARGS__) |
| #define vsuxseg3ei64_v_f32m2_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_f32m2_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg3ei64_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg3ei64_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_f64m1 | ( | ... | ) | __riscv_vsuxseg3ei64_v_f64m1(__VA_ARGS__) |
| #define vsuxseg3ei64_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_f64m2 | ( | ... | ) | __riscv_vsuxseg3ei64_v_f64m2(__VA_ARGS__) |
| #define vsuxseg3ei64_v_f64m2_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_f64m2_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i16m1 | ( | ... | ) | __riscv_vsuxseg3ei64_v_i16m1(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i16m2 | ( | ... | ) | __riscv_vsuxseg3ei64_v_i16m2(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i16m2_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_i16m2_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg3ei64_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg3ei64_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i32m1 | ( | ... | ) | __riscv_vsuxseg3ei64_v_i32m1(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i32m2 | ( | ... | ) | __riscv_vsuxseg3ei64_v_i32m2(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i32m2_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_i32m2_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg3ei64_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i64m1 | ( | ... | ) | __riscv_vsuxseg3ei64_v_i64m1(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i64m2 | ( | ... | ) | __riscv_vsuxseg3ei64_v_i64m2(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i64m2_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_i64m2_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i8m1 | ( | ... | ) | __riscv_vsuxseg3ei64_v_i8m1(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg3ei64_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg3ei64_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg3ei64_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg3ei64_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u16m1 | ( | ... | ) | __riscv_vsuxseg3ei64_v_u16m1(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u16m2 | ( | ... | ) | __riscv_vsuxseg3ei64_v_u16m2(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u16m2_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_u16m2_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg3ei64_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg3ei64_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u32m1 | ( | ... | ) | __riscv_vsuxseg3ei64_v_u32m1(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u32m2 | ( | ... | ) | __riscv_vsuxseg3ei64_v_u32m2(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u32m2_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_u32m2_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg3ei64_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u64m1 | ( | ... | ) | __riscv_vsuxseg3ei64_v_u64m1(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u64m2 | ( | ... | ) | __riscv_vsuxseg3ei64_v_u64m2(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u64m2_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_u64m2_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u8m1 | ( | ... | ) | __riscv_vsuxseg3ei64_v_u8m1(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg3ei64_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg3ei64_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg3ei64_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg3ei64_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg3ei64_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_f16m1 | ( | ... | ) | __riscv_vsuxseg3ei8_v_f16m1(__VA_ARGS__) |
| #define vsuxseg3ei8_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_f16m2 | ( | ... | ) | __riscv_vsuxseg3ei8_v_f16m2(__VA_ARGS__) |
| #define vsuxseg3ei8_v_f16m2_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_f16m2_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg3ei8_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg3ei8_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg3ei8_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg3ei8_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_f32m1 | ( | ... | ) | __riscv_vsuxseg3ei8_v_f32m1(__VA_ARGS__) |
| #define vsuxseg3ei8_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_f32m2 | ( | ... | ) | __riscv_vsuxseg3ei8_v_f32m2(__VA_ARGS__) |
| #define vsuxseg3ei8_v_f32m2_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_f32m2_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg3ei8_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg3ei8_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_f64m1 | ( | ... | ) | __riscv_vsuxseg3ei8_v_f64m1(__VA_ARGS__) |
| #define vsuxseg3ei8_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_f64m2 | ( | ... | ) | __riscv_vsuxseg3ei8_v_f64m2(__VA_ARGS__) |
| #define vsuxseg3ei8_v_f64m2_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_f64m2_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i16m1 | ( | ... | ) | __riscv_vsuxseg3ei8_v_i16m1(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i16m2 | ( | ... | ) | __riscv_vsuxseg3ei8_v_i16m2(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i16m2_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_i16m2_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg3ei8_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg3ei8_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i32m1 | ( | ... | ) | __riscv_vsuxseg3ei8_v_i32m1(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i32m2 | ( | ... | ) | __riscv_vsuxseg3ei8_v_i32m2(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i32m2_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_i32m2_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg3ei8_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i64m1 | ( | ... | ) | __riscv_vsuxseg3ei8_v_i64m1(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i64m2 | ( | ... | ) | __riscv_vsuxseg3ei8_v_i64m2(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i64m2_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_i64m2_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i8m1 | ( | ... | ) | __riscv_vsuxseg3ei8_v_i8m1(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i8m2 | ( | ... | ) | __riscv_vsuxseg3ei8_v_i8m2(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i8m2_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_i8m2_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg3ei8_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg3ei8_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg3ei8_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg3ei8_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u16m1 | ( | ... | ) | __riscv_vsuxseg3ei8_v_u16m1(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u16m2 | ( | ... | ) | __riscv_vsuxseg3ei8_v_u16m2(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u16m2_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_u16m2_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg3ei8_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg3ei8_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u32m1 | ( | ... | ) | __riscv_vsuxseg3ei8_v_u32m1(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u32m2 | ( | ... | ) | __riscv_vsuxseg3ei8_v_u32m2(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u32m2_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_u32m2_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg3ei8_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u64m1 | ( | ... | ) | __riscv_vsuxseg3ei8_v_u64m1(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u64m2 | ( | ... | ) | __riscv_vsuxseg3ei8_v_u64m2(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u64m2_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_u64m2_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u8m1 | ( | ... | ) | __riscv_vsuxseg3ei8_v_u8m1(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u8m2 | ( | ... | ) | __riscv_vsuxseg3ei8_v_u8m2(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u8m2_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_u8m2_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg3ei8_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg3ei8_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg3ei8_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg3ei8_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg3ei8_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_f16m1 | ( | ... | ) | __riscv_vsuxseg4ei16_v_f16m1(__VA_ARGS__) |
| #define vsuxseg4ei16_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_f16m2 | ( | ... | ) | __riscv_vsuxseg4ei16_v_f16m2(__VA_ARGS__) |
| #define vsuxseg4ei16_v_f16m2_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_f16m2_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg4ei16_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg4ei16_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg4ei16_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg4ei16_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_f32m1 | ( | ... | ) | __riscv_vsuxseg4ei16_v_f32m1(__VA_ARGS__) |
| #define vsuxseg4ei16_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_f32m2 | ( | ... | ) | __riscv_vsuxseg4ei16_v_f32m2(__VA_ARGS__) |
| #define vsuxseg4ei16_v_f32m2_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_f32m2_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg4ei16_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg4ei16_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_f64m1 | ( | ... | ) | __riscv_vsuxseg4ei16_v_f64m1(__VA_ARGS__) |
| #define vsuxseg4ei16_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_f64m2 | ( | ... | ) | __riscv_vsuxseg4ei16_v_f64m2(__VA_ARGS__) |
| #define vsuxseg4ei16_v_f64m2_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_f64m2_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i16m1 | ( | ... | ) | __riscv_vsuxseg4ei16_v_i16m1(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i16m2 | ( | ... | ) | __riscv_vsuxseg4ei16_v_i16m2(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i16m2_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_i16m2_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg4ei16_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg4ei16_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i32m1 | ( | ... | ) | __riscv_vsuxseg4ei16_v_i32m1(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i32m2 | ( | ... | ) | __riscv_vsuxseg4ei16_v_i32m2(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i32m2_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_i32m2_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg4ei16_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i64m1 | ( | ... | ) | __riscv_vsuxseg4ei16_v_i64m1(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i64m2 | ( | ... | ) | __riscv_vsuxseg4ei16_v_i64m2(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i64m2_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_i64m2_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i8m1 | ( | ... | ) | __riscv_vsuxseg4ei16_v_i8m1(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i8m2 | ( | ... | ) | __riscv_vsuxseg4ei16_v_i8m2(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i8m2_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_i8m2_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg4ei16_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg4ei16_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg4ei16_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg4ei16_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u16m1 | ( | ... | ) | __riscv_vsuxseg4ei16_v_u16m1(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u16m2 | ( | ... | ) | __riscv_vsuxseg4ei16_v_u16m2(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u16m2_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_u16m2_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg4ei16_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg4ei16_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u32m1 | ( | ... | ) | __riscv_vsuxseg4ei16_v_u32m1(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u32m2 | ( | ... | ) | __riscv_vsuxseg4ei16_v_u32m2(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u32m2_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_u32m2_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg4ei16_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u64m1 | ( | ... | ) | __riscv_vsuxseg4ei16_v_u64m1(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u64m2 | ( | ... | ) | __riscv_vsuxseg4ei16_v_u64m2(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u64m2_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_u64m2_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u8m1 | ( | ... | ) | __riscv_vsuxseg4ei16_v_u8m1(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u8m2 | ( | ... | ) | __riscv_vsuxseg4ei16_v_u8m2(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u8m2_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_u8m2_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg4ei16_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg4ei16_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg4ei16_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg4ei16_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg4ei16_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_f16m1 | ( | ... | ) | __riscv_vsuxseg4ei32_v_f16m1(__VA_ARGS__) |
| #define vsuxseg4ei32_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_f16m2 | ( | ... | ) | __riscv_vsuxseg4ei32_v_f16m2(__VA_ARGS__) |
| #define vsuxseg4ei32_v_f16m2_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_f16m2_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg4ei32_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg4ei32_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg4ei32_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg4ei32_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_f32m1 | ( | ... | ) | __riscv_vsuxseg4ei32_v_f32m1(__VA_ARGS__) |
| #define vsuxseg4ei32_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_f32m2 | ( | ... | ) | __riscv_vsuxseg4ei32_v_f32m2(__VA_ARGS__) |
| #define vsuxseg4ei32_v_f32m2_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_f32m2_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg4ei32_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg4ei32_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_f64m1 | ( | ... | ) | __riscv_vsuxseg4ei32_v_f64m1(__VA_ARGS__) |
| #define vsuxseg4ei32_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_f64m2 | ( | ... | ) | __riscv_vsuxseg4ei32_v_f64m2(__VA_ARGS__) |
| #define vsuxseg4ei32_v_f64m2_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_f64m2_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i16m1 | ( | ... | ) | __riscv_vsuxseg4ei32_v_i16m1(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i16m2 | ( | ... | ) | __riscv_vsuxseg4ei32_v_i16m2(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i16m2_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_i16m2_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg4ei32_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg4ei32_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i32m1 | ( | ... | ) | __riscv_vsuxseg4ei32_v_i32m1(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i32m2 | ( | ... | ) | __riscv_vsuxseg4ei32_v_i32m2(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i32m2_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_i32m2_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg4ei32_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i64m1 | ( | ... | ) | __riscv_vsuxseg4ei32_v_i64m1(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i64m2 | ( | ... | ) | __riscv_vsuxseg4ei32_v_i64m2(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i64m2_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_i64m2_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i8m1 | ( | ... | ) | __riscv_vsuxseg4ei32_v_i8m1(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i8m2 | ( | ... | ) | __riscv_vsuxseg4ei32_v_i8m2(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i8m2_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_i8m2_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg4ei32_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg4ei32_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg4ei32_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg4ei32_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u16m1 | ( | ... | ) | __riscv_vsuxseg4ei32_v_u16m1(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u16m2 | ( | ... | ) | __riscv_vsuxseg4ei32_v_u16m2(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u16m2_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_u16m2_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg4ei32_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg4ei32_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u32m1 | ( | ... | ) | __riscv_vsuxseg4ei32_v_u32m1(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u32m2 | ( | ... | ) | __riscv_vsuxseg4ei32_v_u32m2(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u32m2_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_u32m2_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg4ei32_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u64m1 | ( | ... | ) | __riscv_vsuxseg4ei32_v_u64m1(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u64m2 | ( | ... | ) | __riscv_vsuxseg4ei32_v_u64m2(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u64m2_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_u64m2_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u8m1 | ( | ... | ) | __riscv_vsuxseg4ei32_v_u8m1(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u8m2 | ( | ... | ) | __riscv_vsuxseg4ei32_v_u8m2(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u8m2_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_u8m2_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg4ei32_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg4ei32_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg4ei32_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg4ei32_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg4ei32_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_f16m1 | ( | ... | ) | __riscv_vsuxseg4ei64_v_f16m1(__VA_ARGS__) |
| #define vsuxseg4ei64_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_f16m2 | ( | ... | ) | __riscv_vsuxseg4ei64_v_f16m2(__VA_ARGS__) |
| #define vsuxseg4ei64_v_f16m2_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_f16m2_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg4ei64_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg4ei64_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg4ei64_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg4ei64_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_f32m1 | ( | ... | ) | __riscv_vsuxseg4ei64_v_f32m1(__VA_ARGS__) |
| #define vsuxseg4ei64_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_f32m2 | ( | ... | ) | __riscv_vsuxseg4ei64_v_f32m2(__VA_ARGS__) |
| #define vsuxseg4ei64_v_f32m2_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_f32m2_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg4ei64_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg4ei64_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_f64m1 | ( | ... | ) | __riscv_vsuxseg4ei64_v_f64m1(__VA_ARGS__) |
| #define vsuxseg4ei64_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_f64m2 | ( | ... | ) | __riscv_vsuxseg4ei64_v_f64m2(__VA_ARGS__) |
| #define vsuxseg4ei64_v_f64m2_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_f64m2_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i16m1 | ( | ... | ) | __riscv_vsuxseg4ei64_v_i16m1(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i16m2 | ( | ... | ) | __riscv_vsuxseg4ei64_v_i16m2(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i16m2_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_i16m2_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg4ei64_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg4ei64_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i32m1 | ( | ... | ) | __riscv_vsuxseg4ei64_v_i32m1(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i32m2 | ( | ... | ) | __riscv_vsuxseg4ei64_v_i32m2(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i32m2_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_i32m2_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg4ei64_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i64m1 | ( | ... | ) | __riscv_vsuxseg4ei64_v_i64m1(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i64m2 | ( | ... | ) | __riscv_vsuxseg4ei64_v_i64m2(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i64m2_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_i64m2_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i8m1 | ( | ... | ) | __riscv_vsuxseg4ei64_v_i8m1(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg4ei64_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg4ei64_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg4ei64_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg4ei64_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u16m1 | ( | ... | ) | __riscv_vsuxseg4ei64_v_u16m1(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u16m2 | ( | ... | ) | __riscv_vsuxseg4ei64_v_u16m2(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u16m2_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_u16m2_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg4ei64_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg4ei64_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u32m1 | ( | ... | ) | __riscv_vsuxseg4ei64_v_u32m1(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u32m2 | ( | ... | ) | __riscv_vsuxseg4ei64_v_u32m2(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u32m2_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_u32m2_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg4ei64_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u64m1 | ( | ... | ) | __riscv_vsuxseg4ei64_v_u64m1(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u64m2 | ( | ... | ) | __riscv_vsuxseg4ei64_v_u64m2(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u64m2_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_u64m2_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u8m1 | ( | ... | ) | __riscv_vsuxseg4ei64_v_u8m1(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg4ei64_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg4ei64_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg4ei64_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg4ei64_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg4ei64_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_f16m1 | ( | ... | ) | __riscv_vsuxseg4ei8_v_f16m1(__VA_ARGS__) |
| #define vsuxseg4ei8_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_f16m2 | ( | ... | ) | __riscv_vsuxseg4ei8_v_f16m2(__VA_ARGS__) |
| #define vsuxseg4ei8_v_f16m2_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_f16m2_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg4ei8_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg4ei8_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg4ei8_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg4ei8_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_f32m1 | ( | ... | ) | __riscv_vsuxseg4ei8_v_f32m1(__VA_ARGS__) |
| #define vsuxseg4ei8_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_f32m2 | ( | ... | ) | __riscv_vsuxseg4ei8_v_f32m2(__VA_ARGS__) |
| #define vsuxseg4ei8_v_f32m2_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_f32m2_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg4ei8_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg4ei8_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_f64m1 | ( | ... | ) | __riscv_vsuxseg4ei8_v_f64m1(__VA_ARGS__) |
| #define vsuxseg4ei8_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_f64m2 | ( | ... | ) | __riscv_vsuxseg4ei8_v_f64m2(__VA_ARGS__) |
| #define vsuxseg4ei8_v_f64m2_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_f64m2_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i16m1 | ( | ... | ) | __riscv_vsuxseg4ei8_v_i16m1(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i16m2 | ( | ... | ) | __riscv_vsuxseg4ei8_v_i16m2(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i16m2_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_i16m2_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg4ei8_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg4ei8_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i32m1 | ( | ... | ) | __riscv_vsuxseg4ei8_v_i32m1(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i32m2 | ( | ... | ) | __riscv_vsuxseg4ei8_v_i32m2(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i32m2_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_i32m2_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg4ei8_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i64m1 | ( | ... | ) | __riscv_vsuxseg4ei8_v_i64m1(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i64m2 | ( | ... | ) | __riscv_vsuxseg4ei8_v_i64m2(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i64m2_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_i64m2_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i8m1 | ( | ... | ) | __riscv_vsuxseg4ei8_v_i8m1(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i8m2 | ( | ... | ) | __riscv_vsuxseg4ei8_v_i8m2(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i8m2_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_i8m2_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg4ei8_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg4ei8_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg4ei8_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg4ei8_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u16m1 | ( | ... | ) | __riscv_vsuxseg4ei8_v_u16m1(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u16m2 | ( | ... | ) | __riscv_vsuxseg4ei8_v_u16m2(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u16m2_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_u16m2_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg4ei8_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg4ei8_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u32m1 | ( | ... | ) | __riscv_vsuxseg4ei8_v_u32m1(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u32m2 | ( | ... | ) | __riscv_vsuxseg4ei8_v_u32m2(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u32m2_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_u32m2_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg4ei8_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u64m1 | ( | ... | ) | __riscv_vsuxseg4ei8_v_u64m1(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u64m2 | ( | ... | ) | __riscv_vsuxseg4ei8_v_u64m2(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u64m2_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_u64m2_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u8m1 | ( | ... | ) | __riscv_vsuxseg4ei8_v_u8m1(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u8m2 | ( | ... | ) | __riscv_vsuxseg4ei8_v_u8m2(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u8m2_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_u8m2_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg4ei8_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg4ei8_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg4ei8_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg4ei8_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg4ei8_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_f16m1 | ( | ... | ) | __riscv_vsuxseg5ei16_v_f16m1(__VA_ARGS__) |
| #define vsuxseg5ei16_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg5ei16_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg5ei16_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg5ei16_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg5ei16_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_f32m1 | ( | ... | ) | __riscv_vsuxseg5ei16_v_f32m1(__VA_ARGS__) |
| #define vsuxseg5ei16_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg5ei16_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg5ei16_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_f64m1 | ( | ... | ) | __riscv_vsuxseg5ei16_v_f64m1(__VA_ARGS__) |
| #define vsuxseg5ei16_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_i16m1 | ( | ... | ) | __riscv_vsuxseg5ei16_v_i16m1(__VA_ARGS__) |
| #define vsuxseg5ei16_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg5ei16_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg5ei16_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg5ei16_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg5ei16_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_i32m1 | ( | ... | ) | __riscv_vsuxseg5ei16_v_i32m1(__VA_ARGS__) |
| #define vsuxseg5ei16_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg5ei16_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg5ei16_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_i64m1 | ( | ... | ) | __riscv_vsuxseg5ei16_v_i64m1(__VA_ARGS__) |
| #define vsuxseg5ei16_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_i8m1 | ( | ... | ) | __riscv_vsuxseg5ei16_v_i8m1(__VA_ARGS__) |
| #define vsuxseg5ei16_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg5ei16_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg5ei16_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg5ei16_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg5ei16_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg5ei16_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg5ei16_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_u16m1 | ( | ... | ) | __riscv_vsuxseg5ei16_v_u16m1(__VA_ARGS__) |
| #define vsuxseg5ei16_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg5ei16_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg5ei16_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg5ei16_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg5ei16_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_u32m1 | ( | ... | ) | __riscv_vsuxseg5ei16_v_u32m1(__VA_ARGS__) |
| #define vsuxseg5ei16_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg5ei16_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg5ei16_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_u64m1 | ( | ... | ) | __riscv_vsuxseg5ei16_v_u64m1(__VA_ARGS__) |
| #define vsuxseg5ei16_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_u8m1 | ( | ... | ) | __riscv_vsuxseg5ei16_v_u8m1(__VA_ARGS__) |
| #define vsuxseg5ei16_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg5ei16_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg5ei16_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg5ei16_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg5ei16_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg5ei16_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg5ei16_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg5ei16_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg5ei16_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_f16m1 | ( | ... | ) | __riscv_vsuxseg5ei32_v_f16m1(__VA_ARGS__) |
| #define vsuxseg5ei32_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg5ei32_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg5ei32_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg5ei32_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg5ei32_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_f32m1 | ( | ... | ) | __riscv_vsuxseg5ei32_v_f32m1(__VA_ARGS__) |
| #define vsuxseg5ei32_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg5ei32_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg5ei32_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_f64m1 | ( | ... | ) | __riscv_vsuxseg5ei32_v_f64m1(__VA_ARGS__) |
| #define vsuxseg5ei32_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_i16m1 | ( | ... | ) | __riscv_vsuxseg5ei32_v_i16m1(__VA_ARGS__) |
| #define vsuxseg5ei32_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg5ei32_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg5ei32_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg5ei32_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg5ei32_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_i32m1 | ( | ... | ) | __riscv_vsuxseg5ei32_v_i32m1(__VA_ARGS__) |
| #define vsuxseg5ei32_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg5ei32_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg5ei32_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_i64m1 | ( | ... | ) | __riscv_vsuxseg5ei32_v_i64m1(__VA_ARGS__) |
| #define vsuxseg5ei32_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_i8m1 | ( | ... | ) | __riscv_vsuxseg5ei32_v_i8m1(__VA_ARGS__) |
| #define vsuxseg5ei32_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg5ei32_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg5ei32_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg5ei32_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg5ei32_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg5ei32_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg5ei32_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_u16m1 | ( | ... | ) | __riscv_vsuxseg5ei32_v_u16m1(__VA_ARGS__) |
| #define vsuxseg5ei32_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg5ei32_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg5ei32_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg5ei32_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg5ei32_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_u32m1 | ( | ... | ) | __riscv_vsuxseg5ei32_v_u32m1(__VA_ARGS__) |
| #define vsuxseg5ei32_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg5ei32_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg5ei32_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_u64m1 | ( | ... | ) | __riscv_vsuxseg5ei32_v_u64m1(__VA_ARGS__) |
| #define vsuxseg5ei32_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_u8m1 | ( | ... | ) | __riscv_vsuxseg5ei32_v_u8m1(__VA_ARGS__) |
| #define vsuxseg5ei32_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg5ei32_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg5ei32_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg5ei32_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg5ei32_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg5ei32_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg5ei32_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg5ei32_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg5ei32_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_f16m1 | ( | ... | ) | __riscv_vsuxseg5ei64_v_f16m1(__VA_ARGS__) |
| #define vsuxseg5ei64_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg5ei64_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg5ei64_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg5ei64_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg5ei64_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_f32m1 | ( | ... | ) | __riscv_vsuxseg5ei64_v_f32m1(__VA_ARGS__) |
| #define vsuxseg5ei64_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg5ei64_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg5ei64_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_f64m1 | ( | ... | ) | __riscv_vsuxseg5ei64_v_f64m1(__VA_ARGS__) |
| #define vsuxseg5ei64_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_i16m1 | ( | ... | ) | __riscv_vsuxseg5ei64_v_i16m1(__VA_ARGS__) |
| #define vsuxseg5ei64_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg5ei64_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg5ei64_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg5ei64_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg5ei64_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_i32m1 | ( | ... | ) | __riscv_vsuxseg5ei64_v_i32m1(__VA_ARGS__) |
| #define vsuxseg5ei64_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg5ei64_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg5ei64_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_i64m1 | ( | ... | ) | __riscv_vsuxseg5ei64_v_i64m1(__VA_ARGS__) |
| #define vsuxseg5ei64_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_i8m1 | ( | ... | ) | __riscv_vsuxseg5ei64_v_i8m1(__VA_ARGS__) |
| #define vsuxseg5ei64_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg5ei64_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg5ei64_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg5ei64_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg5ei64_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg5ei64_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg5ei64_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_u16m1 | ( | ... | ) | __riscv_vsuxseg5ei64_v_u16m1(__VA_ARGS__) |
| #define vsuxseg5ei64_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg5ei64_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg5ei64_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg5ei64_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg5ei64_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_u32m1 | ( | ... | ) | __riscv_vsuxseg5ei64_v_u32m1(__VA_ARGS__) |
| #define vsuxseg5ei64_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg5ei64_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg5ei64_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_u64m1 | ( | ... | ) | __riscv_vsuxseg5ei64_v_u64m1(__VA_ARGS__) |
| #define vsuxseg5ei64_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_u8m1 | ( | ... | ) | __riscv_vsuxseg5ei64_v_u8m1(__VA_ARGS__) |
| #define vsuxseg5ei64_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg5ei64_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg5ei64_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg5ei64_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg5ei64_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg5ei64_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg5ei64_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg5ei64_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg5ei64_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_f16m1 | ( | ... | ) | __riscv_vsuxseg5ei8_v_f16m1(__VA_ARGS__) |
| #define vsuxseg5ei8_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg5ei8_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg5ei8_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg5ei8_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg5ei8_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_f32m1 | ( | ... | ) | __riscv_vsuxseg5ei8_v_f32m1(__VA_ARGS__) |
| #define vsuxseg5ei8_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg5ei8_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg5ei8_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_f64m1 | ( | ... | ) | __riscv_vsuxseg5ei8_v_f64m1(__VA_ARGS__) |
| #define vsuxseg5ei8_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_i16m1 | ( | ... | ) | __riscv_vsuxseg5ei8_v_i16m1(__VA_ARGS__) |
| #define vsuxseg5ei8_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg5ei8_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg5ei8_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg5ei8_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg5ei8_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_i32m1 | ( | ... | ) | __riscv_vsuxseg5ei8_v_i32m1(__VA_ARGS__) |
| #define vsuxseg5ei8_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg5ei8_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg5ei8_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_i64m1 | ( | ... | ) | __riscv_vsuxseg5ei8_v_i64m1(__VA_ARGS__) |
| #define vsuxseg5ei8_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_i8m1 | ( | ... | ) | __riscv_vsuxseg5ei8_v_i8m1(__VA_ARGS__) |
| #define vsuxseg5ei8_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg5ei8_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg5ei8_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg5ei8_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg5ei8_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg5ei8_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg5ei8_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_u16m1 | ( | ... | ) | __riscv_vsuxseg5ei8_v_u16m1(__VA_ARGS__) |
| #define vsuxseg5ei8_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg5ei8_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg5ei8_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg5ei8_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg5ei8_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_u32m1 | ( | ... | ) | __riscv_vsuxseg5ei8_v_u32m1(__VA_ARGS__) |
| #define vsuxseg5ei8_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg5ei8_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg5ei8_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_u64m1 | ( | ... | ) | __riscv_vsuxseg5ei8_v_u64m1(__VA_ARGS__) |
| #define vsuxseg5ei8_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_u8m1 | ( | ... | ) | __riscv_vsuxseg5ei8_v_u8m1(__VA_ARGS__) |
| #define vsuxseg5ei8_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg5ei8_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg5ei8_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg5ei8_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg5ei8_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg5ei8_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg5ei8_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg5ei8_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg5ei8_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_f16m1 | ( | ... | ) | __riscv_vsuxseg6ei16_v_f16m1(__VA_ARGS__) |
| #define vsuxseg6ei16_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg6ei16_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg6ei16_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg6ei16_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg6ei16_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_f32m1 | ( | ... | ) | __riscv_vsuxseg6ei16_v_f32m1(__VA_ARGS__) |
| #define vsuxseg6ei16_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg6ei16_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg6ei16_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_f64m1 | ( | ... | ) | __riscv_vsuxseg6ei16_v_f64m1(__VA_ARGS__) |
| #define vsuxseg6ei16_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_i16m1 | ( | ... | ) | __riscv_vsuxseg6ei16_v_i16m1(__VA_ARGS__) |
| #define vsuxseg6ei16_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg6ei16_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg6ei16_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg6ei16_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg6ei16_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_i32m1 | ( | ... | ) | __riscv_vsuxseg6ei16_v_i32m1(__VA_ARGS__) |
| #define vsuxseg6ei16_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg6ei16_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg6ei16_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_i64m1 | ( | ... | ) | __riscv_vsuxseg6ei16_v_i64m1(__VA_ARGS__) |
| #define vsuxseg6ei16_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_i8m1 | ( | ... | ) | __riscv_vsuxseg6ei16_v_i8m1(__VA_ARGS__) |
| #define vsuxseg6ei16_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg6ei16_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg6ei16_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg6ei16_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg6ei16_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg6ei16_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg6ei16_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_u16m1 | ( | ... | ) | __riscv_vsuxseg6ei16_v_u16m1(__VA_ARGS__) |
| #define vsuxseg6ei16_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg6ei16_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg6ei16_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg6ei16_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg6ei16_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_u32m1 | ( | ... | ) | __riscv_vsuxseg6ei16_v_u32m1(__VA_ARGS__) |
| #define vsuxseg6ei16_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg6ei16_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg6ei16_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_u64m1 | ( | ... | ) | __riscv_vsuxseg6ei16_v_u64m1(__VA_ARGS__) |
| #define vsuxseg6ei16_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_u8m1 | ( | ... | ) | __riscv_vsuxseg6ei16_v_u8m1(__VA_ARGS__) |
| #define vsuxseg6ei16_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg6ei16_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg6ei16_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg6ei16_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg6ei16_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg6ei16_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg6ei16_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg6ei16_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg6ei16_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_f16m1 | ( | ... | ) | __riscv_vsuxseg6ei32_v_f16m1(__VA_ARGS__) |
| #define vsuxseg6ei32_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg6ei32_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg6ei32_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg6ei32_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg6ei32_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_f32m1 | ( | ... | ) | __riscv_vsuxseg6ei32_v_f32m1(__VA_ARGS__) |
| #define vsuxseg6ei32_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg6ei32_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg6ei32_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_f64m1 | ( | ... | ) | __riscv_vsuxseg6ei32_v_f64m1(__VA_ARGS__) |
| #define vsuxseg6ei32_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_i16m1 | ( | ... | ) | __riscv_vsuxseg6ei32_v_i16m1(__VA_ARGS__) |
| #define vsuxseg6ei32_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg6ei32_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg6ei32_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg6ei32_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg6ei32_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_i32m1 | ( | ... | ) | __riscv_vsuxseg6ei32_v_i32m1(__VA_ARGS__) |
| #define vsuxseg6ei32_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg6ei32_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg6ei32_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_i64m1 | ( | ... | ) | __riscv_vsuxseg6ei32_v_i64m1(__VA_ARGS__) |
| #define vsuxseg6ei32_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_i8m1 | ( | ... | ) | __riscv_vsuxseg6ei32_v_i8m1(__VA_ARGS__) |
| #define vsuxseg6ei32_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg6ei32_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg6ei32_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg6ei32_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg6ei32_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg6ei32_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg6ei32_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_u16m1 | ( | ... | ) | __riscv_vsuxseg6ei32_v_u16m1(__VA_ARGS__) |
| #define vsuxseg6ei32_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg6ei32_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg6ei32_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg6ei32_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg6ei32_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_u32m1 | ( | ... | ) | __riscv_vsuxseg6ei32_v_u32m1(__VA_ARGS__) |
| #define vsuxseg6ei32_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg6ei32_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg6ei32_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_u64m1 | ( | ... | ) | __riscv_vsuxseg6ei32_v_u64m1(__VA_ARGS__) |
| #define vsuxseg6ei32_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_u8m1 | ( | ... | ) | __riscv_vsuxseg6ei32_v_u8m1(__VA_ARGS__) |
| #define vsuxseg6ei32_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg6ei32_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg6ei32_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg6ei32_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg6ei32_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg6ei32_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg6ei32_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg6ei32_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg6ei32_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_f16m1 | ( | ... | ) | __riscv_vsuxseg6ei64_v_f16m1(__VA_ARGS__) |
| #define vsuxseg6ei64_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg6ei64_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg6ei64_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg6ei64_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg6ei64_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_f32m1 | ( | ... | ) | __riscv_vsuxseg6ei64_v_f32m1(__VA_ARGS__) |
| #define vsuxseg6ei64_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg6ei64_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg6ei64_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_f64m1 | ( | ... | ) | __riscv_vsuxseg6ei64_v_f64m1(__VA_ARGS__) |
| #define vsuxseg6ei64_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_i16m1 | ( | ... | ) | __riscv_vsuxseg6ei64_v_i16m1(__VA_ARGS__) |
| #define vsuxseg6ei64_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg6ei64_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg6ei64_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg6ei64_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg6ei64_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_i32m1 | ( | ... | ) | __riscv_vsuxseg6ei64_v_i32m1(__VA_ARGS__) |
| #define vsuxseg6ei64_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg6ei64_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg6ei64_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_i64m1 | ( | ... | ) | __riscv_vsuxseg6ei64_v_i64m1(__VA_ARGS__) |
| #define vsuxseg6ei64_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_i8m1 | ( | ... | ) | __riscv_vsuxseg6ei64_v_i8m1(__VA_ARGS__) |
| #define vsuxseg6ei64_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg6ei64_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg6ei64_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg6ei64_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg6ei64_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg6ei64_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg6ei64_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_u16m1 | ( | ... | ) | __riscv_vsuxseg6ei64_v_u16m1(__VA_ARGS__) |
| #define vsuxseg6ei64_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg6ei64_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg6ei64_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg6ei64_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg6ei64_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_u32m1 | ( | ... | ) | __riscv_vsuxseg6ei64_v_u32m1(__VA_ARGS__) |
| #define vsuxseg6ei64_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg6ei64_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg6ei64_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_u64m1 | ( | ... | ) | __riscv_vsuxseg6ei64_v_u64m1(__VA_ARGS__) |
| #define vsuxseg6ei64_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_u8m1 | ( | ... | ) | __riscv_vsuxseg6ei64_v_u8m1(__VA_ARGS__) |
| #define vsuxseg6ei64_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg6ei64_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg6ei64_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg6ei64_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg6ei64_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg6ei64_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg6ei64_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg6ei64_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg6ei64_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_f16m1 | ( | ... | ) | __riscv_vsuxseg6ei8_v_f16m1(__VA_ARGS__) |
| #define vsuxseg6ei8_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg6ei8_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg6ei8_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg6ei8_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg6ei8_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_f32m1 | ( | ... | ) | __riscv_vsuxseg6ei8_v_f32m1(__VA_ARGS__) |
| #define vsuxseg6ei8_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg6ei8_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg6ei8_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_f64m1 | ( | ... | ) | __riscv_vsuxseg6ei8_v_f64m1(__VA_ARGS__) |
| #define vsuxseg6ei8_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_i16m1 | ( | ... | ) | __riscv_vsuxseg6ei8_v_i16m1(__VA_ARGS__) |
| #define vsuxseg6ei8_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg6ei8_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg6ei8_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg6ei8_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg6ei8_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_i32m1 | ( | ... | ) | __riscv_vsuxseg6ei8_v_i32m1(__VA_ARGS__) |
| #define vsuxseg6ei8_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg6ei8_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg6ei8_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_i64m1 | ( | ... | ) | __riscv_vsuxseg6ei8_v_i64m1(__VA_ARGS__) |
| #define vsuxseg6ei8_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_i8m1 | ( | ... | ) | __riscv_vsuxseg6ei8_v_i8m1(__VA_ARGS__) |
| #define vsuxseg6ei8_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg6ei8_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg6ei8_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg6ei8_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg6ei8_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg6ei8_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg6ei8_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_u16m1 | ( | ... | ) | __riscv_vsuxseg6ei8_v_u16m1(__VA_ARGS__) |
| #define vsuxseg6ei8_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg6ei8_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg6ei8_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg6ei8_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg6ei8_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_u32m1 | ( | ... | ) | __riscv_vsuxseg6ei8_v_u32m1(__VA_ARGS__) |
| #define vsuxseg6ei8_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg6ei8_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg6ei8_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_u64m1 | ( | ... | ) | __riscv_vsuxseg6ei8_v_u64m1(__VA_ARGS__) |
| #define vsuxseg6ei8_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_u8m1 | ( | ... | ) | __riscv_vsuxseg6ei8_v_u8m1(__VA_ARGS__) |
| #define vsuxseg6ei8_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg6ei8_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg6ei8_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg6ei8_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg6ei8_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg6ei8_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg6ei8_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg6ei8_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg6ei8_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_f16m1 | ( | ... | ) | __riscv_vsuxseg7ei16_v_f16m1(__VA_ARGS__) |
| #define vsuxseg7ei16_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg7ei16_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg7ei16_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg7ei16_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg7ei16_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_f32m1 | ( | ... | ) | __riscv_vsuxseg7ei16_v_f32m1(__VA_ARGS__) |
| #define vsuxseg7ei16_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg7ei16_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg7ei16_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_f64m1 | ( | ... | ) | __riscv_vsuxseg7ei16_v_f64m1(__VA_ARGS__) |
| #define vsuxseg7ei16_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_i16m1 | ( | ... | ) | __riscv_vsuxseg7ei16_v_i16m1(__VA_ARGS__) |
| #define vsuxseg7ei16_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg7ei16_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg7ei16_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg7ei16_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg7ei16_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_i32m1 | ( | ... | ) | __riscv_vsuxseg7ei16_v_i32m1(__VA_ARGS__) |
| #define vsuxseg7ei16_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg7ei16_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg7ei16_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_i64m1 | ( | ... | ) | __riscv_vsuxseg7ei16_v_i64m1(__VA_ARGS__) |
| #define vsuxseg7ei16_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_i8m1 | ( | ... | ) | __riscv_vsuxseg7ei16_v_i8m1(__VA_ARGS__) |
| #define vsuxseg7ei16_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg7ei16_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg7ei16_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg7ei16_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg7ei16_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg7ei16_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg7ei16_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_u16m1 | ( | ... | ) | __riscv_vsuxseg7ei16_v_u16m1(__VA_ARGS__) |
| #define vsuxseg7ei16_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg7ei16_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg7ei16_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg7ei16_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg7ei16_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_u32m1 | ( | ... | ) | __riscv_vsuxseg7ei16_v_u32m1(__VA_ARGS__) |
| #define vsuxseg7ei16_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg7ei16_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg7ei16_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_u64m1 | ( | ... | ) | __riscv_vsuxseg7ei16_v_u64m1(__VA_ARGS__) |
| #define vsuxseg7ei16_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_u8m1 | ( | ... | ) | __riscv_vsuxseg7ei16_v_u8m1(__VA_ARGS__) |
| #define vsuxseg7ei16_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg7ei16_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg7ei16_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg7ei16_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg7ei16_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg7ei16_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg7ei16_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg7ei16_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg7ei16_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_f16m1 | ( | ... | ) | __riscv_vsuxseg7ei32_v_f16m1(__VA_ARGS__) |
| #define vsuxseg7ei32_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg7ei32_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg7ei32_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg7ei32_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg7ei32_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_f32m1 | ( | ... | ) | __riscv_vsuxseg7ei32_v_f32m1(__VA_ARGS__) |
| #define vsuxseg7ei32_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg7ei32_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg7ei32_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_f64m1 | ( | ... | ) | __riscv_vsuxseg7ei32_v_f64m1(__VA_ARGS__) |
| #define vsuxseg7ei32_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_i16m1 | ( | ... | ) | __riscv_vsuxseg7ei32_v_i16m1(__VA_ARGS__) |
| #define vsuxseg7ei32_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg7ei32_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg7ei32_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg7ei32_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg7ei32_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_i32m1 | ( | ... | ) | __riscv_vsuxseg7ei32_v_i32m1(__VA_ARGS__) |
| #define vsuxseg7ei32_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg7ei32_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg7ei32_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_i64m1 | ( | ... | ) | __riscv_vsuxseg7ei32_v_i64m1(__VA_ARGS__) |
| #define vsuxseg7ei32_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_i8m1 | ( | ... | ) | __riscv_vsuxseg7ei32_v_i8m1(__VA_ARGS__) |
| #define vsuxseg7ei32_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg7ei32_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg7ei32_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg7ei32_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg7ei32_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg7ei32_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg7ei32_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_u16m1 | ( | ... | ) | __riscv_vsuxseg7ei32_v_u16m1(__VA_ARGS__) |
| #define vsuxseg7ei32_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg7ei32_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg7ei32_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg7ei32_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg7ei32_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_u32m1 | ( | ... | ) | __riscv_vsuxseg7ei32_v_u32m1(__VA_ARGS__) |
| #define vsuxseg7ei32_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg7ei32_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg7ei32_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_u64m1 | ( | ... | ) | __riscv_vsuxseg7ei32_v_u64m1(__VA_ARGS__) |
| #define vsuxseg7ei32_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_u8m1 | ( | ... | ) | __riscv_vsuxseg7ei32_v_u8m1(__VA_ARGS__) |
| #define vsuxseg7ei32_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg7ei32_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg7ei32_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg7ei32_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg7ei32_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg7ei32_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg7ei32_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg7ei32_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg7ei32_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_f16m1 | ( | ... | ) | __riscv_vsuxseg7ei64_v_f16m1(__VA_ARGS__) |
| #define vsuxseg7ei64_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg7ei64_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg7ei64_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg7ei64_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg7ei64_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_f32m1 | ( | ... | ) | __riscv_vsuxseg7ei64_v_f32m1(__VA_ARGS__) |
| #define vsuxseg7ei64_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg7ei64_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg7ei64_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_f64m1 | ( | ... | ) | __riscv_vsuxseg7ei64_v_f64m1(__VA_ARGS__) |
| #define vsuxseg7ei64_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_i16m1 | ( | ... | ) | __riscv_vsuxseg7ei64_v_i16m1(__VA_ARGS__) |
| #define vsuxseg7ei64_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg7ei64_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg7ei64_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg7ei64_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg7ei64_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_i32m1 | ( | ... | ) | __riscv_vsuxseg7ei64_v_i32m1(__VA_ARGS__) |
| #define vsuxseg7ei64_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg7ei64_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg7ei64_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_i64m1 | ( | ... | ) | __riscv_vsuxseg7ei64_v_i64m1(__VA_ARGS__) |
| #define vsuxseg7ei64_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_i8m1 | ( | ... | ) | __riscv_vsuxseg7ei64_v_i8m1(__VA_ARGS__) |
| #define vsuxseg7ei64_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg7ei64_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg7ei64_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg7ei64_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg7ei64_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg7ei64_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg7ei64_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_u16m1 | ( | ... | ) | __riscv_vsuxseg7ei64_v_u16m1(__VA_ARGS__) |
| #define vsuxseg7ei64_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg7ei64_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg7ei64_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg7ei64_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg7ei64_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_u32m1 | ( | ... | ) | __riscv_vsuxseg7ei64_v_u32m1(__VA_ARGS__) |
| #define vsuxseg7ei64_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg7ei64_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg7ei64_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_u64m1 | ( | ... | ) | __riscv_vsuxseg7ei64_v_u64m1(__VA_ARGS__) |
| #define vsuxseg7ei64_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_u8m1 | ( | ... | ) | __riscv_vsuxseg7ei64_v_u8m1(__VA_ARGS__) |
| #define vsuxseg7ei64_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg7ei64_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg7ei64_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg7ei64_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg7ei64_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg7ei64_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg7ei64_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg7ei64_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg7ei64_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_f16m1 | ( | ... | ) | __riscv_vsuxseg7ei8_v_f16m1(__VA_ARGS__) |
| #define vsuxseg7ei8_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg7ei8_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg7ei8_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg7ei8_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg7ei8_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_f32m1 | ( | ... | ) | __riscv_vsuxseg7ei8_v_f32m1(__VA_ARGS__) |
| #define vsuxseg7ei8_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg7ei8_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg7ei8_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_f64m1 | ( | ... | ) | __riscv_vsuxseg7ei8_v_f64m1(__VA_ARGS__) |
| #define vsuxseg7ei8_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_i16m1 | ( | ... | ) | __riscv_vsuxseg7ei8_v_i16m1(__VA_ARGS__) |
| #define vsuxseg7ei8_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg7ei8_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg7ei8_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg7ei8_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg7ei8_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_i32m1 | ( | ... | ) | __riscv_vsuxseg7ei8_v_i32m1(__VA_ARGS__) |
| #define vsuxseg7ei8_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg7ei8_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg7ei8_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_i64m1 | ( | ... | ) | __riscv_vsuxseg7ei8_v_i64m1(__VA_ARGS__) |
| #define vsuxseg7ei8_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_i8m1 | ( | ... | ) | __riscv_vsuxseg7ei8_v_i8m1(__VA_ARGS__) |
| #define vsuxseg7ei8_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg7ei8_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg7ei8_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg7ei8_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg7ei8_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg7ei8_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg7ei8_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_u16m1 | ( | ... | ) | __riscv_vsuxseg7ei8_v_u16m1(__VA_ARGS__) |
| #define vsuxseg7ei8_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg7ei8_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg7ei8_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg7ei8_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg7ei8_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_u32m1 | ( | ... | ) | __riscv_vsuxseg7ei8_v_u32m1(__VA_ARGS__) |
| #define vsuxseg7ei8_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg7ei8_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg7ei8_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_u64m1 | ( | ... | ) | __riscv_vsuxseg7ei8_v_u64m1(__VA_ARGS__) |
| #define vsuxseg7ei8_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_u8m1 | ( | ... | ) | __riscv_vsuxseg7ei8_v_u8m1(__VA_ARGS__) |
| #define vsuxseg7ei8_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg7ei8_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg7ei8_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg7ei8_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg7ei8_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg7ei8_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg7ei8_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg7ei8_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg7ei8_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_f16m1 | ( | ... | ) | __riscv_vsuxseg8ei16_v_f16m1(__VA_ARGS__) |
| #define vsuxseg8ei16_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg8ei16_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg8ei16_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg8ei16_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg8ei16_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_f32m1 | ( | ... | ) | __riscv_vsuxseg8ei16_v_f32m1(__VA_ARGS__) |
| #define vsuxseg8ei16_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg8ei16_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg8ei16_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_f64m1 | ( | ... | ) | __riscv_vsuxseg8ei16_v_f64m1(__VA_ARGS__) |
| #define vsuxseg8ei16_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_i16m1 | ( | ... | ) | __riscv_vsuxseg8ei16_v_i16m1(__VA_ARGS__) |
| #define vsuxseg8ei16_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg8ei16_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg8ei16_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg8ei16_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg8ei16_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_i32m1 | ( | ... | ) | __riscv_vsuxseg8ei16_v_i32m1(__VA_ARGS__) |
| #define vsuxseg8ei16_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg8ei16_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg8ei16_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_i64m1 | ( | ... | ) | __riscv_vsuxseg8ei16_v_i64m1(__VA_ARGS__) |
| #define vsuxseg8ei16_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_i8m1 | ( | ... | ) | __riscv_vsuxseg8ei16_v_i8m1(__VA_ARGS__) |
| #define vsuxseg8ei16_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg8ei16_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg8ei16_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg8ei16_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg8ei16_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg8ei16_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg8ei16_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_u16m1 | ( | ... | ) | __riscv_vsuxseg8ei16_v_u16m1(__VA_ARGS__) |
| #define vsuxseg8ei16_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg8ei16_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg8ei16_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg8ei16_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg8ei16_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_u32m1 | ( | ... | ) | __riscv_vsuxseg8ei16_v_u32m1(__VA_ARGS__) |
| #define vsuxseg8ei16_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg8ei16_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg8ei16_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_u64m1 | ( | ... | ) | __riscv_vsuxseg8ei16_v_u64m1(__VA_ARGS__) |
| #define vsuxseg8ei16_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_u8m1 | ( | ... | ) | __riscv_vsuxseg8ei16_v_u8m1(__VA_ARGS__) |
| #define vsuxseg8ei16_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg8ei16_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg8ei16_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg8ei16_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg8ei16_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg8ei16_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg8ei16_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg8ei16_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg8ei16_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_f16m1 | ( | ... | ) | __riscv_vsuxseg8ei32_v_f16m1(__VA_ARGS__) |
| #define vsuxseg8ei32_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg8ei32_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg8ei32_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg8ei32_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg8ei32_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_f32m1 | ( | ... | ) | __riscv_vsuxseg8ei32_v_f32m1(__VA_ARGS__) |
| #define vsuxseg8ei32_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg8ei32_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg8ei32_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_f64m1 | ( | ... | ) | __riscv_vsuxseg8ei32_v_f64m1(__VA_ARGS__) |
| #define vsuxseg8ei32_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_i16m1 | ( | ... | ) | __riscv_vsuxseg8ei32_v_i16m1(__VA_ARGS__) |
| #define vsuxseg8ei32_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg8ei32_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg8ei32_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg8ei32_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg8ei32_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_i32m1 | ( | ... | ) | __riscv_vsuxseg8ei32_v_i32m1(__VA_ARGS__) |
| #define vsuxseg8ei32_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg8ei32_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg8ei32_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_i64m1 | ( | ... | ) | __riscv_vsuxseg8ei32_v_i64m1(__VA_ARGS__) |
| #define vsuxseg8ei32_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_i8m1 | ( | ... | ) | __riscv_vsuxseg8ei32_v_i8m1(__VA_ARGS__) |
| #define vsuxseg8ei32_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg8ei32_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg8ei32_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg8ei32_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg8ei32_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg8ei32_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg8ei32_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_u16m1 | ( | ... | ) | __riscv_vsuxseg8ei32_v_u16m1(__VA_ARGS__) |
| #define vsuxseg8ei32_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg8ei32_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg8ei32_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg8ei32_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg8ei32_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_u32m1 | ( | ... | ) | __riscv_vsuxseg8ei32_v_u32m1(__VA_ARGS__) |
| #define vsuxseg8ei32_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg8ei32_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg8ei32_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_u64m1 | ( | ... | ) | __riscv_vsuxseg8ei32_v_u64m1(__VA_ARGS__) |
| #define vsuxseg8ei32_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_u8m1 | ( | ... | ) | __riscv_vsuxseg8ei32_v_u8m1(__VA_ARGS__) |
| #define vsuxseg8ei32_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg8ei32_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg8ei32_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg8ei32_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg8ei32_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg8ei32_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg8ei32_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg8ei32_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg8ei32_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_f16m1 | ( | ... | ) | __riscv_vsuxseg8ei64_v_f16m1(__VA_ARGS__) |
| #define vsuxseg8ei64_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg8ei64_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg8ei64_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg8ei64_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg8ei64_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_f32m1 | ( | ... | ) | __riscv_vsuxseg8ei64_v_f32m1(__VA_ARGS__) |
| #define vsuxseg8ei64_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg8ei64_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg8ei64_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_f64m1 | ( | ... | ) | __riscv_vsuxseg8ei64_v_f64m1(__VA_ARGS__) |
| #define vsuxseg8ei64_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_i16m1 | ( | ... | ) | __riscv_vsuxseg8ei64_v_i16m1(__VA_ARGS__) |
| #define vsuxseg8ei64_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg8ei64_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg8ei64_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg8ei64_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg8ei64_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_i32m1 | ( | ... | ) | __riscv_vsuxseg8ei64_v_i32m1(__VA_ARGS__) |
| #define vsuxseg8ei64_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg8ei64_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg8ei64_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_i64m1 | ( | ... | ) | __riscv_vsuxseg8ei64_v_i64m1(__VA_ARGS__) |
| #define vsuxseg8ei64_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_i8m1 | ( | ... | ) | __riscv_vsuxseg8ei64_v_i8m1(__VA_ARGS__) |
| #define vsuxseg8ei64_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg8ei64_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg8ei64_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg8ei64_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg8ei64_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg8ei64_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg8ei64_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_u16m1 | ( | ... | ) | __riscv_vsuxseg8ei64_v_u16m1(__VA_ARGS__) |
| #define vsuxseg8ei64_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg8ei64_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg8ei64_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg8ei64_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg8ei64_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_u32m1 | ( | ... | ) | __riscv_vsuxseg8ei64_v_u32m1(__VA_ARGS__) |
| #define vsuxseg8ei64_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg8ei64_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg8ei64_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_u64m1 | ( | ... | ) | __riscv_vsuxseg8ei64_v_u64m1(__VA_ARGS__) |
| #define vsuxseg8ei64_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_u8m1 | ( | ... | ) | __riscv_vsuxseg8ei64_v_u8m1(__VA_ARGS__) |
| #define vsuxseg8ei64_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg8ei64_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg8ei64_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg8ei64_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg8ei64_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg8ei64_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg8ei64_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg8ei64_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg8ei64_v_u8mf8_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_f16m1 | ( | ... | ) | __riscv_vsuxseg8ei8_v_f16m1(__VA_ARGS__) |
| #define vsuxseg8ei8_v_f16m1_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_f16m1_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_f16mf2 | ( | ... | ) | __riscv_vsuxseg8ei8_v_f16mf2(__VA_ARGS__) |
| #define vsuxseg8ei8_v_f16mf2_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_f16mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_f16mf4 | ( | ... | ) | __riscv_vsuxseg8ei8_v_f16mf4(__VA_ARGS__) |
| #define vsuxseg8ei8_v_f16mf4_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_f16mf4_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_f32m1 | ( | ... | ) | __riscv_vsuxseg8ei8_v_f32m1(__VA_ARGS__) |
| #define vsuxseg8ei8_v_f32m1_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_f32m1_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_f32mf2 | ( | ... | ) | __riscv_vsuxseg8ei8_v_f32mf2(__VA_ARGS__) |
| #define vsuxseg8ei8_v_f32mf2_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_f32mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_f64m1 | ( | ... | ) | __riscv_vsuxseg8ei8_v_f64m1(__VA_ARGS__) |
| #define vsuxseg8ei8_v_f64m1_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_f64m1_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_i16m1 | ( | ... | ) | __riscv_vsuxseg8ei8_v_i16m1(__VA_ARGS__) |
| #define vsuxseg8ei8_v_i16m1_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_i16m1_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_i16mf2 | ( | ... | ) | __riscv_vsuxseg8ei8_v_i16mf2(__VA_ARGS__) |
| #define vsuxseg8ei8_v_i16mf2_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_i16mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_i16mf4 | ( | ... | ) | __riscv_vsuxseg8ei8_v_i16mf4(__VA_ARGS__) |
| #define vsuxseg8ei8_v_i16mf4_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_i16mf4_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_i32m1 | ( | ... | ) | __riscv_vsuxseg8ei8_v_i32m1(__VA_ARGS__) |
| #define vsuxseg8ei8_v_i32m1_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_i32m1_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_i32mf2 | ( | ... | ) | __riscv_vsuxseg8ei8_v_i32mf2(__VA_ARGS__) |
| #define vsuxseg8ei8_v_i32mf2_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_i32mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_i64m1 | ( | ... | ) | __riscv_vsuxseg8ei8_v_i64m1(__VA_ARGS__) |
| #define vsuxseg8ei8_v_i64m1_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_i64m1_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_i8m1 | ( | ... | ) | __riscv_vsuxseg8ei8_v_i8m1(__VA_ARGS__) |
| #define vsuxseg8ei8_v_i8m1_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_i8m1_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_i8mf2 | ( | ... | ) | __riscv_vsuxseg8ei8_v_i8mf2(__VA_ARGS__) |
| #define vsuxseg8ei8_v_i8mf2_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_i8mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_i8mf4 | ( | ... | ) | __riscv_vsuxseg8ei8_v_i8mf4(__VA_ARGS__) |
| #define vsuxseg8ei8_v_i8mf4_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_i8mf4_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_i8mf8 | ( | ... | ) | __riscv_vsuxseg8ei8_v_i8mf8(__VA_ARGS__) |
| #define vsuxseg8ei8_v_i8mf8_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_i8mf8_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_u16m1 | ( | ... | ) | __riscv_vsuxseg8ei8_v_u16m1(__VA_ARGS__) |
| #define vsuxseg8ei8_v_u16m1_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_u16m1_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_u16mf2 | ( | ... | ) | __riscv_vsuxseg8ei8_v_u16mf2(__VA_ARGS__) |
| #define vsuxseg8ei8_v_u16mf2_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_u16mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_u16mf4 | ( | ... | ) | __riscv_vsuxseg8ei8_v_u16mf4(__VA_ARGS__) |
| #define vsuxseg8ei8_v_u16mf4_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_u16mf4_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_u32m1 | ( | ... | ) | __riscv_vsuxseg8ei8_v_u32m1(__VA_ARGS__) |
| #define vsuxseg8ei8_v_u32m1_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_u32m1_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_u32mf2 | ( | ... | ) | __riscv_vsuxseg8ei8_v_u32mf2(__VA_ARGS__) |
| #define vsuxseg8ei8_v_u32mf2_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_u32mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_u64m1 | ( | ... | ) | __riscv_vsuxseg8ei8_v_u64m1(__VA_ARGS__) |
| #define vsuxseg8ei8_v_u64m1_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_u64m1_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_u8m1 | ( | ... | ) | __riscv_vsuxseg8ei8_v_u8m1(__VA_ARGS__) |
| #define vsuxseg8ei8_v_u8m1_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_u8m1_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_u8mf2 | ( | ... | ) | __riscv_vsuxseg8ei8_v_u8mf2(__VA_ARGS__) |
| #define vsuxseg8ei8_v_u8mf2_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_u8mf2_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_u8mf4 | ( | ... | ) | __riscv_vsuxseg8ei8_v_u8mf4(__VA_ARGS__) |
| #define vsuxseg8ei8_v_u8mf4_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_u8mf4_m(__VA_ARGS__) |
| #define vsuxseg8ei8_v_u8mf8 | ( | ... | ) | __riscv_vsuxseg8ei8_v_u8mf8(__VA_ARGS__) |
| #define vsuxseg8ei8_v_u8mf8_m | ( | ... | ) | __riscv_vsuxseg8ei8_v_u8mf8_m(__VA_ARGS__) |
| #define vundefined_f16m1 | ( | ... | ) | __riscv_vundefined_f16m1(__VA_ARGS__) |
| #define vundefined_f16m2 | ( | ... | ) | __riscv_vundefined_f16m2(__VA_ARGS__) |
| #define vundefined_f16m4 | ( | ... | ) | __riscv_vundefined_f16m4(__VA_ARGS__) |
| #define vundefined_f16m8 | ( | ... | ) | __riscv_vundefined_f16m8(__VA_ARGS__) |
| #define vundefined_f16mf2 | ( | ... | ) | __riscv_vundefined_f16mf2(__VA_ARGS__) |
| #define vundefined_f16mf4 | ( | ... | ) | __riscv_vundefined_f16mf4(__VA_ARGS__) |
| #define vundefined_f32m1 | ( | ... | ) | __riscv_vundefined_f32m1(__VA_ARGS__) |
| #define vundefined_f32m2 | ( | ... | ) | __riscv_vundefined_f32m2(__VA_ARGS__) |
| #define vundefined_f32m4 | ( | ... | ) | __riscv_vundefined_f32m4(__VA_ARGS__) |
| #define vundefined_f32m8 | ( | ... | ) | __riscv_vundefined_f32m8(__VA_ARGS__) |
| #define vundefined_f32mf2 | ( | ... | ) | __riscv_vundefined_f32mf2(__VA_ARGS__) |
| #define vundefined_f64m1 | ( | ... | ) | __riscv_vundefined_f64m1(__VA_ARGS__) |
| #define vundefined_f64m2 | ( | ... | ) | __riscv_vundefined_f64m2(__VA_ARGS__) |
| #define vundefined_f64m4 | ( | ... | ) | __riscv_vundefined_f64m4(__VA_ARGS__) |
| #define vundefined_f64m8 | ( | ... | ) | __riscv_vundefined_f64m8(__VA_ARGS__) |
| #define vundefined_i16m1 | ( | ... | ) | __riscv_vundefined_i16m1(__VA_ARGS__) |
| #define vundefined_i16m2 | ( | ... | ) | __riscv_vundefined_i16m2(__VA_ARGS__) |
| #define vundefined_i16m4 | ( | ... | ) | __riscv_vundefined_i16m4(__VA_ARGS__) |
| #define vundefined_i16m8 | ( | ... | ) | __riscv_vundefined_i16m8(__VA_ARGS__) |
| #define vundefined_i16mf2 | ( | ... | ) | __riscv_vundefined_i16mf2(__VA_ARGS__) |
| #define vundefined_i16mf4 | ( | ... | ) | __riscv_vundefined_i16mf4(__VA_ARGS__) |
| #define vundefined_i32m1 | ( | ... | ) | __riscv_vundefined_i32m1(__VA_ARGS__) |
| #define vundefined_i32m2 | ( | ... | ) | __riscv_vundefined_i32m2(__VA_ARGS__) |
| #define vundefined_i32m4 | ( | ... | ) | __riscv_vundefined_i32m4(__VA_ARGS__) |
| #define vundefined_i32m8 | ( | ... | ) | __riscv_vundefined_i32m8(__VA_ARGS__) |
| #define vundefined_i32mf2 | ( | ... | ) | __riscv_vundefined_i32mf2(__VA_ARGS__) |
| #define vundefined_i64m1 | ( | ... | ) | __riscv_vundefined_i64m1(__VA_ARGS__) |
| #define vundefined_i64m2 | ( | ... | ) | __riscv_vundefined_i64m2(__VA_ARGS__) |
| #define vundefined_i64m4 | ( | ... | ) | __riscv_vundefined_i64m4(__VA_ARGS__) |
| #define vundefined_i64m8 | ( | ... | ) | __riscv_vundefined_i64m8(__VA_ARGS__) |
| #define vundefined_i8m1 | ( | ... | ) | __riscv_vundefined_i8m1(__VA_ARGS__) |
| #define vundefined_i8m2 | ( | ... | ) | __riscv_vundefined_i8m2(__VA_ARGS__) |
| #define vundefined_i8m4 | ( | ... | ) | __riscv_vundefined_i8m4(__VA_ARGS__) |
| #define vundefined_i8m8 | ( | ... | ) | __riscv_vundefined_i8m8(__VA_ARGS__) |
| #define vundefined_i8mf2 | ( | ... | ) | __riscv_vundefined_i8mf2(__VA_ARGS__) |
| #define vundefined_i8mf4 | ( | ... | ) | __riscv_vundefined_i8mf4(__VA_ARGS__) |
| #define vundefined_i8mf8 | ( | ... | ) | __riscv_vundefined_i8mf8(__VA_ARGS__) |
| #define vundefined_u16m1 | ( | ... | ) | __riscv_vundefined_u16m1(__VA_ARGS__) |
| #define vundefined_u16m2 | ( | ... | ) | __riscv_vundefined_u16m2(__VA_ARGS__) |
| #define vundefined_u16m4 | ( | ... | ) | __riscv_vundefined_u16m4(__VA_ARGS__) |
| #define vundefined_u16m8 | ( | ... | ) | __riscv_vundefined_u16m8(__VA_ARGS__) |
| #define vundefined_u16mf2 | ( | ... | ) | __riscv_vundefined_u16mf2(__VA_ARGS__) |
| #define vundefined_u16mf4 | ( | ... | ) | __riscv_vundefined_u16mf4(__VA_ARGS__) |
| #define vundefined_u32m1 | ( | ... | ) | __riscv_vundefined_u32m1(__VA_ARGS__) |
| #define vundefined_u32m2 | ( | ... | ) | __riscv_vundefined_u32m2(__VA_ARGS__) |
| #define vundefined_u32m4 | ( | ... | ) | __riscv_vundefined_u32m4(__VA_ARGS__) |
| #define vundefined_u32m8 | ( | ... | ) | __riscv_vundefined_u32m8(__VA_ARGS__) |
| #define vundefined_u32mf2 | ( | ... | ) | __riscv_vundefined_u32mf2(__VA_ARGS__) |
| #define vundefined_u64m1 | ( | ... | ) | __riscv_vundefined_u64m1(__VA_ARGS__) |
| #define vundefined_u64m2 | ( | ... | ) | __riscv_vundefined_u64m2(__VA_ARGS__) |
| #define vundefined_u64m4 | ( | ... | ) | __riscv_vundefined_u64m4(__VA_ARGS__) |
| #define vundefined_u64m8 | ( | ... | ) | __riscv_vundefined_u64m8(__VA_ARGS__) |
| #define vundefined_u8m1 | ( | ... | ) | __riscv_vundefined_u8m1(__VA_ARGS__) |
| #define vundefined_u8m2 | ( | ... | ) | __riscv_vundefined_u8m2(__VA_ARGS__) |
| #define vundefined_u8m4 | ( | ... | ) | __riscv_vundefined_u8m4(__VA_ARGS__) |
| #define vundefined_u8m8 | ( | ... | ) | __riscv_vundefined_u8m8(__VA_ARGS__) |
| #define vundefined_u8mf2 | ( | ... | ) | __riscv_vundefined_u8mf2(__VA_ARGS__) |
| #define vundefined_u8mf4 | ( | ... | ) | __riscv_vundefined_u8mf4(__VA_ARGS__) |
| #define vundefined_u8mf8 | ( | ... | ) | __riscv_vundefined_u8mf8(__VA_ARGS__) |
| #define vwadd_vv_i16m1 | ( | ... | ) | __riscv_vwadd_vv_i16m1(__VA_ARGS__) |
| #define vwadd_vv_i16m1_m | ( | ... | ) | __riscv_vwadd_vv_i16m1_tumu(__VA_ARGS__) |
| #define vwadd_vv_i16m2 | ( | ... | ) | __riscv_vwadd_vv_i16m2(__VA_ARGS__) |
| #define vwadd_vv_i16m2_m | ( | ... | ) | __riscv_vwadd_vv_i16m2_tumu(__VA_ARGS__) |
| #define vwadd_vv_i16m4 | ( | ... | ) | __riscv_vwadd_vv_i16m4(__VA_ARGS__) |
| #define vwadd_vv_i16m4_m | ( | ... | ) | __riscv_vwadd_vv_i16m4_tumu(__VA_ARGS__) |
| #define vwadd_vv_i16m8 | ( | ... | ) | __riscv_vwadd_vv_i16m8(__VA_ARGS__) |
| #define vwadd_vv_i16m8_m | ( | ... | ) | __riscv_vwadd_vv_i16m8_tumu(__VA_ARGS__) |
| #define vwadd_vv_i16mf2 | ( | ... | ) | __riscv_vwadd_vv_i16mf2(__VA_ARGS__) |
| #define vwadd_vv_i16mf2_m | ( | ... | ) | __riscv_vwadd_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vwadd_vv_i16mf4 | ( | ... | ) | __riscv_vwadd_vv_i16mf4(__VA_ARGS__) |
| #define vwadd_vv_i16mf4_m | ( | ... | ) | __riscv_vwadd_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vwadd_vv_i32m1 | ( | ... | ) | __riscv_vwadd_vv_i32m1(__VA_ARGS__) |
| #define vwadd_vv_i32m1_m | ( | ... | ) | __riscv_vwadd_vv_i32m1_tumu(__VA_ARGS__) |
| #define vwadd_vv_i32m2 | ( | ... | ) | __riscv_vwadd_vv_i32m2(__VA_ARGS__) |
| #define vwadd_vv_i32m2_m | ( | ... | ) | __riscv_vwadd_vv_i32m2_tumu(__VA_ARGS__) |
| #define vwadd_vv_i32m4 | ( | ... | ) | __riscv_vwadd_vv_i32m4(__VA_ARGS__) |
| #define vwadd_vv_i32m4_m | ( | ... | ) | __riscv_vwadd_vv_i32m4_tumu(__VA_ARGS__) |
| #define vwadd_vv_i32m8 | ( | ... | ) | __riscv_vwadd_vv_i32m8(__VA_ARGS__) |
| #define vwadd_vv_i32m8_m | ( | ... | ) | __riscv_vwadd_vv_i32m8_tumu(__VA_ARGS__) |
| #define vwadd_vv_i32mf2 | ( | ... | ) | __riscv_vwadd_vv_i32mf2(__VA_ARGS__) |
| #define vwadd_vv_i32mf2_m | ( | ... | ) | __riscv_vwadd_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vwadd_vv_i64m1 | ( | ... | ) | __riscv_vwadd_vv_i64m1(__VA_ARGS__) |
| #define vwadd_vv_i64m1_m | ( | ... | ) | __riscv_vwadd_vv_i64m1_tumu(__VA_ARGS__) |
| #define vwadd_vv_i64m2 | ( | ... | ) | __riscv_vwadd_vv_i64m2(__VA_ARGS__) |
| #define vwadd_vv_i64m2_m | ( | ... | ) | __riscv_vwadd_vv_i64m2_tumu(__VA_ARGS__) |
| #define vwadd_vv_i64m4 | ( | ... | ) | __riscv_vwadd_vv_i64m4(__VA_ARGS__) |
| #define vwadd_vv_i64m4_m | ( | ... | ) | __riscv_vwadd_vv_i64m4_tumu(__VA_ARGS__) |
| #define vwadd_vv_i64m8 | ( | ... | ) | __riscv_vwadd_vv_i64m8(__VA_ARGS__) |
| #define vwadd_vv_i64m8_m | ( | ... | ) | __riscv_vwadd_vv_i64m8_tumu(__VA_ARGS__) |
| #define vwadd_vx_i16m1 | ( | ... | ) | __riscv_vwadd_vx_i16m1(__VA_ARGS__) |
| #define vwadd_vx_i16m1_m | ( | ... | ) | __riscv_vwadd_vx_i16m1_tumu(__VA_ARGS__) |
| #define vwadd_vx_i16m2 | ( | ... | ) | __riscv_vwadd_vx_i16m2(__VA_ARGS__) |
| #define vwadd_vx_i16m2_m | ( | ... | ) | __riscv_vwadd_vx_i16m2_tumu(__VA_ARGS__) |
| #define vwadd_vx_i16m4 | ( | ... | ) | __riscv_vwadd_vx_i16m4(__VA_ARGS__) |
| #define vwadd_vx_i16m4_m | ( | ... | ) | __riscv_vwadd_vx_i16m4_tumu(__VA_ARGS__) |
| #define vwadd_vx_i16m8 | ( | ... | ) | __riscv_vwadd_vx_i16m8(__VA_ARGS__) |
| #define vwadd_vx_i16m8_m | ( | ... | ) | __riscv_vwadd_vx_i16m8_tumu(__VA_ARGS__) |
| #define vwadd_vx_i16mf2 | ( | ... | ) | __riscv_vwadd_vx_i16mf2(__VA_ARGS__) |
| #define vwadd_vx_i16mf2_m | ( | ... | ) | __riscv_vwadd_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vwadd_vx_i16mf4 | ( | ... | ) | __riscv_vwadd_vx_i16mf4(__VA_ARGS__) |
| #define vwadd_vx_i16mf4_m | ( | ... | ) | __riscv_vwadd_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vwadd_vx_i32m1 | ( | ... | ) | __riscv_vwadd_vx_i32m1(__VA_ARGS__) |
| #define vwadd_vx_i32m1_m | ( | ... | ) | __riscv_vwadd_vx_i32m1_tumu(__VA_ARGS__) |
| #define vwadd_vx_i32m2 | ( | ... | ) | __riscv_vwadd_vx_i32m2(__VA_ARGS__) |
| #define vwadd_vx_i32m2_m | ( | ... | ) | __riscv_vwadd_vx_i32m2_tumu(__VA_ARGS__) |
| #define vwadd_vx_i32m4 | ( | ... | ) | __riscv_vwadd_vx_i32m4(__VA_ARGS__) |
| #define vwadd_vx_i32m4_m | ( | ... | ) | __riscv_vwadd_vx_i32m4_tumu(__VA_ARGS__) |
| #define vwadd_vx_i32m8 | ( | ... | ) | __riscv_vwadd_vx_i32m8(__VA_ARGS__) |
| #define vwadd_vx_i32m8_m | ( | ... | ) | __riscv_vwadd_vx_i32m8_tumu(__VA_ARGS__) |
| #define vwadd_vx_i32mf2 | ( | ... | ) | __riscv_vwadd_vx_i32mf2(__VA_ARGS__) |
| #define vwadd_vx_i32mf2_m | ( | ... | ) | __riscv_vwadd_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vwadd_vx_i64m1 | ( | ... | ) | __riscv_vwadd_vx_i64m1(__VA_ARGS__) |
| #define vwadd_vx_i64m1_m | ( | ... | ) | __riscv_vwadd_vx_i64m1_tumu(__VA_ARGS__) |
| #define vwadd_vx_i64m2 | ( | ... | ) | __riscv_vwadd_vx_i64m2(__VA_ARGS__) |
| #define vwadd_vx_i64m2_m | ( | ... | ) | __riscv_vwadd_vx_i64m2_tumu(__VA_ARGS__) |
| #define vwadd_vx_i64m4 | ( | ... | ) | __riscv_vwadd_vx_i64m4(__VA_ARGS__) |
| #define vwadd_vx_i64m4_m | ( | ... | ) | __riscv_vwadd_vx_i64m4_tumu(__VA_ARGS__) |
| #define vwadd_vx_i64m8 | ( | ... | ) | __riscv_vwadd_vx_i64m8(__VA_ARGS__) |
| #define vwadd_vx_i64m8_m | ( | ... | ) | __riscv_vwadd_vx_i64m8_tumu(__VA_ARGS__) |
| #define vwadd_wv_i16m1 | ( | ... | ) | __riscv_vwadd_wv_i16m1(__VA_ARGS__) |
| #define vwadd_wv_i16m1_m | ( | ... | ) | __riscv_vwadd_wv_i16m1_tumu(__VA_ARGS__) |
| #define vwadd_wv_i16m2 | ( | ... | ) | __riscv_vwadd_wv_i16m2(__VA_ARGS__) |
| #define vwadd_wv_i16m2_m | ( | ... | ) | __riscv_vwadd_wv_i16m2_tumu(__VA_ARGS__) |
| #define vwadd_wv_i16m4 | ( | ... | ) | __riscv_vwadd_wv_i16m4(__VA_ARGS__) |
| #define vwadd_wv_i16m4_m | ( | ... | ) | __riscv_vwadd_wv_i16m4_tumu(__VA_ARGS__) |
| #define vwadd_wv_i16m8 | ( | ... | ) | __riscv_vwadd_wv_i16m8(__VA_ARGS__) |
| #define vwadd_wv_i16m8_m | ( | ... | ) | __riscv_vwadd_wv_i16m8_tumu(__VA_ARGS__) |
| #define vwadd_wv_i16mf2 | ( | ... | ) | __riscv_vwadd_wv_i16mf2(__VA_ARGS__) |
| #define vwadd_wv_i16mf2_m | ( | ... | ) | __riscv_vwadd_wv_i16mf2_tumu(__VA_ARGS__) |
| #define vwadd_wv_i16mf4 | ( | ... | ) | __riscv_vwadd_wv_i16mf4(__VA_ARGS__) |
| #define vwadd_wv_i16mf4_m | ( | ... | ) | __riscv_vwadd_wv_i16mf4_tumu(__VA_ARGS__) |
| #define vwadd_wv_i32m1 | ( | ... | ) | __riscv_vwadd_wv_i32m1(__VA_ARGS__) |
| #define vwadd_wv_i32m1_m | ( | ... | ) | __riscv_vwadd_wv_i32m1_tumu(__VA_ARGS__) |
| #define vwadd_wv_i32m2 | ( | ... | ) | __riscv_vwadd_wv_i32m2(__VA_ARGS__) |
| #define vwadd_wv_i32m2_m | ( | ... | ) | __riscv_vwadd_wv_i32m2_tumu(__VA_ARGS__) |
| #define vwadd_wv_i32m4 | ( | ... | ) | __riscv_vwadd_wv_i32m4(__VA_ARGS__) |
| #define vwadd_wv_i32m4_m | ( | ... | ) | __riscv_vwadd_wv_i32m4_tumu(__VA_ARGS__) |
| #define vwadd_wv_i32m8 | ( | ... | ) | __riscv_vwadd_wv_i32m8(__VA_ARGS__) |
| #define vwadd_wv_i32m8_m | ( | ... | ) | __riscv_vwadd_wv_i32m8_tumu(__VA_ARGS__) |
| #define vwadd_wv_i32mf2 | ( | ... | ) | __riscv_vwadd_wv_i32mf2(__VA_ARGS__) |
| #define vwadd_wv_i32mf2_m | ( | ... | ) | __riscv_vwadd_wv_i32mf2_tumu(__VA_ARGS__) |
| #define vwadd_wv_i64m1 | ( | ... | ) | __riscv_vwadd_wv_i64m1(__VA_ARGS__) |
| #define vwadd_wv_i64m1_m | ( | ... | ) | __riscv_vwadd_wv_i64m1_tumu(__VA_ARGS__) |
| #define vwadd_wv_i64m2 | ( | ... | ) | __riscv_vwadd_wv_i64m2(__VA_ARGS__) |
| #define vwadd_wv_i64m2_m | ( | ... | ) | __riscv_vwadd_wv_i64m2_tumu(__VA_ARGS__) |
| #define vwadd_wv_i64m4 | ( | ... | ) | __riscv_vwadd_wv_i64m4(__VA_ARGS__) |
| #define vwadd_wv_i64m4_m | ( | ... | ) | __riscv_vwadd_wv_i64m4_tumu(__VA_ARGS__) |
| #define vwadd_wv_i64m8 | ( | ... | ) | __riscv_vwadd_wv_i64m8(__VA_ARGS__) |
| #define vwadd_wv_i64m8_m | ( | ... | ) | __riscv_vwadd_wv_i64m8_tumu(__VA_ARGS__) |
| #define vwadd_wx_i16m1 | ( | ... | ) | __riscv_vwadd_wx_i16m1(__VA_ARGS__) |
| #define vwadd_wx_i16m1_m | ( | ... | ) | __riscv_vwadd_wx_i16m1_tumu(__VA_ARGS__) |
| #define vwadd_wx_i16m2 | ( | ... | ) | __riscv_vwadd_wx_i16m2(__VA_ARGS__) |
| #define vwadd_wx_i16m2_m | ( | ... | ) | __riscv_vwadd_wx_i16m2_tumu(__VA_ARGS__) |
| #define vwadd_wx_i16m4 | ( | ... | ) | __riscv_vwadd_wx_i16m4(__VA_ARGS__) |
| #define vwadd_wx_i16m4_m | ( | ... | ) | __riscv_vwadd_wx_i16m4_tumu(__VA_ARGS__) |
| #define vwadd_wx_i16m8 | ( | ... | ) | __riscv_vwadd_wx_i16m8(__VA_ARGS__) |
| #define vwadd_wx_i16m8_m | ( | ... | ) | __riscv_vwadd_wx_i16m8_tumu(__VA_ARGS__) |
| #define vwadd_wx_i16mf2 | ( | ... | ) | __riscv_vwadd_wx_i16mf2(__VA_ARGS__) |
| #define vwadd_wx_i16mf2_m | ( | ... | ) | __riscv_vwadd_wx_i16mf2_tumu(__VA_ARGS__) |
| #define vwadd_wx_i16mf4 | ( | ... | ) | __riscv_vwadd_wx_i16mf4(__VA_ARGS__) |
| #define vwadd_wx_i16mf4_m | ( | ... | ) | __riscv_vwadd_wx_i16mf4_tumu(__VA_ARGS__) |
| #define vwadd_wx_i32m1 | ( | ... | ) | __riscv_vwadd_wx_i32m1(__VA_ARGS__) |
| #define vwadd_wx_i32m1_m | ( | ... | ) | __riscv_vwadd_wx_i32m1_tumu(__VA_ARGS__) |
| #define vwadd_wx_i32m2 | ( | ... | ) | __riscv_vwadd_wx_i32m2(__VA_ARGS__) |
| #define vwadd_wx_i32m2_m | ( | ... | ) | __riscv_vwadd_wx_i32m2_tumu(__VA_ARGS__) |
| #define vwadd_wx_i32m4 | ( | ... | ) | __riscv_vwadd_wx_i32m4(__VA_ARGS__) |
| #define vwadd_wx_i32m4_m | ( | ... | ) | __riscv_vwadd_wx_i32m4_tumu(__VA_ARGS__) |
| #define vwadd_wx_i32m8 | ( | ... | ) | __riscv_vwadd_wx_i32m8(__VA_ARGS__) |
| #define vwadd_wx_i32m8_m | ( | ... | ) | __riscv_vwadd_wx_i32m8_tumu(__VA_ARGS__) |
| #define vwadd_wx_i32mf2 | ( | ... | ) | __riscv_vwadd_wx_i32mf2(__VA_ARGS__) |
| #define vwadd_wx_i32mf2_m | ( | ... | ) | __riscv_vwadd_wx_i32mf2_tumu(__VA_ARGS__) |
| #define vwadd_wx_i64m1 | ( | ... | ) | __riscv_vwadd_wx_i64m1(__VA_ARGS__) |
| #define vwadd_wx_i64m1_m | ( | ... | ) | __riscv_vwadd_wx_i64m1_tumu(__VA_ARGS__) |
| #define vwadd_wx_i64m2 | ( | ... | ) | __riscv_vwadd_wx_i64m2(__VA_ARGS__) |
| #define vwadd_wx_i64m2_m | ( | ... | ) | __riscv_vwadd_wx_i64m2_tumu(__VA_ARGS__) |
| #define vwadd_wx_i64m4 | ( | ... | ) | __riscv_vwadd_wx_i64m4(__VA_ARGS__) |
| #define vwadd_wx_i64m4_m | ( | ... | ) | __riscv_vwadd_wx_i64m4_tumu(__VA_ARGS__) |
| #define vwadd_wx_i64m8 | ( | ... | ) | __riscv_vwadd_wx_i64m8(__VA_ARGS__) |
| #define vwadd_wx_i64m8_m | ( | ... | ) | __riscv_vwadd_wx_i64m8_tumu(__VA_ARGS__) |
| #define vwaddu_vv_u16m1 | ( | ... | ) | __riscv_vwaddu_vv_u16m1(__VA_ARGS__) |
| #define vwaddu_vv_u16m1_m | ( | ... | ) | __riscv_vwaddu_vv_u16m1_tumu(__VA_ARGS__) |
| #define vwaddu_vv_u16m2 | ( | ... | ) | __riscv_vwaddu_vv_u16m2(__VA_ARGS__) |
| #define vwaddu_vv_u16m2_m | ( | ... | ) | __riscv_vwaddu_vv_u16m2_tumu(__VA_ARGS__) |
| #define vwaddu_vv_u16m4 | ( | ... | ) | __riscv_vwaddu_vv_u16m4(__VA_ARGS__) |
| #define vwaddu_vv_u16m4_m | ( | ... | ) | __riscv_vwaddu_vv_u16m4_tumu(__VA_ARGS__) |
| #define vwaddu_vv_u16m8 | ( | ... | ) | __riscv_vwaddu_vv_u16m8(__VA_ARGS__) |
| #define vwaddu_vv_u16m8_m | ( | ... | ) | __riscv_vwaddu_vv_u16m8_tumu(__VA_ARGS__) |
| #define vwaddu_vv_u16mf2 | ( | ... | ) | __riscv_vwaddu_vv_u16mf2(__VA_ARGS__) |
| #define vwaddu_vv_u16mf2_m | ( | ... | ) | __riscv_vwaddu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vwaddu_vv_u16mf4 | ( | ... | ) | __riscv_vwaddu_vv_u16mf4(__VA_ARGS__) |
| #define vwaddu_vv_u16mf4_m | ( | ... | ) | __riscv_vwaddu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vwaddu_vv_u32m1 | ( | ... | ) | __riscv_vwaddu_vv_u32m1(__VA_ARGS__) |
| #define vwaddu_vv_u32m1_m | ( | ... | ) | __riscv_vwaddu_vv_u32m1_tumu(__VA_ARGS__) |
| #define vwaddu_vv_u32m2 | ( | ... | ) | __riscv_vwaddu_vv_u32m2(__VA_ARGS__) |
| #define vwaddu_vv_u32m2_m | ( | ... | ) | __riscv_vwaddu_vv_u32m2_tumu(__VA_ARGS__) |
| #define vwaddu_vv_u32m4 | ( | ... | ) | __riscv_vwaddu_vv_u32m4(__VA_ARGS__) |
| #define vwaddu_vv_u32m4_m | ( | ... | ) | __riscv_vwaddu_vv_u32m4_tumu(__VA_ARGS__) |
| #define vwaddu_vv_u32m8 | ( | ... | ) | __riscv_vwaddu_vv_u32m8(__VA_ARGS__) |
| #define vwaddu_vv_u32m8_m | ( | ... | ) | __riscv_vwaddu_vv_u32m8_tumu(__VA_ARGS__) |
| #define vwaddu_vv_u32mf2 | ( | ... | ) | __riscv_vwaddu_vv_u32mf2(__VA_ARGS__) |
| #define vwaddu_vv_u32mf2_m | ( | ... | ) | __riscv_vwaddu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vwaddu_vv_u64m1 | ( | ... | ) | __riscv_vwaddu_vv_u64m1(__VA_ARGS__) |
| #define vwaddu_vv_u64m1_m | ( | ... | ) | __riscv_vwaddu_vv_u64m1_tumu(__VA_ARGS__) |
| #define vwaddu_vv_u64m2 | ( | ... | ) | __riscv_vwaddu_vv_u64m2(__VA_ARGS__) |
| #define vwaddu_vv_u64m2_m | ( | ... | ) | __riscv_vwaddu_vv_u64m2_tumu(__VA_ARGS__) |
| #define vwaddu_vv_u64m4 | ( | ... | ) | __riscv_vwaddu_vv_u64m4(__VA_ARGS__) |
| #define vwaddu_vv_u64m4_m | ( | ... | ) | __riscv_vwaddu_vv_u64m4_tumu(__VA_ARGS__) |
| #define vwaddu_vv_u64m8 | ( | ... | ) | __riscv_vwaddu_vv_u64m8(__VA_ARGS__) |
| #define vwaddu_vv_u64m8_m | ( | ... | ) | __riscv_vwaddu_vv_u64m8_tumu(__VA_ARGS__) |
| #define vwaddu_vx_u16m1 | ( | ... | ) | __riscv_vwaddu_vx_u16m1(__VA_ARGS__) |
| #define vwaddu_vx_u16m1_m | ( | ... | ) | __riscv_vwaddu_vx_u16m1_tumu(__VA_ARGS__) |
| #define vwaddu_vx_u16m2 | ( | ... | ) | __riscv_vwaddu_vx_u16m2(__VA_ARGS__) |
| #define vwaddu_vx_u16m2_m | ( | ... | ) | __riscv_vwaddu_vx_u16m2_tumu(__VA_ARGS__) |
| #define vwaddu_vx_u16m4 | ( | ... | ) | __riscv_vwaddu_vx_u16m4(__VA_ARGS__) |
| #define vwaddu_vx_u16m4_m | ( | ... | ) | __riscv_vwaddu_vx_u16m4_tumu(__VA_ARGS__) |
| #define vwaddu_vx_u16m8 | ( | ... | ) | __riscv_vwaddu_vx_u16m8(__VA_ARGS__) |
| #define vwaddu_vx_u16m8_m | ( | ... | ) | __riscv_vwaddu_vx_u16m8_tumu(__VA_ARGS__) |
| #define vwaddu_vx_u16mf2 | ( | ... | ) | __riscv_vwaddu_vx_u16mf2(__VA_ARGS__) |
| #define vwaddu_vx_u16mf2_m | ( | ... | ) | __riscv_vwaddu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vwaddu_vx_u16mf4 | ( | ... | ) | __riscv_vwaddu_vx_u16mf4(__VA_ARGS__) |
| #define vwaddu_vx_u16mf4_m | ( | ... | ) | __riscv_vwaddu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vwaddu_vx_u32m1 | ( | ... | ) | __riscv_vwaddu_vx_u32m1(__VA_ARGS__) |
| #define vwaddu_vx_u32m1_m | ( | ... | ) | __riscv_vwaddu_vx_u32m1_tumu(__VA_ARGS__) |
| #define vwaddu_vx_u32m2 | ( | ... | ) | __riscv_vwaddu_vx_u32m2(__VA_ARGS__) |
| #define vwaddu_vx_u32m2_m | ( | ... | ) | __riscv_vwaddu_vx_u32m2_tumu(__VA_ARGS__) |
| #define vwaddu_vx_u32m4 | ( | ... | ) | __riscv_vwaddu_vx_u32m4(__VA_ARGS__) |
| #define vwaddu_vx_u32m4_m | ( | ... | ) | __riscv_vwaddu_vx_u32m4_tumu(__VA_ARGS__) |
| #define vwaddu_vx_u32m8 | ( | ... | ) | __riscv_vwaddu_vx_u32m8(__VA_ARGS__) |
| #define vwaddu_vx_u32m8_m | ( | ... | ) | __riscv_vwaddu_vx_u32m8_tumu(__VA_ARGS__) |
| #define vwaddu_vx_u32mf2 | ( | ... | ) | __riscv_vwaddu_vx_u32mf2(__VA_ARGS__) |
| #define vwaddu_vx_u32mf2_m | ( | ... | ) | __riscv_vwaddu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vwaddu_vx_u64m1 | ( | ... | ) | __riscv_vwaddu_vx_u64m1(__VA_ARGS__) |
| #define vwaddu_vx_u64m1_m | ( | ... | ) | __riscv_vwaddu_vx_u64m1_tumu(__VA_ARGS__) |
| #define vwaddu_vx_u64m2 | ( | ... | ) | __riscv_vwaddu_vx_u64m2(__VA_ARGS__) |
| #define vwaddu_vx_u64m2_m | ( | ... | ) | __riscv_vwaddu_vx_u64m2_tumu(__VA_ARGS__) |
| #define vwaddu_vx_u64m4 | ( | ... | ) | __riscv_vwaddu_vx_u64m4(__VA_ARGS__) |
| #define vwaddu_vx_u64m4_m | ( | ... | ) | __riscv_vwaddu_vx_u64m4_tumu(__VA_ARGS__) |
| #define vwaddu_vx_u64m8 | ( | ... | ) | __riscv_vwaddu_vx_u64m8(__VA_ARGS__) |
| #define vwaddu_vx_u64m8_m | ( | ... | ) | __riscv_vwaddu_vx_u64m8_tumu(__VA_ARGS__) |
| #define vwaddu_wv_u16m1 | ( | ... | ) | __riscv_vwaddu_wv_u16m1(__VA_ARGS__) |
| #define vwaddu_wv_u16m1_m | ( | ... | ) | __riscv_vwaddu_wv_u16m1_tumu(__VA_ARGS__) |
| #define vwaddu_wv_u16m2 | ( | ... | ) | __riscv_vwaddu_wv_u16m2(__VA_ARGS__) |
| #define vwaddu_wv_u16m2_m | ( | ... | ) | __riscv_vwaddu_wv_u16m2_tumu(__VA_ARGS__) |
| #define vwaddu_wv_u16m4 | ( | ... | ) | __riscv_vwaddu_wv_u16m4(__VA_ARGS__) |
| #define vwaddu_wv_u16m4_m | ( | ... | ) | __riscv_vwaddu_wv_u16m4_tumu(__VA_ARGS__) |
| #define vwaddu_wv_u16m8 | ( | ... | ) | __riscv_vwaddu_wv_u16m8(__VA_ARGS__) |
| #define vwaddu_wv_u16m8_m | ( | ... | ) | __riscv_vwaddu_wv_u16m8_tumu(__VA_ARGS__) |
| #define vwaddu_wv_u16mf2 | ( | ... | ) | __riscv_vwaddu_wv_u16mf2(__VA_ARGS__) |
| #define vwaddu_wv_u16mf2_m | ( | ... | ) | __riscv_vwaddu_wv_u16mf2_tumu(__VA_ARGS__) |
| #define vwaddu_wv_u16mf4 | ( | ... | ) | __riscv_vwaddu_wv_u16mf4(__VA_ARGS__) |
| #define vwaddu_wv_u16mf4_m | ( | ... | ) | __riscv_vwaddu_wv_u16mf4_tumu(__VA_ARGS__) |
| #define vwaddu_wv_u32m1 | ( | ... | ) | __riscv_vwaddu_wv_u32m1(__VA_ARGS__) |
| #define vwaddu_wv_u32m1_m | ( | ... | ) | __riscv_vwaddu_wv_u32m1_tumu(__VA_ARGS__) |
| #define vwaddu_wv_u32m2 | ( | ... | ) | __riscv_vwaddu_wv_u32m2(__VA_ARGS__) |
| #define vwaddu_wv_u32m2_m | ( | ... | ) | __riscv_vwaddu_wv_u32m2_tumu(__VA_ARGS__) |
| #define vwaddu_wv_u32m4 | ( | ... | ) | __riscv_vwaddu_wv_u32m4(__VA_ARGS__) |
| #define vwaddu_wv_u32m4_m | ( | ... | ) | __riscv_vwaddu_wv_u32m4_tumu(__VA_ARGS__) |
| #define vwaddu_wv_u32m8 | ( | ... | ) | __riscv_vwaddu_wv_u32m8(__VA_ARGS__) |
| #define vwaddu_wv_u32m8_m | ( | ... | ) | __riscv_vwaddu_wv_u32m8_tumu(__VA_ARGS__) |
| #define vwaddu_wv_u32mf2 | ( | ... | ) | __riscv_vwaddu_wv_u32mf2(__VA_ARGS__) |
| #define vwaddu_wv_u32mf2_m | ( | ... | ) | __riscv_vwaddu_wv_u32mf2_tumu(__VA_ARGS__) |
| #define vwaddu_wv_u64m1 | ( | ... | ) | __riscv_vwaddu_wv_u64m1(__VA_ARGS__) |
| #define vwaddu_wv_u64m1_m | ( | ... | ) | __riscv_vwaddu_wv_u64m1_tumu(__VA_ARGS__) |
| #define vwaddu_wv_u64m2 | ( | ... | ) | __riscv_vwaddu_wv_u64m2(__VA_ARGS__) |
| #define vwaddu_wv_u64m2_m | ( | ... | ) | __riscv_vwaddu_wv_u64m2_tumu(__VA_ARGS__) |
| #define vwaddu_wv_u64m4 | ( | ... | ) | __riscv_vwaddu_wv_u64m4(__VA_ARGS__) |
| #define vwaddu_wv_u64m4_m | ( | ... | ) | __riscv_vwaddu_wv_u64m4_tumu(__VA_ARGS__) |
| #define vwaddu_wv_u64m8 | ( | ... | ) | __riscv_vwaddu_wv_u64m8(__VA_ARGS__) |
| #define vwaddu_wv_u64m8_m | ( | ... | ) | __riscv_vwaddu_wv_u64m8_tumu(__VA_ARGS__) |
| #define vwaddu_wx_u16m1 | ( | ... | ) | __riscv_vwaddu_wx_u16m1(__VA_ARGS__) |
| #define vwaddu_wx_u16m1_m | ( | ... | ) | __riscv_vwaddu_wx_u16m1_tumu(__VA_ARGS__) |
| #define vwaddu_wx_u16m2 | ( | ... | ) | __riscv_vwaddu_wx_u16m2(__VA_ARGS__) |
| #define vwaddu_wx_u16m2_m | ( | ... | ) | __riscv_vwaddu_wx_u16m2_tumu(__VA_ARGS__) |
| #define vwaddu_wx_u16m4 | ( | ... | ) | __riscv_vwaddu_wx_u16m4(__VA_ARGS__) |
| #define vwaddu_wx_u16m4_m | ( | ... | ) | __riscv_vwaddu_wx_u16m4_tumu(__VA_ARGS__) |
| #define vwaddu_wx_u16m8 | ( | ... | ) | __riscv_vwaddu_wx_u16m8(__VA_ARGS__) |
| #define vwaddu_wx_u16m8_m | ( | ... | ) | __riscv_vwaddu_wx_u16m8_tumu(__VA_ARGS__) |
| #define vwaddu_wx_u16mf2 | ( | ... | ) | __riscv_vwaddu_wx_u16mf2(__VA_ARGS__) |
| #define vwaddu_wx_u16mf2_m | ( | ... | ) | __riscv_vwaddu_wx_u16mf2_tumu(__VA_ARGS__) |
| #define vwaddu_wx_u16mf4 | ( | ... | ) | __riscv_vwaddu_wx_u16mf4(__VA_ARGS__) |
| #define vwaddu_wx_u16mf4_m | ( | ... | ) | __riscv_vwaddu_wx_u16mf4_tumu(__VA_ARGS__) |
| #define vwaddu_wx_u32m1 | ( | ... | ) | __riscv_vwaddu_wx_u32m1(__VA_ARGS__) |
| #define vwaddu_wx_u32m1_m | ( | ... | ) | __riscv_vwaddu_wx_u32m1_tumu(__VA_ARGS__) |
| #define vwaddu_wx_u32m2 | ( | ... | ) | __riscv_vwaddu_wx_u32m2(__VA_ARGS__) |
| #define vwaddu_wx_u32m2_m | ( | ... | ) | __riscv_vwaddu_wx_u32m2_tumu(__VA_ARGS__) |
| #define vwaddu_wx_u32m4 | ( | ... | ) | __riscv_vwaddu_wx_u32m4(__VA_ARGS__) |
| #define vwaddu_wx_u32m4_m | ( | ... | ) | __riscv_vwaddu_wx_u32m4_tumu(__VA_ARGS__) |
| #define vwaddu_wx_u32m8 | ( | ... | ) | __riscv_vwaddu_wx_u32m8(__VA_ARGS__) |
| #define vwaddu_wx_u32m8_m | ( | ... | ) | __riscv_vwaddu_wx_u32m8_tumu(__VA_ARGS__) |
| #define vwaddu_wx_u32mf2 | ( | ... | ) | __riscv_vwaddu_wx_u32mf2(__VA_ARGS__) |
| #define vwaddu_wx_u32mf2_m | ( | ... | ) | __riscv_vwaddu_wx_u32mf2_tumu(__VA_ARGS__) |
| #define vwaddu_wx_u64m1 | ( | ... | ) | __riscv_vwaddu_wx_u64m1(__VA_ARGS__) |
| #define vwaddu_wx_u64m1_m | ( | ... | ) | __riscv_vwaddu_wx_u64m1_tumu(__VA_ARGS__) |
| #define vwaddu_wx_u64m2 | ( | ... | ) | __riscv_vwaddu_wx_u64m2(__VA_ARGS__) |
| #define vwaddu_wx_u64m2_m | ( | ... | ) | __riscv_vwaddu_wx_u64m2_tumu(__VA_ARGS__) |
| #define vwaddu_wx_u64m4 | ( | ... | ) | __riscv_vwaddu_wx_u64m4(__VA_ARGS__) |
| #define vwaddu_wx_u64m4_m | ( | ... | ) | __riscv_vwaddu_wx_u64m4_tumu(__VA_ARGS__) |
| #define vwaddu_wx_u64m8 | ( | ... | ) | __riscv_vwaddu_wx_u64m8(__VA_ARGS__) |
| #define vwaddu_wx_u64m8_m | ( | ... | ) | __riscv_vwaddu_wx_u64m8_tumu(__VA_ARGS__) |
| #define vwcvt_x_x_v_i16m1 | ( | ... | ) | __riscv_vwcvt_x_x_v_i16m1(__VA_ARGS__) |
| #define vwcvt_x_x_v_i16m1_m | ( | ... | ) | __riscv_vwcvt_x_x_v_i16m1_tumu(__VA_ARGS__) |
| #define vwcvt_x_x_v_i16m2 | ( | ... | ) | __riscv_vwcvt_x_x_v_i16m2(__VA_ARGS__) |
| #define vwcvt_x_x_v_i16m2_m | ( | ... | ) | __riscv_vwcvt_x_x_v_i16m2_tumu(__VA_ARGS__) |
| #define vwcvt_x_x_v_i16m4 | ( | ... | ) | __riscv_vwcvt_x_x_v_i16m4(__VA_ARGS__) |
| #define vwcvt_x_x_v_i16m4_m | ( | ... | ) | __riscv_vwcvt_x_x_v_i16m4_tumu(__VA_ARGS__) |
| #define vwcvt_x_x_v_i16m8 | ( | ... | ) | __riscv_vwcvt_x_x_v_i16m8(__VA_ARGS__) |
| #define vwcvt_x_x_v_i16m8_m | ( | ... | ) | __riscv_vwcvt_x_x_v_i16m8_tumu(__VA_ARGS__) |
| #define vwcvt_x_x_v_i16mf2 | ( | ... | ) | __riscv_vwcvt_x_x_v_i16mf2(__VA_ARGS__) |
| #define vwcvt_x_x_v_i16mf2_m | ( | ... | ) | __riscv_vwcvt_x_x_v_i16mf2_tumu(__VA_ARGS__) |
| #define vwcvt_x_x_v_i16mf4 | ( | ... | ) | __riscv_vwcvt_x_x_v_i16mf4(__VA_ARGS__) |
| #define vwcvt_x_x_v_i16mf4_m | ( | ... | ) | __riscv_vwcvt_x_x_v_i16mf4_tumu(__VA_ARGS__) |
| #define vwcvt_x_x_v_i32m1 | ( | ... | ) | __riscv_vwcvt_x_x_v_i32m1(__VA_ARGS__) |
| #define vwcvt_x_x_v_i32m1_m | ( | ... | ) | __riscv_vwcvt_x_x_v_i32m1_tumu(__VA_ARGS__) |
| #define vwcvt_x_x_v_i32m2 | ( | ... | ) | __riscv_vwcvt_x_x_v_i32m2(__VA_ARGS__) |
| #define vwcvt_x_x_v_i32m2_m | ( | ... | ) | __riscv_vwcvt_x_x_v_i32m2_tumu(__VA_ARGS__) |
| #define vwcvt_x_x_v_i32m4 | ( | ... | ) | __riscv_vwcvt_x_x_v_i32m4(__VA_ARGS__) |
| #define vwcvt_x_x_v_i32m4_m | ( | ... | ) | __riscv_vwcvt_x_x_v_i32m4_tumu(__VA_ARGS__) |
| #define vwcvt_x_x_v_i32m8 | ( | ... | ) | __riscv_vwcvt_x_x_v_i32m8(__VA_ARGS__) |
| #define vwcvt_x_x_v_i32m8_m | ( | ... | ) | __riscv_vwcvt_x_x_v_i32m8_tumu(__VA_ARGS__) |
| #define vwcvt_x_x_v_i32mf2 | ( | ... | ) | __riscv_vwcvt_x_x_v_i32mf2(__VA_ARGS__) |
| #define vwcvt_x_x_v_i32mf2_m | ( | ... | ) | __riscv_vwcvt_x_x_v_i32mf2_tumu(__VA_ARGS__) |
| #define vwcvt_x_x_v_i64m1 | ( | ... | ) | __riscv_vwcvt_x_x_v_i64m1(__VA_ARGS__) |
| #define vwcvt_x_x_v_i64m1_m | ( | ... | ) | __riscv_vwcvt_x_x_v_i64m1_tumu(__VA_ARGS__) |
| #define vwcvt_x_x_v_i64m2 | ( | ... | ) | __riscv_vwcvt_x_x_v_i64m2(__VA_ARGS__) |
| #define vwcvt_x_x_v_i64m2_m | ( | ... | ) | __riscv_vwcvt_x_x_v_i64m2_tumu(__VA_ARGS__) |
| #define vwcvt_x_x_v_i64m4 | ( | ... | ) | __riscv_vwcvt_x_x_v_i64m4(__VA_ARGS__) |
| #define vwcvt_x_x_v_i64m4_m | ( | ... | ) | __riscv_vwcvt_x_x_v_i64m4_tumu(__VA_ARGS__) |
| #define vwcvt_x_x_v_i64m8 | ( | ... | ) | __riscv_vwcvt_x_x_v_i64m8(__VA_ARGS__) |
| #define vwcvt_x_x_v_i64m8_m | ( | ... | ) | __riscv_vwcvt_x_x_v_i64m8_tumu(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u16m1 | ( | ... | ) | __riscv_vwcvtu_x_x_v_u16m1(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u16m1_m | ( | ... | ) | __riscv_vwcvtu_x_x_v_u16m1_tumu(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u16m2 | ( | ... | ) | __riscv_vwcvtu_x_x_v_u16m2(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u16m2_m | ( | ... | ) | __riscv_vwcvtu_x_x_v_u16m2_tumu(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u16m4 | ( | ... | ) | __riscv_vwcvtu_x_x_v_u16m4(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u16m4_m | ( | ... | ) | __riscv_vwcvtu_x_x_v_u16m4_tumu(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u16m8 | ( | ... | ) | __riscv_vwcvtu_x_x_v_u16m8(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u16m8_m | ( | ... | ) | __riscv_vwcvtu_x_x_v_u16m8_tumu(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u16mf2 | ( | ... | ) | __riscv_vwcvtu_x_x_v_u16mf2(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u16mf2_m | ( | ... | ) | __riscv_vwcvtu_x_x_v_u16mf2_tumu(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u16mf4 | ( | ... | ) | __riscv_vwcvtu_x_x_v_u16mf4(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u16mf4_m | ( | ... | ) | __riscv_vwcvtu_x_x_v_u16mf4_tumu(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u32m1 | ( | ... | ) | __riscv_vwcvtu_x_x_v_u32m1(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u32m1_m | ( | ... | ) | __riscv_vwcvtu_x_x_v_u32m1_tumu(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u32m2 | ( | ... | ) | __riscv_vwcvtu_x_x_v_u32m2(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u32m2_m | ( | ... | ) | __riscv_vwcvtu_x_x_v_u32m2_tumu(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u32m4 | ( | ... | ) | __riscv_vwcvtu_x_x_v_u32m4(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u32m4_m | ( | ... | ) | __riscv_vwcvtu_x_x_v_u32m4_tumu(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u32m8 | ( | ... | ) | __riscv_vwcvtu_x_x_v_u32m8(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u32m8_m | ( | ... | ) | __riscv_vwcvtu_x_x_v_u32m8_tumu(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u32mf2 | ( | ... | ) | __riscv_vwcvtu_x_x_v_u32mf2(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u32mf2_m | ( | ... | ) | __riscv_vwcvtu_x_x_v_u32mf2_tumu(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u64m1 | ( | ... | ) | __riscv_vwcvtu_x_x_v_u64m1(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u64m1_m | ( | ... | ) | __riscv_vwcvtu_x_x_v_u64m1_tumu(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u64m2 | ( | ... | ) | __riscv_vwcvtu_x_x_v_u64m2(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u64m2_m | ( | ... | ) | __riscv_vwcvtu_x_x_v_u64m2_tumu(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u64m4 | ( | ... | ) | __riscv_vwcvtu_x_x_v_u64m4(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u64m4_m | ( | ... | ) | __riscv_vwcvtu_x_x_v_u64m4_tumu(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u64m8 | ( | ... | ) | __riscv_vwcvtu_x_x_v_u64m8(__VA_ARGS__) |
| #define vwcvtu_x_x_v_u64m8_m | ( | ... | ) | __riscv_vwcvtu_x_x_v_u64m8_tumu(__VA_ARGS__) |
| #define vwmacc_vv_i16m1 | ( | ... | ) | __riscv_vwmacc_vv_i16m1_tu(__VA_ARGS__) |
| #define vwmacc_vv_i16m1_m | ( | ... | ) | __riscv_vwmacc_vv_i16m1_tumu(__VA_ARGS__) |
| #define vwmacc_vv_i16m2 | ( | ... | ) | __riscv_vwmacc_vv_i16m2_tu(__VA_ARGS__) |
| #define vwmacc_vv_i16m2_m | ( | ... | ) | __riscv_vwmacc_vv_i16m2_tumu(__VA_ARGS__) |
| #define vwmacc_vv_i16m4 | ( | ... | ) | __riscv_vwmacc_vv_i16m4_tu(__VA_ARGS__) |
| #define vwmacc_vv_i16m4_m | ( | ... | ) | __riscv_vwmacc_vv_i16m4_tumu(__VA_ARGS__) |
| #define vwmacc_vv_i16m8 | ( | ... | ) | __riscv_vwmacc_vv_i16m8_tu(__VA_ARGS__) |
| #define vwmacc_vv_i16m8_m | ( | ... | ) | __riscv_vwmacc_vv_i16m8_tumu(__VA_ARGS__) |
| #define vwmacc_vv_i16mf2 | ( | ... | ) | __riscv_vwmacc_vv_i16mf2_tu(__VA_ARGS__) |
| #define vwmacc_vv_i16mf2_m | ( | ... | ) | __riscv_vwmacc_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vwmacc_vv_i16mf4 | ( | ... | ) | __riscv_vwmacc_vv_i16mf4_tu(__VA_ARGS__) |
| #define vwmacc_vv_i16mf4_m | ( | ... | ) | __riscv_vwmacc_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vwmacc_vv_i32m1 | ( | ... | ) | __riscv_vwmacc_vv_i32m1_tu(__VA_ARGS__) |
| #define vwmacc_vv_i32m1_m | ( | ... | ) | __riscv_vwmacc_vv_i32m1_tumu(__VA_ARGS__) |
| #define vwmacc_vv_i32m2 | ( | ... | ) | __riscv_vwmacc_vv_i32m2_tu(__VA_ARGS__) |
| #define vwmacc_vv_i32m2_m | ( | ... | ) | __riscv_vwmacc_vv_i32m2_tumu(__VA_ARGS__) |
| #define vwmacc_vv_i32m4 | ( | ... | ) | __riscv_vwmacc_vv_i32m4_tu(__VA_ARGS__) |
| #define vwmacc_vv_i32m4_m | ( | ... | ) | __riscv_vwmacc_vv_i32m4_tumu(__VA_ARGS__) |
| #define vwmacc_vv_i32m8 | ( | ... | ) | __riscv_vwmacc_vv_i32m8_tu(__VA_ARGS__) |
| #define vwmacc_vv_i32m8_m | ( | ... | ) | __riscv_vwmacc_vv_i32m8_tumu(__VA_ARGS__) |
| #define vwmacc_vv_i32mf2 | ( | ... | ) | __riscv_vwmacc_vv_i32mf2_tu(__VA_ARGS__) |
| #define vwmacc_vv_i32mf2_m | ( | ... | ) | __riscv_vwmacc_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vwmacc_vv_i64m1 | ( | ... | ) | __riscv_vwmacc_vv_i64m1_tu(__VA_ARGS__) |
| #define vwmacc_vv_i64m1_m | ( | ... | ) | __riscv_vwmacc_vv_i64m1_tumu(__VA_ARGS__) |
| #define vwmacc_vv_i64m2 | ( | ... | ) | __riscv_vwmacc_vv_i64m2_tu(__VA_ARGS__) |
| #define vwmacc_vv_i64m2_m | ( | ... | ) | __riscv_vwmacc_vv_i64m2_tumu(__VA_ARGS__) |
| #define vwmacc_vv_i64m4 | ( | ... | ) | __riscv_vwmacc_vv_i64m4_tu(__VA_ARGS__) |
| #define vwmacc_vv_i64m4_m | ( | ... | ) | __riscv_vwmacc_vv_i64m4_tumu(__VA_ARGS__) |
| #define vwmacc_vv_i64m8 | ( | ... | ) | __riscv_vwmacc_vv_i64m8_tu(__VA_ARGS__) |
| #define vwmacc_vv_i64m8_m | ( | ... | ) | __riscv_vwmacc_vv_i64m8_tumu(__VA_ARGS__) |
| #define vwmacc_vx_i16m1 | ( | ... | ) | __riscv_vwmacc_vx_i16m1_tu(__VA_ARGS__) |
| #define vwmacc_vx_i16m1_m | ( | ... | ) | __riscv_vwmacc_vx_i16m1_tumu(__VA_ARGS__) |
| #define vwmacc_vx_i16m2 | ( | ... | ) | __riscv_vwmacc_vx_i16m2_tu(__VA_ARGS__) |
| #define vwmacc_vx_i16m2_m | ( | ... | ) | __riscv_vwmacc_vx_i16m2_tumu(__VA_ARGS__) |
| #define vwmacc_vx_i16m4 | ( | ... | ) | __riscv_vwmacc_vx_i16m4_tu(__VA_ARGS__) |
| #define vwmacc_vx_i16m4_m | ( | ... | ) | __riscv_vwmacc_vx_i16m4_tumu(__VA_ARGS__) |
| #define vwmacc_vx_i16m8 | ( | ... | ) | __riscv_vwmacc_vx_i16m8_tu(__VA_ARGS__) |
| #define vwmacc_vx_i16m8_m | ( | ... | ) | __riscv_vwmacc_vx_i16m8_tumu(__VA_ARGS__) |
| #define vwmacc_vx_i16mf2 | ( | ... | ) | __riscv_vwmacc_vx_i16mf2_tu(__VA_ARGS__) |
| #define vwmacc_vx_i16mf2_m | ( | ... | ) | __riscv_vwmacc_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vwmacc_vx_i16mf4 | ( | ... | ) | __riscv_vwmacc_vx_i16mf4_tu(__VA_ARGS__) |
| #define vwmacc_vx_i16mf4_m | ( | ... | ) | __riscv_vwmacc_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vwmacc_vx_i32m1 | ( | ... | ) | __riscv_vwmacc_vx_i32m1_tu(__VA_ARGS__) |
| #define vwmacc_vx_i32m1_m | ( | ... | ) | __riscv_vwmacc_vx_i32m1_tumu(__VA_ARGS__) |
| #define vwmacc_vx_i32m2 | ( | ... | ) | __riscv_vwmacc_vx_i32m2_tu(__VA_ARGS__) |
| #define vwmacc_vx_i32m2_m | ( | ... | ) | __riscv_vwmacc_vx_i32m2_tumu(__VA_ARGS__) |
| #define vwmacc_vx_i32m4 | ( | ... | ) | __riscv_vwmacc_vx_i32m4_tu(__VA_ARGS__) |
| #define vwmacc_vx_i32m4_m | ( | ... | ) | __riscv_vwmacc_vx_i32m4_tumu(__VA_ARGS__) |
| #define vwmacc_vx_i32m8 | ( | ... | ) | __riscv_vwmacc_vx_i32m8_tu(__VA_ARGS__) |
| #define vwmacc_vx_i32m8_m | ( | ... | ) | __riscv_vwmacc_vx_i32m8_tumu(__VA_ARGS__) |
| #define vwmacc_vx_i32mf2 | ( | ... | ) | __riscv_vwmacc_vx_i32mf2_tu(__VA_ARGS__) |
| #define vwmacc_vx_i32mf2_m | ( | ... | ) | __riscv_vwmacc_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vwmacc_vx_i64m1 | ( | ... | ) | __riscv_vwmacc_vx_i64m1_tu(__VA_ARGS__) |
| #define vwmacc_vx_i64m1_m | ( | ... | ) | __riscv_vwmacc_vx_i64m1_tumu(__VA_ARGS__) |
| #define vwmacc_vx_i64m2 | ( | ... | ) | __riscv_vwmacc_vx_i64m2_tu(__VA_ARGS__) |
| #define vwmacc_vx_i64m2_m | ( | ... | ) | __riscv_vwmacc_vx_i64m2_tumu(__VA_ARGS__) |
| #define vwmacc_vx_i64m4 | ( | ... | ) | __riscv_vwmacc_vx_i64m4_tu(__VA_ARGS__) |
| #define vwmacc_vx_i64m4_m | ( | ... | ) | __riscv_vwmacc_vx_i64m4_tumu(__VA_ARGS__) |
| #define vwmacc_vx_i64m8 | ( | ... | ) | __riscv_vwmacc_vx_i64m8_tu(__VA_ARGS__) |
| #define vwmacc_vx_i64m8_m | ( | ... | ) | __riscv_vwmacc_vx_i64m8_tumu(__VA_ARGS__) |
| #define vwmaccsu_vv_i16m1 | ( | ... | ) | __riscv_vwmaccsu_vv_i16m1_tu(__VA_ARGS__) |
| #define vwmaccsu_vv_i16m1_m | ( | ... | ) | __riscv_vwmaccsu_vv_i16m1_tumu(__VA_ARGS__) |
| #define vwmaccsu_vv_i16m2 | ( | ... | ) | __riscv_vwmaccsu_vv_i16m2_tu(__VA_ARGS__) |
| #define vwmaccsu_vv_i16m2_m | ( | ... | ) | __riscv_vwmaccsu_vv_i16m2_tumu(__VA_ARGS__) |
| #define vwmaccsu_vv_i16m4 | ( | ... | ) | __riscv_vwmaccsu_vv_i16m4_tu(__VA_ARGS__) |
| #define vwmaccsu_vv_i16m4_m | ( | ... | ) | __riscv_vwmaccsu_vv_i16m4_tumu(__VA_ARGS__) |
| #define vwmaccsu_vv_i16m8 | ( | ... | ) | __riscv_vwmaccsu_vv_i16m8_tu(__VA_ARGS__) |
| #define vwmaccsu_vv_i16m8_m | ( | ... | ) | __riscv_vwmaccsu_vv_i16m8_tumu(__VA_ARGS__) |
| #define vwmaccsu_vv_i16mf2 | ( | ... | ) | __riscv_vwmaccsu_vv_i16mf2_tu(__VA_ARGS__) |
| #define vwmaccsu_vv_i16mf2_m | ( | ... | ) | __riscv_vwmaccsu_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vwmaccsu_vv_i16mf4 | ( | ... | ) | __riscv_vwmaccsu_vv_i16mf4_tu(__VA_ARGS__) |
| #define vwmaccsu_vv_i16mf4_m | ( | ... | ) | __riscv_vwmaccsu_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vwmaccsu_vv_i32m1 | ( | ... | ) | __riscv_vwmaccsu_vv_i32m1_tu(__VA_ARGS__) |
| #define vwmaccsu_vv_i32m1_m | ( | ... | ) | __riscv_vwmaccsu_vv_i32m1_tumu(__VA_ARGS__) |
| #define vwmaccsu_vv_i32m2 | ( | ... | ) | __riscv_vwmaccsu_vv_i32m2_tu(__VA_ARGS__) |
| #define vwmaccsu_vv_i32m2_m | ( | ... | ) | __riscv_vwmaccsu_vv_i32m2_tumu(__VA_ARGS__) |
| #define vwmaccsu_vv_i32m4 | ( | ... | ) | __riscv_vwmaccsu_vv_i32m4_tu(__VA_ARGS__) |
| #define vwmaccsu_vv_i32m4_m | ( | ... | ) | __riscv_vwmaccsu_vv_i32m4_tumu(__VA_ARGS__) |
| #define vwmaccsu_vv_i32m8 | ( | ... | ) | __riscv_vwmaccsu_vv_i32m8_tu(__VA_ARGS__) |
| #define vwmaccsu_vv_i32m8_m | ( | ... | ) | __riscv_vwmaccsu_vv_i32m8_tumu(__VA_ARGS__) |
| #define vwmaccsu_vv_i32mf2 | ( | ... | ) | __riscv_vwmaccsu_vv_i32mf2_tu(__VA_ARGS__) |
| #define vwmaccsu_vv_i32mf2_m | ( | ... | ) | __riscv_vwmaccsu_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vwmaccsu_vv_i64m1 | ( | ... | ) | __riscv_vwmaccsu_vv_i64m1_tu(__VA_ARGS__) |
| #define vwmaccsu_vv_i64m1_m | ( | ... | ) | __riscv_vwmaccsu_vv_i64m1_tumu(__VA_ARGS__) |
| #define vwmaccsu_vv_i64m2 | ( | ... | ) | __riscv_vwmaccsu_vv_i64m2_tu(__VA_ARGS__) |
| #define vwmaccsu_vv_i64m2_m | ( | ... | ) | __riscv_vwmaccsu_vv_i64m2_tumu(__VA_ARGS__) |
| #define vwmaccsu_vv_i64m4 | ( | ... | ) | __riscv_vwmaccsu_vv_i64m4_tu(__VA_ARGS__) |
| #define vwmaccsu_vv_i64m4_m | ( | ... | ) | __riscv_vwmaccsu_vv_i64m4_tumu(__VA_ARGS__) |
| #define vwmaccsu_vv_i64m8 | ( | ... | ) | __riscv_vwmaccsu_vv_i64m8_tu(__VA_ARGS__) |
| #define vwmaccsu_vv_i64m8_m | ( | ... | ) | __riscv_vwmaccsu_vv_i64m8_tumu(__VA_ARGS__) |
| #define vwmaccsu_vx_i16m1 | ( | ... | ) | __riscv_vwmaccsu_vx_i16m1_tu(__VA_ARGS__) |
| #define vwmaccsu_vx_i16m1_m | ( | ... | ) | __riscv_vwmaccsu_vx_i16m1_tumu(__VA_ARGS__) |
| #define vwmaccsu_vx_i16m2 | ( | ... | ) | __riscv_vwmaccsu_vx_i16m2_tu(__VA_ARGS__) |
| #define vwmaccsu_vx_i16m2_m | ( | ... | ) | __riscv_vwmaccsu_vx_i16m2_tumu(__VA_ARGS__) |
| #define vwmaccsu_vx_i16m4 | ( | ... | ) | __riscv_vwmaccsu_vx_i16m4_tu(__VA_ARGS__) |
| #define vwmaccsu_vx_i16m4_m | ( | ... | ) | __riscv_vwmaccsu_vx_i16m4_tumu(__VA_ARGS__) |
| #define vwmaccsu_vx_i16m8 | ( | ... | ) | __riscv_vwmaccsu_vx_i16m8_tu(__VA_ARGS__) |
| #define vwmaccsu_vx_i16m8_m | ( | ... | ) | __riscv_vwmaccsu_vx_i16m8_tumu(__VA_ARGS__) |
| #define vwmaccsu_vx_i16mf2 | ( | ... | ) | __riscv_vwmaccsu_vx_i16mf2_tu(__VA_ARGS__) |
| #define vwmaccsu_vx_i16mf2_m | ( | ... | ) | __riscv_vwmaccsu_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vwmaccsu_vx_i16mf4 | ( | ... | ) | __riscv_vwmaccsu_vx_i16mf4_tu(__VA_ARGS__) |
| #define vwmaccsu_vx_i16mf4_m | ( | ... | ) | __riscv_vwmaccsu_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vwmaccsu_vx_i32m1 | ( | ... | ) | __riscv_vwmaccsu_vx_i32m1_tu(__VA_ARGS__) |
| #define vwmaccsu_vx_i32m1_m | ( | ... | ) | __riscv_vwmaccsu_vx_i32m1_tumu(__VA_ARGS__) |
| #define vwmaccsu_vx_i32m2 | ( | ... | ) | __riscv_vwmaccsu_vx_i32m2_tu(__VA_ARGS__) |
| #define vwmaccsu_vx_i32m2_m | ( | ... | ) | __riscv_vwmaccsu_vx_i32m2_tumu(__VA_ARGS__) |
| #define vwmaccsu_vx_i32m4 | ( | ... | ) | __riscv_vwmaccsu_vx_i32m4_tu(__VA_ARGS__) |
| #define vwmaccsu_vx_i32m4_m | ( | ... | ) | __riscv_vwmaccsu_vx_i32m4_tumu(__VA_ARGS__) |
| #define vwmaccsu_vx_i32m8 | ( | ... | ) | __riscv_vwmaccsu_vx_i32m8_tu(__VA_ARGS__) |
| #define vwmaccsu_vx_i32m8_m | ( | ... | ) | __riscv_vwmaccsu_vx_i32m8_tumu(__VA_ARGS__) |
| #define vwmaccsu_vx_i32mf2 | ( | ... | ) | __riscv_vwmaccsu_vx_i32mf2_tu(__VA_ARGS__) |
| #define vwmaccsu_vx_i32mf2_m | ( | ... | ) | __riscv_vwmaccsu_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vwmaccsu_vx_i64m1 | ( | ... | ) | __riscv_vwmaccsu_vx_i64m1_tu(__VA_ARGS__) |
| #define vwmaccsu_vx_i64m1_m | ( | ... | ) | __riscv_vwmaccsu_vx_i64m1_tumu(__VA_ARGS__) |
| #define vwmaccsu_vx_i64m2 | ( | ... | ) | __riscv_vwmaccsu_vx_i64m2_tu(__VA_ARGS__) |
| #define vwmaccsu_vx_i64m2_m | ( | ... | ) | __riscv_vwmaccsu_vx_i64m2_tumu(__VA_ARGS__) |
| #define vwmaccsu_vx_i64m4 | ( | ... | ) | __riscv_vwmaccsu_vx_i64m4_tu(__VA_ARGS__) |
| #define vwmaccsu_vx_i64m4_m | ( | ... | ) | __riscv_vwmaccsu_vx_i64m4_tumu(__VA_ARGS__) |
| #define vwmaccsu_vx_i64m8 | ( | ... | ) | __riscv_vwmaccsu_vx_i64m8_tu(__VA_ARGS__) |
| #define vwmaccsu_vx_i64m8_m | ( | ... | ) | __riscv_vwmaccsu_vx_i64m8_tumu(__VA_ARGS__) |
| #define vwmaccu_vv_u16m1 | ( | ... | ) | __riscv_vwmaccu_vv_u16m1_tu(__VA_ARGS__) |
| #define vwmaccu_vv_u16m1_m | ( | ... | ) | __riscv_vwmaccu_vv_u16m1_tumu(__VA_ARGS__) |
| #define vwmaccu_vv_u16m2 | ( | ... | ) | __riscv_vwmaccu_vv_u16m2_tu(__VA_ARGS__) |
| #define vwmaccu_vv_u16m2_m | ( | ... | ) | __riscv_vwmaccu_vv_u16m2_tumu(__VA_ARGS__) |
| #define vwmaccu_vv_u16m4 | ( | ... | ) | __riscv_vwmaccu_vv_u16m4_tu(__VA_ARGS__) |
| #define vwmaccu_vv_u16m4_m | ( | ... | ) | __riscv_vwmaccu_vv_u16m4_tumu(__VA_ARGS__) |
| #define vwmaccu_vv_u16m8 | ( | ... | ) | __riscv_vwmaccu_vv_u16m8_tu(__VA_ARGS__) |
| #define vwmaccu_vv_u16m8_m | ( | ... | ) | __riscv_vwmaccu_vv_u16m8_tumu(__VA_ARGS__) |
| #define vwmaccu_vv_u16mf2 | ( | ... | ) | __riscv_vwmaccu_vv_u16mf2_tu(__VA_ARGS__) |
| #define vwmaccu_vv_u16mf2_m | ( | ... | ) | __riscv_vwmaccu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vwmaccu_vv_u16mf4 | ( | ... | ) | __riscv_vwmaccu_vv_u16mf4_tu(__VA_ARGS__) |
| #define vwmaccu_vv_u16mf4_m | ( | ... | ) | __riscv_vwmaccu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vwmaccu_vv_u32m1 | ( | ... | ) | __riscv_vwmaccu_vv_u32m1_tu(__VA_ARGS__) |
| #define vwmaccu_vv_u32m1_m | ( | ... | ) | __riscv_vwmaccu_vv_u32m1_tumu(__VA_ARGS__) |
| #define vwmaccu_vv_u32m2 | ( | ... | ) | __riscv_vwmaccu_vv_u32m2_tu(__VA_ARGS__) |
| #define vwmaccu_vv_u32m2_m | ( | ... | ) | __riscv_vwmaccu_vv_u32m2_tumu(__VA_ARGS__) |
| #define vwmaccu_vv_u32m4 | ( | ... | ) | __riscv_vwmaccu_vv_u32m4_tu(__VA_ARGS__) |
| #define vwmaccu_vv_u32m4_m | ( | ... | ) | __riscv_vwmaccu_vv_u32m4_tumu(__VA_ARGS__) |
| #define vwmaccu_vv_u32m8 | ( | ... | ) | __riscv_vwmaccu_vv_u32m8_tu(__VA_ARGS__) |
| #define vwmaccu_vv_u32m8_m | ( | ... | ) | __riscv_vwmaccu_vv_u32m8_tumu(__VA_ARGS__) |
| #define vwmaccu_vv_u32mf2 | ( | ... | ) | __riscv_vwmaccu_vv_u32mf2_tu(__VA_ARGS__) |
| #define vwmaccu_vv_u32mf2_m | ( | ... | ) | __riscv_vwmaccu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vwmaccu_vv_u64m1 | ( | ... | ) | __riscv_vwmaccu_vv_u64m1_tu(__VA_ARGS__) |
| #define vwmaccu_vv_u64m1_m | ( | ... | ) | __riscv_vwmaccu_vv_u64m1_tumu(__VA_ARGS__) |
| #define vwmaccu_vv_u64m2 | ( | ... | ) | __riscv_vwmaccu_vv_u64m2_tu(__VA_ARGS__) |
| #define vwmaccu_vv_u64m2_m | ( | ... | ) | __riscv_vwmaccu_vv_u64m2_tumu(__VA_ARGS__) |
| #define vwmaccu_vv_u64m4 | ( | ... | ) | __riscv_vwmaccu_vv_u64m4_tu(__VA_ARGS__) |
| #define vwmaccu_vv_u64m4_m | ( | ... | ) | __riscv_vwmaccu_vv_u64m4_tumu(__VA_ARGS__) |
| #define vwmaccu_vv_u64m8 | ( | ... | ) | __riscv_vwmaccu_vv_u64m8_tu(__VA_ARGS__) |
| #define vwmaccu_vv_u64m8_m | ( | ... | ) | __riscv_vwmaccu_vv_u64m8_tumu(__VA_ARGS__) |
| #define vwmaccu_vx_u16m1 | ( | ... | ) | __riscv_vwmaccu_vx_u16m1_tu(__VA_ARGS__) |
| #define vwmaccu_vx_u16m1_m | ( | ... | ) | __riscv_vwmaccu_vx_u16m1_tumu(__VA_ARGS__) |
| #define vwmaccu_vx_u16m2 | ( | ... | ) | __riscv_vwmaccu_vx_u16m2_tu(__VA_ARGS__) |
| #define vwmaccu_vx_u16m2_m | ( | ... | ) | __riscv_vwmaccu_vx_u16m2_tumu(__VA_ARGS__) |
| #define vwmaccu_vx_u16m4 | ( | ... | ) | __riscv_vwmaccu_vx_u16m4_tu(__VA_ARGS__) |
| #define vwmaccu_vx_u16m4_m | ( | ... | ) | __riscv_vwmaccu_vx_u16m4_tumu(__VA_ARGS__) |
| #define vwmaccu_vx_u16m8 | ( | ... | ) | __riscv_vwmaccu_vx_u16m8_tu(__VA_ARGS__) |
| #define vwmaccu_vx_u16m8_m | ( | ... | ) | __riscv_vwmaccu_vx_u16m8_tumu(__VA_ARGS__) |
| #define vwmaccu_vx_u16mf2 | ( | ... | ) | __riscv_vwmaccu_vx_u16mf2_tu(__VA_ARGS__) |
| #define vwmaccu_vx_u16mf2_m | ( | ... | ) | __riscv_vwmaccu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vwmaccu_vx_u16mf4 | ( | ... | ) | __riscv_vwmaccu_vx_u16mf4_tu(__VA_ARGS__) |
| #define vwmaccu_vx_u16mf4_m | ( | ... | ) | __riscv_vwmaccu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vwmaccu_vx_u32m1 | ( | ... | ) | __riscv_vwmaccu_vx_u32m1_tu(__VA_ARGS__) |
| #define vwmaccu_vx_u32m1_m | ( | ... | ) | __riscv_vwmaccu_vx_u32m1_tumu(__VA_ARGS__) |
| #define vwmaccu_vx_u32m2 | ( | ... | ) | __riscv_vwmaccu_vx_u32m2_tu(__VA_ARGS__) |
| #define vwmaccu_vx_u32m2_m | ( | ... | ) | __riscv_vwmaccu_vx_u32m2_tumu(__VA_ARGS__) |
| #define vwmaccu_vx_u32m4 | ( | ... | ) | __riscv_vwmaccu_vx_u32m4_tu(__VA_ARGS__) |
| #define vwmaccu_vx_u32m4_m | ( | ... | ) | __riscv_vwmaccu_vx_u32m4_tumu(__VA_ARGS__) |
| #define vwmaccu_vx_u32m8 | ( | ... | ) | __riscv_vwmaccu_vx_u32m8_tu(__VA_ARGS__) |
| #define vwmaccu_vx_u32m8_m | ( | ... | ) | __riscv_vwmaccu_vx_u32m8_tumu(__VA_ARGS__) |
| #define vwmaccu_vx_u32mf2 | ( | ... | ) | __riscv_vwmaccu_vx_u32mf2_tu(__VA_ARGS__) |
| #define vwmaccu_vx_u32mf2_m | ( | ... | ) | __riscv_vwmaccu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vwmaccu_vx_u64m1 | ( | ... | ) | __riscv_vwmaccu_vx_u64m1_tu(__VA_ARGS__) |
| #define vwmaccu_vx_u64m1_m | ( | ... | ) | __riscv_vwmaccu_vx_u64m1_tumu(__VA_ARGS__) |
| #define vwmaccu_vx_u64m2 | ( | ... | ) | __riscv_vwmaccu_vx_u64m2_tu(__VA_ARGS__) |
| #define vwmaccu_vx_u64m2_m | ( | ... | ) | __riscv_vwmaccu_vx_u64m2_tumu(__VA_ARGS__) |
| #define vwmaccu_vx_u64m4 | ( | ... | ) | __riscv_vwmaccu_vx_u64m4_tu(__VA_ARGS__) |
| #define vwmaccu_vx_u64m4_m | ( | ... | ) | __riscv_vwmaccu_vx_u64m4_tumu(__VA_ARGS__) |
| #define vwmaccu_vx_u64m8 | ( | ... | ) | __riscv_vwmaccu_vx_u64m8_tu(__VA_ARGS__) |
| #define vwmaccu_vx_u64m8_m | ( | ... | ) | __riscv_vwmaccu_vx_u64m8_tumu(__VA_ARGS__) |
| #define vwmaccus_vx_i16m1 | ( | ... | ) | __riscv_vwmaccus_vx_i16m1_tu(__VA_ARGS__) |
| #define vwmaccus_vx_i16m1_m | ( | ... | ) | __riscv_vwmaccus_vx_i16m1_tumu(__VA_ARGS__) |
| #define vwmaccus_vx_i16m2 | ( | ... | ) | __riscv_vwmaccus_vx_i16m2_tu(__VA_ARGS__) |
| #define vwmaccus_vx_i16m2_m | ( | ... | ) | __riscv_vwmaccus_vx_i16m2_tumu(__VA_ARGS__) |
| #define vwmaccus_vx_i16m4 | ( | ... | ) | __riscv_vwmaccus_vx_i16m4_tu(__VA_ARGS__) |
| #define vwmaccus_vx_i16m4_m | ( | ... | ) | __riscv_vwmaccus_vx_i16m4_tumu(__VA_ARGS__) |
| #define vwmaccus_vx_i16m8 | ( | ... | ) | __riscv_vwmaccus_vx_i16m8_tu(__VA_ARGS__) |
| #define vwmaccus_vx_i16m8_m | ( | ... | ) | __riscv_vwmaccus_vx_i16m8_tumu(__VA_ARGS__) |
| #define vwmaccus_vx_i16mf2 | ( | ... | ) | __riscv_vwmaccus_vx_i16mf2_tu(__VA_ARGS__) |
| #define vwmaccus_vx_i16mf2_m | ( | ... | ) | __riscv_vwmaccus_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vwmaccus_vx_i16mf4 | ( | ... | ) | __riscv_vwmaccus_vx_i16mf4_tu(__VA_ARGS__) |
| #define vwmaccus_vx_i16mf4_m | ( | ... | ) | __riscv_vwmaccus_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vwmaccus_vx_i32m1 | ( | ... | ) | __riscv_vwmaccus_vx_i32m1_tu(__VA_ARGS__) |
| #define vwmaccus_vx_i32m1_m | ( | ... | ) | __riscv_vwmaccus_vx_i32m1_tumu(__VA_ARGS__) |
| #define vwmaccus_vx_i32m2 | ( | ... | ) | __riscv_vwmaccus_vx_i32m2_tu(__VA_ARGS__) |
| #define vwmaccus_vx_i32m2_m | ( | ... | ) | __riscv_vwmaccus_vx_i32m2_tumu(__VA_ARGS__) |
| #define vwmaccus_vx_i32m4 | ( | ... | ) | __riscv_vwmaccus_vx_i32m4_tu(__VA_ARGS__) |
| #define vwmaccus_vx_i32m4_m | ( | ... | ) | __riscv_vwmaccus_vx_i32m4_tumu(__VA_ARGS__) |
| #define vwmaccus_vx_i32m8 | ( | ... | ) | __riscv_vwmaccus_vx_i32m8_tu(__VA_ARGS__) |
| #define vwmaccus_vx_i32m8_m | ( | ... | ) | __riscv_vwmaccus_vx_i32m8_tumu(__VA_ARGS__) |
| #define vwmaccus_vx_i32mf2 | ( | ... | ) | __riscv_vwmaccus_vx_i32mf2_tu(__VA_ARGS__) |
| #define vwmaccus_vx_i32mf2_m | ( | ... | ) | __riscv_vwmaccus_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vwmaccus_vx_i64m1 | ( | ... | ) | __riscv_vwmaccus_vx_i64m1_tu(__VA_ARGS__) |
| #define vwmaccus_vx_i64m1_m | ( | ... | ) | __riscv_vwmaccus_vx_i64m1_tumu(__VA_ARGS__) |
| #define vwmaccus_vx_i64m2 | ( | ... | ) | __riscv_vwmaccus_vx_i64m2_tu(__VA_ARGS__) |
| #define vwmaccus_vx_i64m2_m | ( | ... | ) | __riscv_vwmaccus_vx_i64m2_tumu(__VA_ARGS__) |
| #define vwmaccus_vx_i64m4 | ( | ... | ) | __riscv_vwmaccus_vx_i64m4_tu(__VA_ARGS__) |
| #define vwmaccus_vx_i64m4_m | ( | ... | ) | __riscv_vwmaccus_vx_i64m4_tumu(__VA_ARGS__) |
| #define vwmaccus_vx_i64m8 | ( | ... | ) | __riscv_vwmaccus_vx_i64m8_tu(__VA_ARGS__) |
| #define vwmaccus_vx_i64m8_m | ( | ... | ) | __riscv_vwmaccus_vx_i64m8_tumu(__VA_ARGS__) |
| #define vwmul_vv_i16m1 | ( | ... | ) | __riscv_vwmul_vv_i16m1(__VA_ARGS__) |
| #define vwmul_vv_i16m1_m | ( | ... | ) | __riscv_vwmul_vv_i16m1_tumu(__VA_ARGS__) |
| #define vwmul_vv_i16m2 | ( | ... | ) | __riscv_vwmul_vv_i16m2(__VA_ARGS__) |
| #define vwmul_vv_i16m2_m | ( | ... | ) | __riscv_vwmul_vv_i16m2_tumu(__VA_ARGS__) |
| #define vwmul_vv_i16m4 | ( | ... | ) | __riscv_vwmul_vv_i16m4(__VA_ARGS__) |
| #define vwmul_vv_i16m4_m | ( | ... | ) | __riscv_vwmul_vv_i16m4_tumu(__VA_ARGS__) |
| #define vwmul_vv_i16m8 | ( | ... | ) | __riscv_vwmul_vv_i16m8(__VA_ARGS__) |
| #define vwmul_vv_i16m8_m | ( | ... | ) | __riscv_vwmul_vv_i16m8_tumu(__VA_ARGS__) |
| #define vwmul_vv_i16mf2 | ( | ... | ) | __riscv_vwmul_vv_i16mf2(__VA_ARGS__) |
| #define vwmul_vv_i16mf2_m | ( | ... | ) | __riscv_vwmul_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vwmul_vv_i16mf4 | ( | ... | ) | __riscv_vwmul_vv_i16mf4(__VA_ARGS__) |
| #define vwmul_vv_i16mf4_m | ( | ... | ) | __riscv_vwmul_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vwmul_vv_i32m1 | ( | ... | ) | __riscv_vwmul_vv_i32m1(__VA_ARGS__) |
| #define vwmul_vv_i32m1_m | ( | ... | ) | __riscv_vwmul_vv_i32m1_tumu(__VA_ARGS__) |
| #define vwmul_vv_i32m2 | ( | ... | ) | __riscv_vwmul_vv_i32m2(__VA_ARGS__) |
| #define vwmul_vv_i32m2_m | ( | ... | ) | __riscv_vwmul_vv_i32m2_tumu(__VA_ARGS__) |
| #define vwmul_vv_i32m4 | ( | ... | ) | __riscv_vwmul_vv_i32m4(__VA_ARGS__) |
| #define vwmul_vv_i32m4_m | ( | ... | ) | __riscv_vwmul_vv_i32m4_tumu(__VA_ARGS__) |
| #define vwmul_vv_i32m8 | ( | ... | ) | __riscv_vwmul_vv_i32m8(__VA_ARGS__) |
| #define vwmul_vv_i32m8_m | ( | ... | ) | __riscv_vwmul_vv_i32m8_tumu(__VA_ARGS__) |
| #define vwmul_vv_i32mf2 | ( | ... | ) | __riscv_vwmul_vv_i32mf2(__VA_ARGS__) |
| #define vwmul_vv_i32mf2_m | ( | ... | ) | __riscv_vwmul_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vwmul_vv_i64m1 | ( | ... | ) | __riscv_vwmul_vv_i64m1(__VA_ARGS__) |
| #define vwmul_vv_i64m1_m | ( | ... | ) | __riscv_vwmul_vv_i64m1_tumu(__VA_ARGS__) |
| #define vwmul_vv_i64m2 | ( | ... | ) | __riscv_vwmul_vv_i64m2(__VA_ARGS__) |
| #define vwmul_vv_i64m2_m | ( | ... | ) | __riscv_vwmul_vv_i64m2_tumu(__VA_ARGS__) |
| #define vwmul_vv_i64m4 | ( | ... | ) | __riscv_vwmul_vv_i64m4(__VA_ARGS__) |
| #define vwmul_vv_i64m4_m | ( | ... | ) | __riscv_vwmul_vv_i64m4_tumu(__VA_ARGS__) |
| #define vwmul_vv_i64m8 | ( | ... | ) | __riscv_vwmul_vv_i64m8(__VA_ARGS__) |
| #define vwmul_vv_i64m8_m | ( | ... | ) | __riscv_vwmul_vv_i64m8_tumu(__VA_ARGS__) |
| #define vwmul_vx_i16m1 | ( | ... | ) | __riscv_vwmul_vx_i16m1(__VA_ARGS__) |
| #define vwmul_vx_i16m1_m | ( | ... | ) | __riscv_vwmul_vx_i16m1_tumu(__VA_ARGS__) |
| #define vwmul_vx_i16m2 | ( | ... | ) | __riscv_vwmul_vx_i16m2(__VA_ARGS__) |
| #define vwmul_vx_i16m2_m | ( | ... | ) | __riscv_vwmul_vx_i16m2_tumu(__VA_ARGS__) |
| #define vwmul_vx_i16m4 | ( | ... | ) | __riscv_vwmul_vx_i16m4(__VA_ARGS__) |
| #define vwmul_vx_i16m4_m | ( | ... | ) | __riscv_vwmul_vx_i16m4_tumu(__VA_ARGS__) |
| #define vwmul_vx_i16m8 | ( | ... | ) | __riscv_vwmul_vx_i16m8(__VA_ARGS__) |
| #define vwmul_vx_i16m8_m | ( | ... | ) | __riscv_vwmul_vx_i16m8_tumu(__VA_ARGS__) |
| #define vwmul_vx_i16mf2 | ( | ... | ) | __riscv_vwmul_vx_i16mf2(__VA_ARGS__) |
| #define vwmul_vx_i16mf2_m | ( | ... | ) | __riscv_vwmul_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vwmul_vx_i16mf4 | ( | ... | ) | __riscv_vwmul_vx_i16mf4(__VA_ARGS__) |
| #define vwmul_vx_i16mf4_m | ( | ... | ) | __riscv_vwmul_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vwmul_vx_i32m1 | ( | ... | ) | __riscv_vwmul_vx_i32m1(__VA_ARGS__) |
| #define vwmul_vx_i32m1_m | ( | ... | ) | __riscv_vwmul_vx_i32m1_tumu(__VA_ARGS__) |
| #define vwmul_vx_i32m2 | ( | ... | ) | __riscv_vwmul_vx_i32m2(__VA_ARGS__) |
| #define vwmul_vx_i32m2_m | ( | ... | ) | __riscv_vwmul_vx_i32m2_tumu(__VA_ARGS__) |
| #define vwmul_vx_i32m4 | ( | ... | ) | __riscv_vwmul_vx_i32m4(__VA_ARGS__) |
| #define vwmul_vx_i32m4_m | ( | ... | ) | __riscv_vwmul_vx_i32m4_tumu(__VA_ARGS__) |
| #define vwmul_vx_i32m8 | ( | ... | ) | __riscv_vwmul_vx_i32m8(__VA_ARGS__) |
| #define vwmul_vx_i32m8_m | ( | ... | ) | __riscv_vwmul_vx_i32m8_tumu(__VA_ARGS__) |
| #define vwmul_vx_i32mf2 | ( | ... | ) | __riscv_vwmul_vx_i32mf2(__VA_ARGS__) |
| #define vwmul_vx_i32mf2_m | ( | ... | ) | __riscv_vwmul_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vwmul_vx_i64m1 | ( | ... | ) | __riscv_vwmul_vx_i64m1(__VA_ARGS__) |
| #define vwmul_vx_i64m1_m | ( | ... | ) | __riscv_vwmul_vx_i64m1_tumu(__VA_ARGS__) |
| #define vwmul_vx_i64m2 | ( | ... | ) | __riscv_vwmul_vx_i64m2(__VA_ARGS__) |
| #define vwmul_vx_i64m2_m | ( | ... | ) | __riscv_vwmul_vx_i64m2_tumu(__VA_ARGS__) |
| #define vwmul_vx_i64m4 | ( | ... | ) | __riscv_vwmul_vx_i64m4(__VA_ARGS__) |
| #define vwmul_vx_i64m4_m | ( | ... | ) | __riscv_vwmul_vx_i64m4_tumu(__VA_ARGS__) |
| #define vwmul_vx_i64m8 | ( | ... | ) | __riscv_vwmul_vx_i64m8(__VA_ARGS__) |
| #define vwmul_vx_i64m8_m | ( | ... | ) | __riscv_vwmul_vx_i64m8_tumu(__VA_ARGS__) |
| #define vwmulsu_vv_i16m1 | ( | ... | ) | __riscv_vwmulsu_vv_i16m1(__VA_ARGS__) |
| #define vwmulsu_vv_i16m1_m | ( | ... | ) | __riscv_vwmulsu_vv_i16m1_tumu(__VA_ARGS__) |
| #define vwmulsu_vv_i16m2 | ( | ... | ) | __riscv_vwmulsu_vv_i16m2(__VA_ARGS__) |
| #define vwmulsu_vv_i16m2_m | ( | ... | ) | __riscv_vwmulsu_vv_i16m2_tumu(__VA_ARGS__) |
| #define vwmulsu_vv_i16m4 | ( | ... | ) | __riscv_vwmulsu_vv_i16m4(__VA_ARGS__) |
| #define vwmulsu_vv_i16m4_m | ( | ... | ) | __riscv_vwmulsu_vv_i16m4_tumu(__VA_ARGS__) |
| #define vwmulsu_vv_i16m8 | ( | ... | ) | __riscv_vwmulsu_vv_i16m8(__VA_ARGS__) |
| #define vwmulsu_vv_i16m8_m | ( | ... | ) | __riscv_vwmulsu_vv_i16m8_tumu(__VA_ARGS__) |
| #define vwmulsu_vv_i16mf2 | ( | ... | ) | __riscv_vwmulsu_vv_i16mf2(__VA_ARGS__) |
| #define vwmulsu_vv_i16mf2_m | ( | ... | ) | __riscv_vwmulsu_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vwmulsu_vv_i16mf4 | ( | ... | ) | __riscv_vwmulsu_vv_i16mf4(__VA_ARGS__) |
| #define vwmulsu_vv_i16mf4_m | ( | ... | ) | __riscv_vwmulsu_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vwmulsu_vv_i32m1 | ( | ... | ) | __riscv_vwmulsu_vv_i32m1(__VA_ARGS__) |
| #define vwmulsu_vv_i32m1_m | ( | ... | ) | __riscv_vwmulsu_vv_i32m1_tumu(__VA_ARGS__) |
| #define vwmulsu_vv_i32m2 | ( | ... | ) | __riscv_vwmulsu_vv_i32m2(__VA_ARGS__) |
| #define vwmulsu_vv_i32m2_m | ( | ... | ) | __riscv_vwmulsu_vv_i32m2_tumu(__VA_ARGS__) |
| #define vwmulsu_vv_i32m4 | ( | ... | ) | __riscv_vwmulsu_vv_i32m4(__VA_ARGS__) |
| #define vwmulsu_vv_i32m4_m | ( | ... | ) | __riscv_vwmulsu_vv_i32m4_tumu(__VA_ARGS__) |
| #define vwmulsu_vv_i32m8 | ( | ... | ) | __riscv_vwmulsu_vv_i32m8(__VA_ARGS__) |
| #define vwmulsu_vv_i32m8_m | ( | ... | ) | __riscv_vwmulsu_vv_i32m8_tumu(__VA_ARGS__) |
| #define vwmulsu_vv_i32mf2 | ( | ... | ) | __riscv_vwmulsu_vv_i32mf2(__VA_ARGS__) |
| #define vwmulsu_vv_i32mf2_m | ( | ... | ) | __riscv_vwmulsu_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vwmulsu_vv_i64m1 | ( | ... | ) | __riscv_vwmulsu_vv_i64m1(__VA_ARGS__) |
| #define vwmulsu_vv_i64m1_m | ( | ... | ) | __riscv_vwmulsu_vv_i64m1_tumu(__VA_ARGS__) |
| #define vwmulsu_vv_i64m2 | ( | ... | ) | __riscv_vwmulsu_vv_i64m2(__VA_ARGS__) |
| #define vwmulsu_vv_i64m2_m | ( | ... | ) | __riscv_vwmulsu_vv_i64m2_tumu(__VA_ARGS__) |
| #define vwmulsu_vv_i64m4 | ( | ... | ) | __riscv_vwmulsu_vv_i64m4(__VA_ARGS__) |
| #define vwmulsu_vv_i64m4_m | ( | ... | ) | __riscv_vwmulsu_vv_i64m4_tumu(__VA_ARGS__) |
| #define vwmulsu_vv_i64m8 | ( | ... | ) | __riscv_vwmulsu_vv_i64m8(__VA_ARGS__) |
| #define vwmulsu_vv_i64m8_m | ( | ... | ) | __riscv_vwmulsu_vv_i64m8_tumu(__VA_ARGS__) |
| #define vwmulsu_vx_i16m1 | ( | ... | ) | __riscv_vwmulsu_vx_i16m1(__VA_ARGS__) |
| #define vwmulsu_vx_i16m1_m | ( | ... | ) | __riscv_vwmulsu_vx_i16m1_tumu(__VA_ARGS__) |
| #define vwmulsu_vx_i16m2 | ( | ... | ) | __riscv_vwmulsu_vx_i16m2(__VA_ARGS__) |
| #define vwmulsu_vx_i16m2_m | ( | ... | ) | __riscv_vwmulsu_vx_i16m2_tumu(__VA_ARGS__) |
| #define vwmulsu_vx_i16m4 | ( | ... | ) | __riscv_vwmulsu_vx_i16m4(__VA_ARGS__) |
| #define vwmulsu_vx_i16m4_m | ( | ... | ) | __riscv_vwmulsu_vx_i16m4_tumu(__VA_ARGS__) |
| #define vwmulsu_vx_i16m8 | ( | ... | ) | __riscv_vwmulsu_vx_i16m8(__VA_ARGS__) |
| #define vwmulsu_vx_i16m8_m | ( | ... | ) | __riscv_vwmulsu_vx_i16m8_tumu(__VA_ARGS__) |
| #define vwmulsu_vx_i16mf2 | ( | ... | ) | __riscv_vwmulsu_vx_i16mf2(__VA_ARGS__) |
| #define vwmulsu_vx_i16mf2_m | ( | ... | ) | __riscv_vwmulsu_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vwmulsu_vx_i16mf4 | ( | ... | ) | __riscv_vwmulsu_vx_i16mf4(__VA_ARGS__) |
| #define vwmulsu_vx_i16mf4_m | ( | ... | ) | __riscv_vwmulsu_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vwmulsu_vx_i32m1 | ( | ... | ) | __riscv_vwmulsu_vx_i32m1(__VA_ARGS__) |
| #define vwmulsu_vx_i32m1_m | ( | ... | ) | __riscv_vwmulsu_vx_i32m1_tumu(__VA_ARGS__) |
| #define vwmulsu_vx_i32m2 | ( | ... | ) | __riscv_vwmulsu_vx_i32m2(__VA_ARGS__) |
| #define vwmulsu_vx_i32m2_m | ( | ... | ) | __riscv_vwmulsu_vx_i32m2_tumu(__VA_ARGS__) |
| #define vwmulsu_vx_i32m4 | ( | ... | ) | __riscv_vwmulsu_vx_i32m4(__VA_ARGS__) |
| #define vwmulsu_vx_i32m4_m | ( | ... | ) | __riscv_vwmulsu_vx_i32m4_tumu(__VA_ARGS__) |
| #define vwmulsu_vx_i32m8 | ( | ... | ) | __riscv_vwmulsu_vx_i32m8(__VA_ARGS__) |
| #define vwmulsu_vx_i32m8_m | ( | ... | ) | __riscv_vwmulsu_vx_i32m8_tumu(__VA_ARGS__) |
| #define vwmulsu_vx_i32mf2 | ( | ... | ) | __riscv_vwmulsu_vx_i32mf2(__VA_ARGS__) |
| #define vwmulsu_vx_i32mf2_m | ( | ... | ) | __riscv_vwmulsu_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vwmulsu_vx_i64m1 | ( | ... | ) | __riscv_vwmulsu_vx_i64m1(__VA_ARGS__) |
| #define vwmulsu_vx_i64m1_m | ( | ... | ) | __riscv_vwmulsu_vx_i64m1_tumu(__VA_ARGS__) |
| #define vwmulsu_vx_i64m2 | ( | ... | ) | __riscv_vwmulsu_vx_i64m2(__VA_ARGS__) |
| #define vwmulsu_vx_i64m2_m | ( | ... | ) | __riscv_vwmulsu_vx_i64m2_tumu(__VA_ARGS__) |
| #define vwmulsu_vx_i64m4 | ( | ... | ) | __riscv_vwmulsu_vx_i64m4(__VA_ARGS__) |
| #define vwmulsu_vx_i64m4_m | ( | ... | ) | __riscv_vwmulsu_vx_i64m4_tumu(__VA_ARGS__) |
| #define vwmulsu_vx_i64m8 | ( | ... | ) | __riscv_vwmulsu_vx_i64m8(__VA_ARGS__) |
| #define vwmulsu_vx_i64m8_m | ( | ... | ) | __riscv_vwmulsu_vx_i64m8_tumu(__VA_ARGS__) |
| #define vwmulu_vv_u16m1 | ( | ... | ) | __riscv_vwmulu_vv_u16m1(__VA_ARGS__) |
| #define vwmulu_vv_u16m1_m | ( | ... | ) | __riscv_vwmulu_vv_u16m1_tumu(__VA_ARGS__) |
| #define vwmulu_vv_u16m2 | ( | ... | ) | __riscv_vwmulu_vv_u16m2(__VA_ARGS__) |
| #define vwmulu_vv_u16m2_m | ( | ... | ) | __riscv_vwmulu_vv_u16m2_tumu(__VA_ARGS__) |
| #define vwmulu_vv_u16m4 | ( | ... | ) | __riscv_vwmulu_vv_u16m4(__VA_ARGS__) |
| #define vwmulu_vv_u16m4_m | ( | ... | ) | __riscv_vwmulu_vv_u16m4_tumu(__VA_ARGS__) |
| #define vwmulu_vv_u16m8 | ( | ... | ) | __riscv_vwmulu_vv_u16m8(__VA_ARGS__) |
| #define vwmulu_vv_u16m8_m | ( | ... | ) | __riscv_vwmulu_vv_u16m8_tumu(__VA_ARGS__) |
| #define vwmulu_vv_u16mf2 | ( | ... | ) | __riscv_vwmulu_vv_u16mf2(__VA_ARGS__) |
| #define vwmulu_vv_u16mf2_m | ( | ... | ) | __riscv_vwmulu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vwmulu_vv_u16mf4 | ( | ... | ) | __riscv_vwmulu_vv_u16mf4(__VA_ARGS__) |
| #define vwmulu_vv_u16mf4_m | ( | ... | ) | __riscv_vwmulu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vwmulu_vv_u32m1 | ( | ... | ) | __riscv_vwmulu_vv_u32m1(__VA_ARGS__) |
| #define vwmulu_vv_u32m1_m | ( | ... | ) | __riscv_vwmulu_vv_u32m1_tumu(__VA_ARGS__) |
| #define vwmulu_vv_u32m2 | ( | ... | ) | __riscv_vwmulu_vv_u32m2(__VA_ARGS__) |
| #define vwmulu_vv_u32m2_m | ( | ... | ) | __riscv_vwmulu_vv_u32m2_tumu(__VA_ARGS__) |
| #define vwmulu_vv_u32m4 | ( | ... | ) | __riscv_vwmulu_vv_u32m4(__VA_ARGS__) |
| #define vwmulu_vv_u32m4_m | ( | ... | ) | __riscv_vwmulu_vv_u32m4_tumu(__VA_ARGS__) |
| #define vwmulu_vv_u32m8 | ( | ... | ) | __riscv_vwmulu_vv_u32m8(__VA_ARGS__) |
| #define vwmulu_vv_u32m8_m | ( | ... | ) | __riscv_vwmulu_vv_u32m8_tumu(__VA_ARGS__) |
| #define vwmulu_vv_u32mf2 | ( | ... | ) | __riscv_vwmulu_vv_u32mf2(__VA_ARGS__) |
| #define vwmulu_vv_u32mf2_m | ( | ... | ) | __riscv_vwmulu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vwmulu_vv_u64m1 | ( | ... | ) | __riscv_vwmulu_vv_u64m1(__VA_ARGS__) |
| #define vwmulu_vv_u64m1_m | ( | ... | ) | __riscv_vwmulu_vv_u64m1_tumu(__VA_ARGS__) |
| #define vwmulu_vv_u64m2 | ( | ... | ) | __riscv_vwmulu_vv_u64m2(__VA_ARGS__) |
| #define vwmulu_vv_u64m2_m | ( | ... | ) | __riscv_vwmulu_vv_u64m2_tumu(__VA_ARGS__) |
| #define vwmulu_vv_u64m4 | ( | ... | ) | __riscv_vwmulu_vv_u64m4(__VA_ARGS__) |
| #define vwmulu_vv_u64m4_m | ( | ... | ) | __riscv_vwmulu_vv_u64m4_tumu(__VA_ARGS__) |
| #define vwmulu_vv_u64m8 | ( | ... | ) | __riscv_vwmulu_vv_u64m8(__VA_ARGS__) |
| #define vwmulu_vv_u64m8_m | ( | ... | ) | __riscv_vwmulu_vv_u64m8_tumu(__VA_ARGS__) |
| #define vwmulu_vx_u16m1 | ( | ... | ) | __riscv_vwmulu_vx_u16m1(__VA_ARGS__) |
| #define vwmulu_vx_u16m1_m | ( | ... | ) | __riscv_vwmulu_vx_u16m1_tumu(__VA_ARGS__) |
| #define vwmulu_vx_u16m2 | ( | ... | ) | __riscv_vwmulu_vx_u16m2(__VA_ARGS__) |
| #define vwmulu_vx_u16m2_m | ( | ... | ) | __riscv_vwmulu_vx_u16m2_tumu(__VA_ARGS__) |
| #define vwmulu_vx_u16m4 | ( | ... | ) | __riscv_vwmulu_vx_u16m4(__VA_ARGS__) |
| #define vwmulu_vx_u16m4_m | ( | ... | ) | __riscv_vwmulu_vx_u16m4_tumu(__VA_ARGS__) |
| #define vwmulu_vx_u16m8 | ( | ... | ) | __riscv_vwmulu_vx_u16m8(__VA_ARGS__) |
| #define vwmulu_vx_u16m8_m | ( | ... | ) | __riscv_vwmulu_vx_u16m8_tumu(__VA_ARGS__) |
| #define vwmulu_vx_u16mf2 | ( | ... | ) | __riscv_vwmulu_vx_u16mf2(__VA_ARGS__) |
| #define vwmulu_vx_u16mf2_m | ( | ... | ) | __riscv_vwmulu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vwmulu_vx_u16mf4 | ( | ... | ) | __riscv_vwmulu_vx_u16mf4(__VA_ARGS__) |
| #define vwmulu_vx_u16mf4_m | ( | ... | ) | __riscv_vwmulu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vwmulu_vx_u32m1 | ( | ... | ) | __riscv_vwmulu_vx_u32m1(__VA_ARGS__) |
| #define vwmulu_vx_u32m1_m | ( | ... | ) | __riscv_vwmulu_vx_u32m1_tumu(__VA_ARGS__) |
| #define vwmulu_vx_u32m2 | ( | ... | ) | __riscv_vwmulu_vx_u32m2(__VA_ARGS__) |
| #define vwmulu_vx_u32m2_m | ( | ... | ) | __riscv_vwmulu_vx_u32m2_tumu(__VA_ARGS__) |
| #define vwmulu_vx_u32m4 | ( | ... | ) | __riscv_vwmulu_vx_u32m4(__VA_ARGS__) |
| #define vwmulu_vx_u32m4_m | ( | ... | ) | __riscv_vwmulu_vx_u32m4_tumu(__VA_ARGS__) |
| #define vwmulu_vx_u32m8 | ( | ... | ) | __riscv_vwmulu_vx_u32m8(__VA_ARGS__) |
| #define vwmulu_vx_u32m8_m | ( | ... | ) | __riscv_vwmulu_vx_u32m8_tumu(__VA_ARGS__) |
| #define vwmulu_vx_u32mf2 | ( | ... | ) | __riscv_vwmulu_vx_u32mf2(__VA_ARGS__) |
| #define vwmulu_vx_u32mf2_m | ( | ... | ) | __riscv_vwmulu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vwmulu_vx_u64m1 | ( | ... | ) | __riscv_vwmulu_vx_u64m1(__VA_ARGS__) |
| #define vwmulu_vx_u64m1_m | ( | ... | ) | __riscv_vwmulu_vx_u64m1_tumu(__VA_ARGS__) |
| #define vwmulu_vx_u64m2 | ( | ... | ) | __riscv_vwmulu_vx_u64m2(__VA_ARGS__) |
| #define vwmulu_vx_u64m2_m | ( | ... | ) | __riscv_vwmulu_vx_u64m2_tumu(__VA_ARGS__) |
| #define vwmulu_vx_u64m4 | ( | ... | ) | __riscv_vwmulu_vx_u64m4(__VA_ARGS__) |
| #define vwmulu_vx_u64m4_m | ( | ... | ) | __riscv_vwmulu_vx_u64m4_tumu(__VA_ARGS__) |
| #define vwmulu_vx_u64m8 | ( | ... | ) | __riscv_vwmulu_vx_u64m8(__VA_ARGS__) |
| #define vwmulu_vx_u64m8_m | ( | ... | ) | __riscv_vwmulu_vx_u64m8_tumu(__VA_ARGS__) |
| #define vwredsum_vs_i16m1_i32m1 | ( | ... | ) | __riscv_vwredsum_vs_i16m1_i32m1_tu(__VA_ARGS__) |
| #define vwredsum_vs_i16m1_i32m1_m | ( | ... | ) | __riscv_vwredsum_vs_i16m1_i32m1_tum(__VA_ARGS__) |
| #define vwredsum_vs_i16m2_i32m1 | ( | ... | ) | __riscv_vwredsum_vs_i16m2_i32m1_tu(__VA_ARGS__) |
| #define vwredsum_vs_i16m2_i32m1_m | ( | ... | ) | __riscv_vwredsum_vs_i16m2_i32m1_tum(__VA_ARGS__) |
| #define vwredsum_vs_i16m4_i32m1 | ( | ... | ) | __riscv_vwredsum_vs_i16m4_i32m1_tu(__VA_ARGS__) |
| #define vwredsum_vs_i16m4_i32m1_m | ( | ... | ) | __riscv_vwredsum_vs_i16m4_i32m1_tum(__VA_ARGS__) |
| #define vwredsum_vs_i16m8_i32m1 | ( | ... | ) | __riscv_vwredsum_vs_i16m8_i32m1_tu(__VA_ARGS__) |
| #define vwredsum_vs_i16m8_i32m1_m | ( | ... | ) | __riscv_vwredsum_vs_i16m8_i32m1_tum(__VA_ARGS__) |
| #define vwredsum_vs_i16mf2_i32m1 | ( | ... | ) | __riscv_vwredsum_vs_i16mf2_i32m1_tu(__VA_ARGS__) |
| #define vwredsum_vs_i16mf2_i32m1_m | ( | ... | ) | __riscv_vwredsum_vs_i16mf2_i32m1_tum(__VA_ARGS__) |
| #define vwredsum_vs_i16mf4_i32m1 | ( | ... | ) | __riscv_vwredsum_vs_i16mf4_i32m1_tu(__VA_ARGS__) |
| #define vwredsum_vs_i16mf4_i32m1_m | ( | ... | ) | __riscv_vwredsum_vs_i16mf4_i32m1_tum(__VA_ARGS__) |
| #define vwredsum_vs_i32m1_i64m1 | ( | ... | ) | __riscv_vwredsum_vs_i32m1_i64m1_tu(__VA_ARGS__) |
| #define vwredsum_vs_i32m1_i64m1_m | ( | ... | ) | __riscv_vwredsum_vs_i32m1_i64m1_tum(__VA_ARGS__) |
| #define vwredsum_vs_i32m2_i64m1 | ( | ... | ) | __riscv_vwredsum_vs_i32m2_i64m1_tu(__VA_ARGS__) |
| #define vwredsum_vs_i32m2_i64m1_m | ( | ... | ) | __riscv_vwredsum_vs_i32m2_i64m1_tum(__VA_ARGS__) |
| #define vwredsum_vs_i32m4_i64m1 | ( | ... | ) | __riscv_vwredsum_vs_i32m4_i64m1_tu(__VA_ARGS__) |
| #define vwredsum_vs_i32m4_i64m1_m | ( | ... | ) | __riscv_vwredsum_vs_i32m4_i64m1_tum(__VA_ARGS__) |
| #define vwredsum_vs_i32m8_i64m1 | ( | ... | ) | __riscv_vwredsum_vs_i32m8_i64m1_tu(__VA_ARGS__) |
| #define vwredsum_vs_i32m8_i64m1_m | ( | ... | ) | __riscv_vwredsum_vs_i32m8_i64m1_tum(__VA_ARGS__) |
| #define vwredsum_vs_i32mf2_i64m1 | ( | ... | ) | __riscv_vwredsum_vs_i32mf2_i64m1_tu(__VA_ARGS__) |
| #define vwredsum_vs_i32mf2_i64m1_m | ( | ... | ) | __riscv_vwredsum_vs_i32mf2_i64m1_tum(__VA_ARGS__) |
| #define vwredsum_vs_i8m1_i16m1 | ( | ... | ) | __riscv_vwredsum_vs_i8m1_i16m1_tu(__VA_ARGS__) |
| #define vwredsum_vs_i8m1_i16m1_m | ( | ... | ) | __riscv_vwredsum_vs_i8m1_i16m1_tum(__VA_ARGS__) |
| #define vwredsum_vs_i8m2_i16m1 | ( | ... | ) | __riscv_vwredsum_vs_i8m2_i16m1_tu(__VA_ARGS__) |
| #define vwredsum_vs_i8m2_i16m1_m | ( | ... | ) | __riscv_vwredsum_vs_i8m2_i16m1_tum(__VA_ARGS__) |
| #define vwredsum_vs_i8m4_i16m1 | ( | ... | ) | __riscv_vwredsum_vs_i8m4_i16m1_tu(__VA_ARGS__) |
| #define vwredsum_vs_i8m4_i16m1_m | ( | ... | ) | __riscv_vwredsum_vs_i8m4_i16m1_tum(__VA_ARGS__) |
| #define vwredsum_vs_i8m8_i16m1 | ( | ... | ) | __riscv_vwredsum_vs_i8m8_i16m1_tu(__VA_ARGS__) |
| #define vwredsum_vs_i8m8_i16m1_m | ( | ... | ) | __riscv_vwredsum_vs_i8m8_i16m1_tum(__VA_ARGS__) |
| #define vwredsum_vs_i8mf2_i16m1 | ( | ... | ) | __riscv_vwredsum_vs_i8mf2_i16m1_tu(__VA_ARGS__) |
| #define vwredsum_vs_i8mf2_i16m1_m | ( | ... | ) | __riscv_vwredsum_vs_i8mf2_i16m1_tum(__VA_ARGS__) |
| #define vwredsum_vs_i8mf4_i16m1 | ( | ... | ) | __riscv_vwredsum_vs_i8mf4_i16m1_tu(__VA_ARGS__) |
| #define vwredsum_vs_i8mf4_i16m1_m | ( | ... | ) | __riscv_vwredsum_vs_i8mf4_i16m1_tum(__VA_ARGS__) |
| #define vwredsum_vs_i8mf8_i16m1 | ( | ... | ) | __riscv_vwredsum_vs_i8mf8_i16m1_tu(__VA_ARGS__) |
| #define vwredsum_vs_i8mf8_i16m1_m | ( | ... | ) | __riscv_vwredsum_vs_i8mf8_i16m1_tum(__VA_ARGS__) |
| #define vwredsumu_vs_u16m1_u32m1 | ( | ... | ) | __riscv_vwredsumu_vs_u16m1_u32m1_tu(__VA_ARGS__) |
| #define vwredsumu_vs_u16m1_u32m1_m | ( | ... | ) | __riscv_vwredsumu_vs_u16m1_u32m1_tum(__VA_ARGS__) |
| #define vwredsumu_vs_u16m2_u32m1 | ( | ... | ) | __riscv_vwredsumu_vs_u16m2_u32m1_tu(__VA_ARGS__) |
| #define vwredsumu_vs_u16m2_u32m1_m | ( | ... | ) | __riscv_vwredsumu_vs_u16m2_u32m1_tum(__VA_ARGS__) |
| #define vwredsumu_vs_u16m4_u32m1 | ( | ... | ) | __riscv_vwredsumu_vs_u16m4_u32m1_tu(__VA_ARGS__) |
| #define vwredsumu_vs_u16m4_u32m1_m | ( | ... | ) | __riscv_vwredsumu_vs_u16m4_u32m1_tum(__VA_ARGS__) |
| #define vwredsumu_vs_u16m8_u32m1 | ( | ... | ) | __riscv_vwredsumu_vs_u16m8_u32m1_tu(__VA_ARGS__) |
| #define vwredsumu_vs_u16m8_u32m1_m | ( | ... | ) | __riscv_vwredsumu_vs_u16m8_u32m1_tum(__VA_ARGS__) |
| #define vwredsumu_vs_u16mf2_u32m1 | ( | ... | ) | __riscv_vwredsumu_vs_u16mf2_u32m1_tu(__VA_ARGS__) |
| #define vwredsumu_vs_u16mf2_u32m1_m | ( | ... | ) | __riscv_vwredsumu_vs_u16mf2_u32m1_tum(__VA_ARGS__) |
| #define vwredsumu_vs_u16mf4_u32m1 | ( | ... | ) | __riscv_vwredsumu_vs_u16mf4_u32m1_tu(__VA_ARGS__) |
| #define vwredsumu_vs_u16mf4_u32m1_m | ( | ... | ) | __riscv_vwredsumu_vs_u16mf4_u32m1_tum(__VA_ARGS__) |
| #define vwredsumu_vs_u32m1_u64m1 | ( | ... | ) | __riscv_vwredsumu_vs_u32m1_u64m1_tu(__VA_ARGS__) |
| #define vwredsumu_vs_u32m1_u64m1_m | ( | ... | ) | __riscv_vwredsumu_vs_u32m1_u64m1_tum(__VA_ARGS__) |
| #define vwredsumu_vs_u32m2_u64m1 | ( | ... | ) | __riscv_vwredsumu_vs_u32m2_u64m1_tu(__VA_ARGS__) |
| #define vwredsumu_vs_u32m2_u64m1_m | ( | ... | ) | __riscv_vwredsumu_vs_u32m2_u64m1_tum(__VA_ARGS__) |
| #define vwredsumu_vs_u32m4_u64m1 | ( | ... | ) | __riscv_vwredsumu_vs_u32m4_u64m1_tu(__VA_ARGS__) |
| #define vwredsumu_vs_u32m4_u64m1_m | ( | ... | ) | __riscv_vwredsumu_vs_u32m4_u64m1_tum(__VA_ARGS__) |
| #define vwredsumu_vs_u32m8_u64m1 | ( | ... | ) | __riscv_vwredsumu_vs_u32m8_u64m1_tu(__VA_ARGS__) |
| #define vwredsumu_vs_u32m8_u64m1_m | ( | ... | ) | __riscv_vwredsumu_vs_u32m8_u64m1_tum(__VA_ARGS__) |
| #define vwredsumu_vs_u32mf2_u64m1 | ( | ... | ) | __riscv_vwredsumu_vs_u32mf2_u64m1_tu(__VA_ARGS__) |
| #define vwredsumu_vs_u32mf2_u64m1_m | ( | ... | ) | __riscv_vwredsumu_vs_u32mf2_u64m1_tum(__VA_ARGS__) |
| #define vwredsumu_vs_u8m1_u16m1 | ( | ... | ) | __riscv_vwredsumu_vs_u8m1_u16m1_tu(__VA_ARGS__) |
| #define vwredsumu_vs_u8m1_u16m1_m | ( | ... | ) | __riscv_vwredsumu_vs_u8m1_u16m1_tum(__VA_ARGS__) |
| #define vwredsumu_vs_u8m2_u16m1 | ( | ... | ) | __riscv_vwredsumu_vs_u8m2_u16m1_tu(__VA_ARGS__) |
| #define vwredsumu_vs_u8m2_u16m1_m | ( | ... | ) | __riscv_vwredsumu_vs_u8m2_u16m1_tum(__VA_ARGS__) |
| #define vwredsumu_vs_u8m4_u16m1 | ( | ... | ) | __riscv_vwredsumu_vs_u8m4_u16m1_tu(__VA_ARGS__) |
| #define vwredsumu_vs_u8m4_u16m1_m | ( | ... | ) | __riscv_vwredsumu_vs_u8m4_u16m1_tum(__VA_ARGS__) |
| #define vwredsumu_vs_u8m8_u16m1 | ( | ... | ) | __riscv_vwredsumu_vs_u8m8_u16m1_tu(__VA_ARGS__) |
| #define vwredsumu_vs_u8m8_u16m1_m | ( | ... | ) | __riscv_vwredsumu_vs_u8m8_u16m1_tum(__VA_ARGS__) |
| #define vwredsumu_vs_u8mf2_u16m1 | ( | ... | ) | __riscv_vwredsumu_vs_u8mf2_u16m1_tu(__VA_ARGS__) |
| #define vwredsumu_vs_u8mf2_u16m1_m | ( | ... | ) | __riscv_vwredsumu_vs_u8mf2_u16m1_tum(__VA_ARGS__) |
| #define vwredsumu_vs_u8mf4_u16m1 | ( | ... | ) | __riscv_vwredsumu_vs_u8mf4_u16m1_tu(__VA_ARGS__) |
| #define vwredsumu_vs_u8mf4_u16m1_m | ( | ... | ) | __riscv_vwredsumu_vs_u8mf4_u16m1_tum(__VA_ARGS__) |
| #define vwredsumu_vs_u8mf8_u16m1 | ( | ... | ) | __riscv_vwredsumu_vs_u8mf8_u16m1_tu(__VA_ARGS__) |
| #define vwredsumu_vs_u8mf8_u16m1_m | ( | ... | ) | __riscv_vwredsumu_vs_u8mf8_u16m1_tum(__VA_ARGS__) |
| #define vwsub_vv_i16m1 | ( | ... | ) | __riscv_vwsub_vv_i16m1(__VA_ARGS__) |
| #define vwsub_vv_i16m1_m | ( | ... | ) | __riscv_vwsub_vv_i16m1_tumu(__VA_ARGS__) |
| #define vwsub_vv_i16m2 | ( | ... | ) | __riscv_vwsub_vv_i16m2(__VA_ARGS__) |
| #define vwsub_vv_i16m2_m | ( | ... | ) | __riscv_vwsub_vv_i16m2_tumu(__VA_ARGS__) |
| #define vwsub_vv_i16m4 | ( | ... | ) | __riscv_vwsub_vv_i16m4(__VA_ARGS__) |
| #define vwsub_vv_i16m4_m | ( | ... | ) | __riscv_vwsub_vv_i16m4_tumu(__VA_ARGS__) |
| #define vwsub_vv_i16m8 | ( | ... | ) | __riscv_vwsub_vv_i16m8(__VA_ARGS__) |
| #define vwsub_vv_i16m8_m | ( | ... | ) | __riscv_vwsub_vv_i16m8_tumu(__VA_ARGS__) |
| #define vwsub_vv_i16mf2 | ( | ... | ) | __riscv_vwsub_vv_i16mf2(__VA_ARGS__) |
| #define vwsub_vv_i16mf2_m | ( | ... | ) | __riscv_vwsub_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vwsub_vv_i16mf4 | ( | ... | ) | __riscv_vwsub_vv_i16mf4(__VA_ARGS__) |
| #define vwsub_vv_i16mf4_m | ( | ... | ) | __riscv_vwsub_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vwsub_vv_i32m1 | ( | ... | ) | __riscv_vwsub_vv_i32m1(__VA_ARGS__) |
| #define vwsub_vv_i32m1_m | ( | ... | ) | __riscv_vwsub_vv_i32m1_tumu(__VA_ARGS__) |
| #define vwsub_vv_i32m2 | ( | ... | ) | __riscv_vwsub_vv_i32m2(__VA_ARGS__) |
| #define vwsub_vv_i32m2_m | ( | ... | ) | __riscv_vwsub_vv_i32m2_tumu(__VA_ARGS__) |
| #define vwsub_vv_i32m4 | ( | ... | ) | __riscv_vwsub_vv_i32m4(__VA_ARGS__) |
| #define vwsub_vv_i32m4_m | ( | ... | ) | __riscv_vwsub_vv_i32m4_tumu(__VA_ARGS__) |
| #define vwsub_vv_i32m8 | ( | ... | ) | __riscv_vwsub_vv_i32m8(__VA_ARGS__) |
| #define vwsub_vv_i32m8_m | ( | ... | ) | __riscv_vwsub_vv_i32m8_tumu(__VA_ARGS__) |
| #define vwsub_vv_i32mf2 | ( | ... | ) | __riscv_vwsub_vv_i32mf2(__VA_ARGS__) |
| #define vwsub_vv_i32mf2_m | ( | ... | ) | __riscv_vwsub_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vwsub_vv_i64m1 | ( | ... | ) | __riscv_vwsub_vv_i64m1(__VA_ARGS__) |
| #define vwsub_vv_i64m1_m | ( | ... | ) | __riscv_vwsub_vv_i64m1_tumu(__VA_ARGS__) |
| #define vwsub_vv_i64m2 | ( | ... | ) | __riscv_vwsub_vv_i64m2(__VA_ARGS__) |
| #define vwsub_vv_i64m2_m | ( | ... | ) | __riscv_vwsub_vv_i64m2_tumu(__VA_ARGS__) |
| #define vwsub_vv_i64m4 | ( | ... | ) | __riscv_vwsub_vv_i64m4(__VA_ARGS__) |
| #define vwsub_vv_i64m4_m | ( | ... | ) | __riscv_vwsub_vv_i64m4_tumu(__VA_ARGS__) |
| #define vwsub_vv_i64m8 | ( | ... | ) | __riscv_vwsub_vv_i64m8(__VA_ARGS__) |
| #define vwsub_vv_i64m8_m | ( | ... | ) | __riscv_vwsub_vv_i64m8_tumu(__VA_ARGS__) |
| #define vwsub_vx_i16m1 | ( | ... | ) | __riscv_vwsub_vx_i16m1(__VA_ARGS__) |
| #define vwsub_vx_i16m1_m | ( | ... | ) | __riscv_vwsub_vx_i16m1_tumu(__VA_ARGS__) |
| #define vwsub_vx_i16m2 | ( | ... | ) | __riscv_vwsub_vx_i16m2(__VA_ARGS__) |
| #define vwsub_vx_i16m2_m | ( | ... | ) | __riscv_vwsub_vx_i16m2_tumu(__VA_ARGS__) |
| #define vwsub_vx_i16m4 | ( | ... | ) | __riscv_vwsub_vx_i16m4(__VA_ARGS__) |
| #define vwsub_vx_i16m4_m | ( | ... | ) | __riscv_vwsub_vx_i16m4_tumu(__VA_ARGS__) |
| #define vwsub_vx_i16m8 | ( | ... | ) | __riscv_vwsub_vx_i16m8(__VA_ARGS__) |
| #define vwsub_vx_i16m8_m | ( | ... | ) | __riscv_vwsub_vx_i16m8_tumu(__VA_ARGS__) |
| #define vwsub_vx_i16mf2 | ( | ... | ) | __riscv_vwsub_vx_i16mf2(__VA_ARGS__) |
| #define vwsub_vx_i16mf2_m | ( | ... | ) | __riscv_vwsub_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vwsub_vx_i16mf4 | ( | ... | ) | __riscv_vwsub_vx_i16mf4(__VA_ARGS__) |
| #define vwsub_vx_i16mf4_m | ( | ... | ) | __riscv_vwsub_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vwsub_vx_i32m1 | ( | ... | ) | __riscv_vwsub_vx_i32m1(__VA_ARGS__) |
| #define vwsub_vx_i32m1_m | ( | ... | ) | __riscv_vwsub_vx_i32m1_tumu(__VA_ARGS__) |
| #define vwsub_vx_i32m2 | ( | ... | ) | __riscv_vwsub_vx_i32m2(__VA_ARGS__) |
| #define vwsub_vx_i32m2_m | ( | ... | ) | __riscv_vwsub_vx_i32m2_tumu(__VA_ARGS__) |
| #define vwsub_vx_i32m4 | ( | ... | ) | __riscv_vwsub_vx_i32m4(__VA_ARGS__) |
| #define vwsub_vx_i32m4_m | ( | ... | ) | __riscv_vwsub_vx_i32m4_tumu(__VA_ARGS__) |
| #define vwsub_vx_i32m8 | ( | ... | ) | __riscv_vwsub_vx_i32m8(__VA_ARGS__) |
| #define vwsub_vx_i32m8_m | ( | ... | ) | __riscv_vwsub_vx_i32m8_tumu(__VA_ARGS__) |
| #define vwsub_vx_i32mf2 | ( | ... | ) | __riscv_vwsub_vx_i32mf2(__VA_ARGS__) |
| #define vwsub_vx_i32mf2_m | ( | ... | ) | __riscv_vwsub_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vwsub_vx_i64m1 | ( | ... | ) | __riscv_vwsub_vx_i64m1(__VA_ARGS__) |
| #define vwsub_vx_i64m1_m | ( | ... | ) | __riscv_vwsub_vx_i64m1_tumu(__VA_ARGS__) |
| #define vwsub_vx_i64m2 | ( | ... | ) | __riscv_vwsub_vx_i64m2(__VA_ARGS__) |
| #define vwsub_vx_i64m2_m | ( | ... | ) | __riscv_vwsub_vx_i64m2_tumu(__VA_ARGS__) |
| #define vwsub_vx_i64m4 | ( | ... | ) | __riscv_vwsub_vx_i64m4(__VA_ARGS__) |
| #define vwsub_vx_i64m4_m | ( | ... | ) | __riscv_vwsub_vx_i64m4_tumu(__VA_ARGS__) |
| #define vwsub_vx_i64m8 | ( | ... | ) | __riscv_vwsub_vx_i64m8(__VA_ARGS__) |
| #define vwsub_vx_i64m8_m | ( | ... | ) | __riscv_vwsub_vx_i64m8_tumu(__VA_ARGS__) |
| #define vwsub_wv_i16m1 | ( | ... | ) | __riscv_vwsub_wv_i16m1(__VA_ARGS__) |
| #define vwsub_wv_i16m1_m | ( | ... | ) | __riscv_vwsub_wv_i16m1_tumu(__VA_ARGS__) |
| #define vwsub_wv_i16m2 | ( | ... | ) | __riscv_vwsub_wv_i16m2(__VA_ARGS__) |
| #define vwsub_wv_i16m2_m | ( | ... | ) | __riscv_vwsub_wv_i16m2_tumu(__VA_ARGS__) |
| #define vwsub_wv_i16m4 | ( | ... | ) | __riscv_vwsub_wv_i16m4(__VA_ARGS__) |
| #define vwsub_wv_i16m4_m | ( | ... | ) | __riscv_vwsub_wv_i16m4_tumu(__VA_ARGS__) |
| #define vwsub_wv_i16m8 | ( | ... | ) | __riscv_vwsub_wv_i16m8(__VA_ARGS__) |
| #define vwsub_wv_i16m8_m | ( | ... | ) | __riscv_vwsub_wv_i16m8_tumu(__VA_ARGS__) |
| #define vwsub_wv_i16mf2 | ( | ... | ) | __riscv_vwsub_wv_i16mf2(__VA_ARGS__) |
| #define vwsub_wv_i16mf2_m | ( | ... | ) | __riscv_vwsub_wv_i16mf2_tumu(__VA_ARGS__) |
| #define vwsub_wv_i16mf4 | ( | ... | ) | __riscv_vwsub_wv_i16mf4(__VA_ARGS__) |
| #define vwsub_wv_i16mf4_m | ( | ... | ) | __riscv_vwsub_wv_i16mf4_tumu(__VA_ARGS__) |
| #define vwsub_wv_i32m1 | ( | ... | ) | __riscv_vwsub_wv_i32m1(__VA_ARGS__) |
| #define vwsub_wv_i32m1_m | ( | ... | ) | __riscv_vwsub_wv_i32m1_tumu(__VA_ARGS__) |
| #define vwsub_wv_i32m2 | ( | ... | ) | __riscv_vwsub_wv_i32m2(__VA_ARGS__) |
| #define vwsub_wv_i32m2_m | ( | ... | ) | __riscv_vwsub_wv_i32m2_tumu(__VA_ARGS__) |
| #define vwsub_wv_i32m4 | ( | ... | ) | __riscv_vwsub_wv_i32m4(__VA_ARGS__) |
| #define vwsub_wv_i32m4_m | ( | ... | ) | __riscv_vwsub_wv_i32m4_tumu(__VA_ARGS__) |
| #define vwsub_wv_i32m8 | ( | ... | ) | __riscv_vwsub_wv_i32m8(__VA_ARGS__) |
| #define vwsub_wv_i32m8_m | ( | ... | ) | __riscv_vwsub_wv_i32m8_tumu(__VA_ARGS__) |
| #define vwsub_wv_i32mf2 | ( | ... | ) | __riscv_vwsub_wv_i32mf2(__VA_ARGS__) |
| #define vwsub_wv_i32mf2_m | ( | ... | ) | __riscv_vwsub_wv_i32mf2_tumu(__VA_ARGS__) |
| #define vwsub_wv_i64m1 | ( | ... | ) | __riscv_vwsub_wv_i64m1(__VA_ARGS__) |
| #define vwsub_wv_i64m1_m | ( | ... | ) | __riscv_vwsub_wv_i64m1_tumu(__VA_ARGS__) |
| #define vwsub_wv_i64m2 | ( | ... | ) | __riscv_vwsub_wv_i64m2(__VA_ARGS__) |
| #define vwsub_wv_i64m2_m | ( | ... | ) | __riscv_vwsub_wv_i64m2_tumu(__VA_ARGS__) |
| #define vwsub_wv_i64m4 | ( | ... | ) | __riscv_vwsub_wv_i64m4(__VA_ARGS__) |
| #define vwsub_wv_i64m4_m | ( | ... | ) | __riscv_vwsub_wv_i64m4_tumu(__VA_ARGS__) |
| #define vwsub_wv_i64m8 | ( | ... | ) | __riscv_vwsub_wv_i64m8(__VA_ARGS__) |
| #define vwsub_wv_i64m8_m | ( | ... | ) | __riscv_vwsub_wv_i64m8_tumu(__VA_ARGS__) |
| #define vwsub_wx_i16m1 | ( | ... | ) | __riscv_vwsub_wx_i16m1(__VA_ARGS__) |
| #define vwsub_wx_i16m1_m | ( | ... | ) | __riscv_vwsub_wx_i16m1_tumu(__VA_ARGS__) |
| #define vwsub_wx_i16m2 | ( | ... | ) | __riscv_vwsub_wx_i16m2(__VA_ARGS__) |
| #define vwsub_wx_i16m2_m | ( | ... | ) | __riscv_vwsub_wx_i16m2_tumu(__VA_ARGS__) |
| #define vwsub_wx_i16m4 | ( | ... | ) | __riscv_vwsub_wx_i16m4(__VA_ARGS__) |
| #define vwsub_wx_i16m4_m | ( | ... | ) | __riscv_vwsub_wx_i16m4_tumu(__VA_ARGS__) |
| #define vwsub_wx_i16m8 | ( | ... | ) | __riscv_vwsub_wx_i16m8(__VA_ARGS__) |
| #define vwsub_wx_i16m8_m | ( | ... | ) | __riscv_vwsub_wx_i16m8_tumu(__VA_ARGS__) |
| #define vwsub_wx_i16mf2 | ( | ... | ) | __riscv_vwsub_wx_i16mf2(__VA_ARGS__) |
| #define vwsub_wx_i16mf2_m | ( | ... | ) | __riscv_vwsub_wx_i16mf2_tumu(__VA_ARGS__) |
| #define vwsub_wx_i16mf4 | ( | ... | ) | __riscv_vwsub_wx_i16mf4(__VA_ARGS__) |
| #define vwsub_wx_i16mf4_m | ( | ... | ) | __riscv_vwsub_wx_i16mf4_tumu(__VA_ARGS__) |
| #define vwsub_wx_i32m1 | ( | ... | ) | __riscv_vwsub_wx_i32m1(__VA_ARGS__) |
| #define vwsub_wx_i32m1_m | ( | ... | ) | __riscv_vwsub_wx_i32m1_tumu(__VA_ARGS__) |
| #define vwsub_wx_i32m2 | ( | ... | ) | __riscv_vwsub_wx_i32m2(__VA_ARGS__) |
| #define vwsub_wx_i32m2_m | ( | ... | ) | __riscv_vwsub_wx_i32m2_tumu(__VA_ARGS__) |
| #define vwsub_wx_i32m4 | ( | ... | ) | __riscv_vwsub_wx_i32m4(__VA_ARGS__) |
| #define vwsub_wx_i32m4_m | ( | ... | ) | __riscv_vwsub_wx_i32m4_tumu(__VA_ARGS__) |
| #define vwsub_wx_i32m8 | ( | ... | ) | __riscv_vwsub_wx_i32m8(__VA_ARGS__) |
| #define vwsub_wx_i32m8_m | ( | ... | ) | __riscv_vwsub_wx_i32m8_tumu(__VA_ARGS__) |
| #define vwsub_wx_i32mf2 | ( | ... | ) | __riscv_vwsub_wx_i32mf2(__VA_ARGS__) |
| #define vwsub_wx_i32mf2_m | ( | ... | ) | __riscv_vwsub_wx_i32mf2_tumu(__VA_ARGS__) |
| #define vwsub_wx_i64m1 | ( | ... | ) | __riscv_vwsub_wx_i64m1(__VA_ARGS__) |
| #define vwsub_wx_i64m1_m | ( | ... | ) | __riscv_vwsub_wx_i64m1_tumu(__VA_ARGS__) |
| #define vwsub_wx_i64m2 | ( | ... | ) | __riscv_vwsub_wx_i64m2(__VA_ARGS__) |
| #define vwsub_wx_i64m2_m | ( | ... | ) | __riscv_vwsub_wx_i64m2_tumu(__VA_ARGS__) |
| #define vwsub_wx_i64m4 | ( | ... | ) | __riscv_vwsub_wx_i64m4(__VA_ARGS__) |
| #define vwsub_wx_i64m4_m | ( | ... | ) | __riscv_vwsub_wx_i64m4_tumu(__VA_ARGS__) |
| #define vwsub_wx_i64m8 | ( | ... | ) | __riscv_vwsub_wx_i64m8(__VA_ARGS__) |
| #define vwsub_wx_i64m8_m | ( | ... | ) | __riscv_vwsub_wx_i64m8_tumu(__VA_ARGS__) |
| #define vwsubu_vv_u16m1 | ( | ... | ) | __riscv_vwsubu_vv_u16m1(__VA_ARGS__) |
| #define vwsubu_vv_u16m1_m | ( | ... | ) | __riscv_vwsubu_vv_u16m1_tumu(__VA_ARGS__) |
| #define vwsubu_vv_u16m2 | ( | ... | ) | __riscv_vwsubu_vv_u16m2(__VA_ARGS__) |
| #define vwsubu_vv_u16m2_m | ( | ... | ) | __riscv_vwsubu_vv_u16m2_tumu(__VA_ARGS__) |
| #define vwsubu_vv_u16m4 | ( | ... | ) | __riscv_vwsubu_vv_u16m4(__VA_ARGS__) |
| #define vwsubu_vv_u16m4_m | ( | ... | ) | __riscv_vwsubu_vv_u16m4_tumu(__VA_ARGS__) |
| #define vwsubu_vv_u16m8 | ( | ... | ) | __riscv_vwsubu_vv_u16m8(__VA_ARGS__) |
| #define vwsubu_vv_u16m8_m | ( | ... | ) | __riscv_vwsubu_vv_u16m8_tumu(__VA_ARGS__) |
| #define vwsubu_vv_u16mf2 | ( | ... | ) | __riscv_vwsubu_vv_u16mf2(__VA_ARGS__) |
| #define vwsubu_vv_u16mf2_m | ( | ... | ) | __riscv_vwsubu_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vwsubu_vv_u16mf4 | ( | ... | ) | __riscv_vwsubu_vv_u16mf4(__VA_ARGS__) |
| #define vwsubu_vv_u16mf4_m | ( | ... | ) | __riscv_vwsubu_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vwsubu_vv_u32m1 | ( | ... | ) | __riscv_vwsubu_vv_u32m1(__VA_ARGS__) |
| #define vwsubu_vv_u32m1_m | ( | ... | ) | __riscv_vwsubu_vv_u32m1_tumu(__VA_ARGS__) |
| #define vwsubu_vv_u32m2 | ( | ... | ) | __riscv_vwsubu_vv_u32m2(__VA_ARGS__) |
| #define vwsubu_vv_u32m2_m | ( | ... | ) | __riscv_vwsubu_vv_u32m2_tumu(__VA_ARGS__) |
| #define vwsubu_vv_u32m4 | ( | ... | ) | __riscv_vwsubu_vv_u32m4(__VA_ARGS__) |
| #define vwsubu_vv_u32m4_m | ( | ... | ) | __riscv_vwsubu_vv_u32m4_tumu(__VA_ARGS__) |
| #define vwsubu_vv_u32m8 | ( | ... | ) | __riscv_vwsubu_vv_u32m8(__VA_ARGS__) |
| #define vwsubu_vv_u32m8_m | ( | ... | ) | __riscv_vwsubu_vv_u32m8_tumu(__VA_ARGS__) |
| #define vwsubu_vv_u32mf2 | ( | ... | ) | __riscv_vwsubu_vv_u32mf2(__VA_ARGS__) |
| #define vwsubu_vv_u32mf2_m | ( | ... | ) | __riscv_vwsubu_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vwsubu_vv_u64m1 | ( | ... | ) | __riscv_vwsubu_vv_u64m1(__VA_ARGS__) |
| #define vwsubu_vv_u64m1_m | ( | ... | ) | __riscv_vwsubu_vv_u64m1_tumu(__VA_ARGS__) |
| #define vwsubu_vv_u64m2 | ( | ... | ) | __riscv_vwsubu_vv_u64m2(__VA_ARGS__) |
| #define vwsubu_vv_u64m2_m | ( | ... | ) | __riscv_vwsubu_vv_u64m2_tumu(__VA_ARGS__) |
| #define vwsubu_vv_u64m4 | ( | ... | ) | __riscv_vwsubu_vv_u64m4(__VA_ARGS__) |
| #define vwsubu_vv_u64m4_m | ( | ... | ) | __riscv_vwsubu_vv_u64m4_tumu(__VA_ARGS__) |
| #define vwsubu_vv_u64m8 | ( | ... | ) | __riscv_vwsubu_vv_u64m8(__VA_ARGS__) |
| #define vwsubu_vv_u64m8_m | ( | ... | ) | __riscv_vwsubu_vv_u64m8_tumu(__VA_ARGS__) |
| #define vwsubu_vx_u16m1 | ( | ... | ) | __riscv_vwsubu_vx_u16m1(__VA_ARGS__) |
| #define vwsubu_vx_u16m1_m | ( | ... | ) | __riscv_vwsubu_vx_u16m1_tumu(__VA_ARGS__) |
| #define vwsubu_vx_u16m2 | ( | ... | ) | __riscv_vwsubu_vx_u16m2(__VA_ARGS__) |
| #define vwsubu_vx_u16m2_m | ( | ... | ) | __riscv_vwsubu_vx_u16m2_tumu(__VA_ARGS__) |
| #define vwsubu_vx_u16m4 | ( | ... | ) | __riscv_vwsubu_vx_u16m4(__VA_ARGS__) |
| #define vwsubu_vx_u16m4_m | ( | ... | ) | __riscv_vwsubu_vx_u16m4_tumu(__VA_ARGS__) |
| #define vwsubu_vx_u16m8 | ( | ... | ) | __riscv_vwsubu_vx_u16m8(__VA_ARGS__) |
| #define vwsubu_vx_u16m8_m | ( | ... | ) | __riscv_vwsubu_vx_u16m8_tumu(__VA_ARGS__) |
| #define vwsubu_vx_u16mf2 | ( | ... | ) | __riscv_vwsubu_vx_u16mf2(__VA_ARGS__) |
| #define vwsubu_vx_u16mf2_m | ( | ... | ) | __riscv_vwsubu_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vwsubu_vx_u16mf4 | ( | ... | ) | __riscv_vwsubu_vx_u16mf4(__VA_ARGS__) |
| #define vwsubu_vx_u16mf4_m | ( | ... | ) | __riscv_vwsubu_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vwsubu_vx_u32m1 | ( | ... | ) | __riscv_vwsubu_vx_u32m1(__VA_ARGS__) |
| #define vwsubu_vx_u32m1_m | ( | ... | ) | __riscv_vwsubu_vx_u32m1_tumu(__VA_ARGS__) |
| #define vwsubu_vx_u32m2 | ( | ... | ) | __riscv_vwsubu_vx_u32m2(__VA_ARGS__) |
| #define vwsubu_vx_u32m2_m | ( | ... | ) | __riscv_vwsubu_vx_u32m2_tumu(__VA_ARGS__) |
| #define vwsubu_vx_u32m4 | ( | ... | ) | __riscv_vwsubu_vx_u32m4(__VA_ARGS__) |
| #define vwsubu_vx_u32m4_m | ( | ... | ) | __riscv_vwsubu_vx_u32m4_tumu(__VA_ARGS__) |
| #define vwsubu_vx_u32m8 | ( | ... | ) | __riscv_vwsubu_vx_u32m8(__VA_ARGS__) |
| #define vwsubu_vx_u32m8_m | ( | ... | ) | __riscv_vwsubu_vx_u32m8_tumu(__VA_ARGS__) |
| #define vwsubu_vx_u32mf2 | ( | ... | ) | __riscv_vwsubu_vx_u32mf2(__VA_ARGS__) |
| #define vwsubu_vx_u32mf2_m | ( | ... | ) | __riscv_vwsubu_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vwsubu_vx_u64m1 | ( | ... | ) | __riscv_vwsubu_vx_u64m1(__VA_ARGS__) |
| #define vwsubu_vx_u64m1_m | ( | ... | ) | __riscv_vwsubu_vx_u64m1_tumu(__VA_ARGS__) |
| #define vwsubu_vx_u64m2 | ( | ... | ) | __riscv_vwsubu_vx_u64m2(__VA_ARGS__) |
| #define vwsubu_vx_u64m2_m | ( | ... | ) | __riscv_vwsubu_vx_u64m2_tumu(__VA_ARGS__) |
| #define vwsubu_vx_u64m4 | ( | ... | ) | __riscv_vwsubu_vx_u64m4(__VA_ARGS__) |
| #define vwsubu_vx_u64m4_m | ( | ... | ) | __riscv_vwsubu_vx_u64m4_tumu(__VA_ARGS__) |
| #define vwsubu_vx_u64m8 | ( | ... | ) | __riscv_vwsubu_vx_u64m8(__VA_ARGS__) |
| #define vwsubu_vx_u64m8_m | ( | ... | ) | __riscv_vwsubu_vx_u64m8_tumu(__VA_ARGS__) |
| #define vwsubu_wv_u16m1 | ( | ... | ) | __riscv_vwsubu_wv_u16m1(__VA_ARGS__) |
| #define vwsubu_wv_u16m1_m | ( | ... | ) | __riscv_vwsubu_wv_u16m1_tumu(__VA_ARGS__) |
| #define vwsubu_wv_u16m2 | ( | ... | ) | __riscv_vwsubu_wv_u16m2(__VA_ARGS__) |
| #define vwsubu_wv_u16m2_m | ( | ... | ) | __riscv_vwsubu_wv_u16m2_tumu(__VA_ARGS__) |
| #define vwsubu_wv_u16m4 | ( | ... | ) | __riscv_vwsubu_wv_u16m4(__VA_ARGS__) |
| #define vwsubu_wv_u16m4_m | ( | ... | ) | __riscv_vwsubu_wv_u16m4_tumu(__VA_ARGS__) |
| #define vwsubu_wv_u16m8 | ( | ... | ) | __riscv_vwsubu_wv_u16m8(__VA_ARGS__) |
| #define vwsubu_wv_u16m8_m | ( | ... | ) | __riscv_vwsubu_wv_u16m8_tumu(__VA_ARGS__) |
| #define vwsubu_wv_u16mf2 | ( | ... | ) | __riscv_vwsubu_wv_u16mf2(__VA_ARGS__) |
| #define vwsubu_wv_u16mf2_m | ( | ... | ) | __riscv_vwsubu_wv_u16mf2_tumu(__VA_ARGS__) |
| #define vwsubu_wv_u16mf4 | ( | ... | ) | __riscv_vwsubu_wv_u16mf4(__VA_ARGS__) |
| #define vwsubu_wv_u16mf4_m | ( | ... | ) | __riscv_vwsubu_wv_u16mf4_tumu(__VA_ARGS__) |
| #define vwsubu_wv_u32m1 | ( | ... | ) | __riscv_vwsubu_wv_u32m1(__VA_ARGS__) |
| #define vwsubu_wv_u32m1_m | ( | ... | ) | __riscv_vwsubu_wv_u32m1_tumu(__VA_ARGS__) |
| #define vwsubu_wv_u32m2 | ( | ... | ) | __riscv_vwsubu_wv_u32m2(__VA_ARGS__) |
| #define vwsubu_wv_u32m2_m | ( | ... | ) | __riscv_vwsubu_wv_u32m2_tumu(__VA_ARGS__) |
| #define vwsubu_wv_u32m4 | ( | ... | ) | __riscv_vwsubu_wv_u32m4(__VA_ARGS__) |
| #define vwsubu_wv_u32m4_m | ( | ... | ) | __riscv_vwsubu_wv_u32m4_tumu(__VA_ARGS__) |
| #define vwsubu_wv_u32m8 | ( | ... | ) | __riscv_vwsubu_wv_u32m8(__VA_ARGS__) |
| #define vwsubu_wv_u32m8_m | ( | ... | ) | __riscv_vwsubu_wv_u32m8_tumu(__VA_ARGS__) |
| #define vwsubu_wv_u32mf2 | ( | ... | ) | __riscv_vwsubu_wv_u32mf2(__VA_ARGS__) |
| #define vwsubu_wv_u32mf2_m | ( | ... | ) | __riscv_vwsubu_wv_u32mf2_tumu(__VA_ARGS__) |
| #define vwsubu_wv_u64m1 | ( | ... | ) | __riscv_vwsubu_wv_u64m1(__VA_ARGS__) |
| #define vwsubu_wv_u64m1_m | ( | ... | ) | __riscv_vwsubu_wv_u64m1_tumu(__VA_ARGS__) |
| #define vwsubu_wv_u64m2 | ( | ... | ) | __riscv_vwsubu_wv_u64m2(__VA_ARGS__) |
| #define vwsubu_wv_u64m2_m | ( | ... | ) | __riscv_vwsubu_wv_u64m2_tumu(__VA_ARGS__) |
| #define vwsubu_wv_u64m4 | ( | ... | ) | __riscv_vwsubu_wv_u64m4(__VA_ARGS__) |
| #define vwsubu_wv_u64m4_m | ( | ... | ) | __riscv_vwsubu_wv_u64m4_tumu(__VA_ARGS__) |
| #define vwsubu_wv_u64m8 | ( | ... | ) | __riscv_vwsubu_wv_u64m8(__VA_ARGS__) |
| #define vwsubu_wv_u64m8_m | ( | ... | ) | __riscv_vwsubu_wv_u64m8_tumu(__VA_ARGS__) |
| #define vwsubu_wx_u16m1 | ( | ... | ) | __riscv_vwsubu_wx_u16m1(__VA_ARGS__) |
| #define vwsubu_wx_u16m1_m | ( | ... | ) | __riscv_vwsubu_wx_u16m1_tumu(__VA_ARGS__) |
| #define vwsubu_wx_u16m2 | ( | ... | ) | __riscv_vwsubu_wx_u16m2(__VA_ARGS__) |
| #define vwsubu_wx_u16m2_m | ( | ... | ) | __riscv_vwsubu_wx_u16m2_tumu(__VA_ARGS__) |
| #define vwsubu_wx_u16m4 | ( | ... | ) | __riscv_vwsubu_wx_u16m4(__VA_ARGS__) |
| #define vwsubu_wx_u16m4_m | ( | ... | ) | __riscv_vwsubu_wx_u16m4_tumu(__VA_ARGS__) |
| #define vwsubu_wx_u16m8 | ( | ... | ) | __riscv_vwsubu_wx_u16m8(__VA_ARGS__) |
| #define vwsubu_wx_u16m8_m | ( | ... | ) | __riscv_vwsubu_wx_u16m8_tumu(__VA_ARGS__) |
| #define vwsubu_wx_u16mf2 | ( | ... | ) | __riscv_vwsubu_wx_u16mf2(__VA_ARGS__) |
| #define vwsubu_wx_u16mf2_m | ( | ... | ) | __riscv_vwsubu_wx_u16mf2_tumu(__VA_ARGS__) |
| #define vwsubu_wx_u16mf4 | ( | ... | ) | __riscv_vwsubu_wx_u16mf4(__VA_ARGS__) |
| #define vwsubu_wx_u16mf4_m | ( | ... | ) | __riscv_vwsubu_wx_u16mf4_tumu(__VA_ARGS__) |
| #define vwsubu_wx_u32m1 | ( | ... | ) | __riscv_vwsubu_wx_u32m1(__VA_ARGS__) |
| #define vwsubu_wx_u32m1_m | ( | ... | ) | __riscv_vwsubu_wx_u32m1_tumu(__VA_ARGS__) |
| #define vwsubu_wx_u32m2 | ( | ... | ) | __riscv_vwsubu_wx_u32m2(__VA_ARGS__) |
| #define vwsubu_wx_u32m2_m | ( | ... | ) | __riscv_vwsubu_wx_u32m2_tumu(__VA_ARGS__) |
| #define vwsubu_wx_u32m4 | ( | ... | ) | __riscv_vwsubu_wx_u32m4(__VA_ARGS__) |
| #define vwsubu_wx_u32m4_m | ( | ... | ) | __riscv_vwsubu_wx_u32m4_tumu(__VA_ARGS__) |
| #define vwsubu_wx_u32m8 | ( | ... | ) | __riscv_vwsubu_wx_u32m8(__VA_ARGS__) |
| #define vwsubu_wx_u32m8_m | ( | ... | ) | __riscv_vwsubu_wx_u32m8_tumu(__VA_ARGS__) |
| #define vwsubu_wx_u32mf2 | ( | ... | ) | __riscv_vwsubu_wx_u32mf2(__VA_ARGS__) |
| #define vwsubu_wx_u32mf2_m | ( | ... | ) | __riscv_vwsubu_wx_u32mf2_tumu(__VA_ARGS__) |
| #define vwsubu_wx_u64m1 | ( | ... | ) | __riscv_vwsubu_wx_u64m1(__VA_ARGS__) |
| #define vwsubu_wx_u64m1_m | ( | ... | ) | __riscv_vwsubu_wx_u64m1_tumu(__VA_ARGS__) |
| #define vwsubu_wx_u64m2 | ( | ... | ) | __riscv_vwsubu_wx_u64m2(__VA_ARGS__) |
| #define vwsubu_wx_u64m2_m | ( | ... | ) | __riscv_vwsubu_wx_u64m2_tumu(__VA_ARGS__) |
| #define vwsubu_wx_u64m4 | ( | ... | ) | __riscv_vwsubu_wx_u64m4(__VA_ARGS__) |
| #define vwsubu_wx_u64m4_m | ( | ... | ) | __riscv_vwsubu_wx_u64m4_tumu(__VA_ARGS__) |
| #define vwsubu_wx_u64m8 | ( | ... | ) | __riscv_vwsubu_wx_u64m8(__VA_ARGS__) |
| #define vwsubu_wx_u64m8_m | ( | ... | ) | __riscv_vwsubu_wx_u64m8_tumu(__VA_ARGS__) |
| #define vxor_vv_i16m1 | ( | ... | ) | __riscv_vxor_vv_i16m1(__VA_ARGS__) |
| #define vxor_vv_i16m1_m | ( | ... | ) | __riscv_vxor_vv_i16m1_tumu(__VA_ARGS__) |
| #define vxor_vv_i16m2 | ( | ... | ) | __riscv_vxor_vv_i16m2(__VA_ARGS__) |
| #define vxor_vv_i16m2_m | ( | ... | ) | __riscv_vxor_vv_i16m2_tumu(__VA_ARGS__) |
| #define vxor_vv_i16m4 | ( | ... | ) | __riscv_vxor_vv_i16m4(__VA_ARGS__) |
| #define vxor_vv_i16m4_m | ( | ... | ) | __riscv_vxor_vv_i16m4_tumu(__VA_ARGS__) |
| #define vxor_vv_i16m8 | ( | ... | ) | __riscv_vxor_vv_i16m8(__VA_ARGS__) |
| #define vxor_vv_i16m8_m | ( | ... | ) | __riscv_vxor_vv_i16m8_tumu(__VA_ARGS__) |
| #define vxor_vv_i16mf2 | ( | ... | ) | __riscv_vxor_vv_i16mf2(__VA_ARGS__) |
| #define vxor_vv_i16mf2_m | ( | ... | ) | __riscv_vxor_vv_i16mf2_tumu(__VA_ARGS__) |
| #define vxor_vv_i16mf4 | ( | ... | ) | __riscv_vxor_vv_i16mf4(__VA_ARGS__) |
| #define vxor_vv_i16mf4_m | ( | ... | ) | __riscv_vxor_vv_i16mf4_tumu(__VA_ARGS__) |
| #define vxor_vv_i32m1 | ( | ... | ) | __riscv_vxor_vv_i32m1(__VA_ARGS__) |
| #define vxor_vv_i32m1_m | ( | ... | ) | __riscv_vxor_vv_i32m1_tumu(__VA_ARGS__) |
| #define vxor_vv_i32m2 | ( | ... | ) | __riscv_vxor_vv_i32m2(__VA_ARGS__) |
| #define vxor_vv_i32m2_m | ( | ... | ) | __riscv_vxor_vv_i32m2_tumu(__VA_ARGS__) |
| #define vxor_vv_i32m4 | ( | ... | ) | __riscv_vxor_vv_i32m4(__VA_ARGS__) |
| #define vxor_vv_i32m4_m | ( | ... | ) | __riscv_vxor_vv_i32m4_tumu(__VA_ARGS__) |
| #define vxor_vv_i32m8 | ( | ... | ) | __riscv_vxor_vv_i32m8(__VA_ARGS__) |
| #define vxor_vv_i32m8_m | ( | ... | ) | __riscv_vxor_vv_i32m8_tumu(__VA_ARGS__) |
| #define vxor_vv_i32mf2 | ( | ... | ) | __riscv_vxor_vv_i32mf2(__VA_ARGS__) |
| #define vxor_vv_i32mf2_m | ( | ... | ) | __riscv_vxor_vv_i32mf2_tumu(__VA_ARGS__) |
| #define vxor_vv_i64m1 | ( | ... | ) | __riscv_vxor_vv_i64m1(__VA_ARGS__) |
| #define vxor_vv_i64m1_m | ( | ... | ) | __riscv_vxor_vv_i64m1_tumu(__VA_ARGS__) |
| #define vxor_vv_i64m2 | ( | ... | ) | __riscv_vxor_vv_i64m2(__VA_ARGS__) |
| #define vxor_vv_i64m2_m | ( | ... | ) | __riscv_vxor_vv_i64m2_tumu(__VA_ARGS__) |
| #define vxor_vv_i64m4 | ( | ... | ) | __riscv_vxor_vv_i64m4(__VA_ARGS__) |
| #define vxor_vv_i64m4_m | ( | ... | ) | __riscv_vxor_vv_i64m4_tumu(__VA_ARGS__) |
| #define vxor_vv_i64m8 | ( | ... | ) | __riscv_vxor_vv_i64m8(__VA_ARGS__) |
| #define vxor_vv_i64m8_m | ( | ... | ) | __riscv_vxor_vv_i64m8_tumu(__VA_ARGS__) |
| #define vxor_vv_i8m1 | ( | ... | ) | __riscv_vxor_vv_i8m1(__VA_ARGS__) |
| #define vxor_vv_i8m1_m | ( | ... | ) | __riscv_vxor_vv_i8m1_tumu(__VA_ARGS__) |
| #define vxor_vv_i8m2 | ( | ... | ) | __riscv_vxor_vv_i8m2(__VA_ARGS__) |
| #define vxor_vv_i8m2_m | ( | ... | ) | __riscv_vxor_vv_i8m2_tumu(__VA_ARGS__) |
| #define vxor_vv_i8m4 | ( | ... | ) | __riscv_vxor_vv_i8m4(__VA_ARGS__) |
| #define vxor_vv_i8m4_m | ( | ... | ) | __riscv_vxor_vv_i8m4_tumu(__VA_ARGS__) |
| #define vxor_vv_i8m8 | ( | ... | ) | __riscv_vxor_vv_i8m8(__VA_ARGS__) |
| #define vxor_vv_i8m8_m | ( | ... | ) | __riscv_vxor_vv_i8m8_tumu(__VA_ARGS__) |
| #define vxor_vv_i8mf2 | ( | ... | ) | __riscv_vxor_vv_i8mf2(__VA_ARGS__) |
| #define vxor_vv_i8mf2_m | ( | ... | ) | __riscv_vxor_vv_i8mf2_tumu(__VA_ARGS__) |
| #define vxor_vv_i8mf4 | ( | ... | ) | __riscv_vxor_vv_i8mf4(__VA_ARGS__) |
| #define vxor_vv_i8mf4_m | ( | ... | ) | __riscv_vxor_vv_i8mf4_tumu(__VA_ARGS__) |
| #define vxor_vv_i8mf8 | ( | ... | ) | __riscv_vxor_vv_i8mf8(__VA_ARGS__) |
| #define vxor_vv_i8mf8_m | ( | ... | ) | __riscv_vxor_vv_i8mf8_tumu(__VA_ARGS__) |
| #define vxor_vv_u16m1 | ( | ... | ) | __riscv_vxor_vv_u16m1(__VA_ARGS__) |
| #define vxor_vv_u16m1_m | ( | ... | ) | __riscv_vxor_vv_u16m1_tumu(__VA_ARGS__) |
| #define vxor_vv_u16m2 | ( | ... | ) | __riscv_vxor_vv_u16m2(__VA_ARGS__) |
| #define vxor_vv_u16m2_m | ( | ... | ) | __riscv_vxor_vv_u16m2_tumu(__VA_ARGS__) |
| #define vxor_vv_u16m4 | ( | ... | ) | __riscv_vxor_vv_u16m4(__VA_ARGS__) |
| #define vxor_vv_u16m4_m | ( | ... | ) | __riscv_vxor_vv_u16m4_tumu(__VA_ARGS__) |
| #define vxor_vv_u16m8 | ( | ... | ) | __riscv_vxor_vv_u16m8(__VA_ARGS__) |
| #define vxor_vv_u16m8_m | ( | ... | ) | __riscv_vxor_vv_u16m8_tumu(__VA_ARGS__) |
| #define vxor_vv_u16mf2 | ( | ... | ) | __riscv_vxor_vv_u16mf2(__VA_ARGS__) |
| #define vxor_vv_u16mf2_m | ( | ... | ) | __riscv_vxor_vv_u16mf2_tumu(__VA_ARGS__) |
| #define vxor_vv_u16mf4 | ( | ... | ) | __riscv_vxor_vv_u16mf4(__VA_ARGS__) |
| #define vxor_vv_u16mf4_m | ( | ... | ) | __riscv_vxor_vv_u16mf4_tumu(__VA_ARGS__) |
| #define vxor_vv_u32m1 | ( | ... | ) | __riscv_vxor_vv_u32m1(__VA_ARGS__) |
| #define vxor_vv_u32m1_m | ( | ... | ) | __riscv_vxor_vv_u32m1_tumu(__VA_ARGS__) |
| #define vxor_vv_u32m2 | ( | ... | ) | __riscv_vxor_vv_u32m2(__VA_ARGS__) |
| #define vxor_vv_u32m2_m | ( | ... | ) | __riscv_vxor_vv_u32m2_tumu(__VA_ARGS__) |
| #define vxor_vv_u32m4 | ( | ... | ) | __riscv_vxor_vv_u32m4(__VA_ARGS__) |
| #define vxor_vv_u32m4_m | ( | ... | ) | __riscv_vxor_vv_u32m4_tumu(__VA_ARGS__) |
| #define vxor_vv_u32m8 | ( | ... | ) | __riscv_vxor_vv_u32m8(__VA_ARGS__) |
| #define vxor_vv_u32m8_m | ( | ... | ) | __riscv_vxor_vv_u32m8_tumu(__VA_ARGS__) |
| #define vxor_vv_u32mf2 | ( | ... | ) | __riscv_vxor_vv_u32mf2(__VA_ARGS__) |
| #define vxor_vv_u32mf2_m | ( | ... | ) | __riscv_vxor_vv_u32mf2_tumu(__VA_ARGS__) |
| #define vxor_vv_u64m1 | ( | ... | ) | __riscv_vxor_vv_u64m1(__VA_ARGS__) |
| #define vxor_vv_u64m1_m | ( | ... | ) | __riscv_vxor_vv_u64m1_tumu(__VA_ARGS__) |
| #define vxor_vv_u64m2 | ( | ... | ) | __riscv_vxor_vv_u64m2(__VA_ARGS__) |
| #define vxor_vv_u64m2_m | ( | ... | ) | __riscv_vxor_vv_u64m2_tumu(__VA_ARGS__) |
| #define vxor_vv_u64m4 | ( | ... | ) | __riscv_vxor_vv_u64m4(__VA_ARGS__) |
| #define vxor_vv_u64m4_m | ( | ... | ) | __riscv_vxor_vv_u64m4_tumu(__VA_ARGS__) |
| #define vxor_vv_u64m8 | ( | ... | ) | __riscv_vxor_vv_u64m8(__VA_ARGS__) |
| #define vxor_vv_u64m8_m | ( | ... | ) | __riscv_vxor_vv_u64m8_tumu(__VA_ARGS__) |
| #define vxor_vv_u8m1 | ( | ... | ) | __riscv_vxor_vv_u8m1(__VA_ARGS__) |
| #define vxor_vv_u8m1_m | ( | ... | ) | __riscv_vxor_vv_u8m1_tumu(__VA_ARGS__) |
| #define vxor_vv_u8m2 | ( | ... | ) | __riscv_vxor_vv_u8m2(__VA_ARGS__) |
| #define vxor_vv_u8m2_m | ( | ... | ) | __riscv_vxor_vv_u8m2_tumu(__VA_ARGS__) |
| #define vxor_vv_u8m4 | ( | ... | ) | __riscv_vxor_vv_u8m4(__VA_ARGS__) |
| #define vxor_vv_u8m4_m | ( | ... | ) | __riscv_vxor_vv_u8m4_tumu(__VA_ARGS__) |
| #define vxor_vv_u8m8 | ( | ... | ) | __riscv_vxor_vv_u8m8(__VA_ARGS__) |
| #define vxor_vv_u8m8_m | ( | ... | ) | __riscv_vxor_vv_u8m8_tumu(__VA_ARGS__) |
| #define vxor_vv_u8mf2 | ( | ... | ) | __riscv_vxor_vv_u8mf2(__VA_ARGS__) |
| #define vxor_vv_u8mf2_m | ( | ... | ) | __riscv_vxor_vv_u8mf2_tumu(__VA_ARGS__) |
| #define vxor_vv_u8mf4 | ( | ... | ) | __riscv_vxor_vv_u8mf4(__VA_ARGS__) |
| #define vxor_vv_u8mf4_m | ( | ... | ) | __riscv_vxor_vv_u8mf4_tumu(__VA_ARGS__) |
| #define vxor_vv_u8mf8 | ( | ... | ) | __riscv_vxor_vv_u8mf8(__VA_ARGS__) |
| #define vxor_vv_u8mf8_m | ( | ... | ) | __riscv_vxor_vv_u8mf8_tumu(__VA_ARGS__) |
| #define vxor_vx_i16m1 | ( | ... | ) | __riscv_vxor_vx_i16m1(__VA_ARGS__) |
| #define vxor_vx_i16m1_m | ( | ... | ) | __riscv_vxor_vx_i16m1_tumu(__VA_ARGS__) |
| #define vxor_vx_i16m2 | ( | ... | ) | __riscv_vxor_vx_i16m2(__VA_ARGS__) |
| #define vxor_vx_i16m2_m | ( | ... | ) | __riscv_vxor_vx_i16m2_tumu(__VA_ARGS__) |
| #define vxor_vx_i16m4 | ( | ... | ) | __riscv_vxor_vx_i16m4(__VA_ARGS__) |
| #define vxor_vx_i16m4_m | ( | ... | ) | __riscv_vxor_vx_i16m4_tumu(__VA_ARGS__) |
| #define vxor_vx_i16m8 | ( | ... | ) | __riscv_vxor_vx_i16m8(__VA_ARGS__) |
| #define vxor_vx_i16m8_m | ( | ... | ) | __riscv_vxor_vx_i16m8_tumu(__VA_ARGS__) |
| #define vxor_vx_i16mf2 | ( | ... | ) | __riscv_vxor_vx_i16mf2(__VA_ARGS__) |
| #define vxor_vx_i16mf2_m | ( | ... | ) | __riscv_vxor_vx_i16mf2_tumu(__VA_ARGS__) |
| #define vxor_vx_i16mf4 | ( | ... | ) | __riscv_vxor_vx_i16mf4(__VA_ARGS__) |
| #define vxor_vx_i16mf4_m | ( | ... | ) | __riscv_vxor_vx_i16mf4_tumu(__VA_ARGS__) |
| #define vxor_vx_i32m1 | ( | ... | ) | __riscv_vxor_vx_i32m1(__VA_ARGS__) |
| #define vxor_vx_i32m1_m | ( | ... | ) | __riscv_vxor_vx_i32m1_tumu(__VA_ARGS__) |
| #define vxor_vx_i32m2 | ( | ... | ) | __riscv_vxor_vx_i32m2(__VA_ARGS__) |
| #define vxor_vx_i32m2_m | ( | ... | ) | __riscv_vxor_vx_i32m2_tumu(__VA_ARGS__) |
| #define vxor_vx_i32m4 | ( | ... | ) | __riscv_vxor_vx_i32m4(__VA_ARGS__) |
| #define vxor_vx_i32m4_m | ( | ... | ) | __riscv_vxor_vx_i32m4_tumu(__VA_ARGS__) |
| #define vxor_vx_i32m8 | ( | ... | ) | __riscv_vxor_vx_i32m8(__VA_ARGS__) |
| #define vxor_vx_i32m8_m | ( | ... | ) | __riscv_vxor_vx_i32m8_tumu(__VA_ARGS__) |
| #define vxor_vx_i32mf2 | ( | ... | ) | __riscv_vxor_vx_i32mf2(__VA_ARGS__) |
| #define vxor_vx_i32mf2_m | ( | ... | ) | __riscv_vxor_vx_i32mf2_tumu(__VA_ARGS__) |
| #define vxor_vx_i64m1 | ( | ... | ) | __riscv_vxor_vx_i64m1(__VA_ARGS__) |
| #define vxor_vx_i64m1_m | ( | ... | ) | __riscv_vxor_vx_i64m1_tumu(__VA_ARGS__) |
| #define vxor_vx_i64m2 | ( | ... | ) | __riscv_vxor_vx_i64m2(__VA_ARGS__) |
| #define vxor_vx_i64m2_m | ( | ... | ) | __riscv_vxor_vx_i64m2_tumu(__VA_ARGS__) |
| #define vxor_vx_i64m4 | ( | ... | ) | __riscv_vxor_vx_i64m4(__VA_ARGS__) |
| #define vxor_vx_i64m4_m | ( | ... | ) | __riscv_vxor_vx_i64m4_tumu(__VA_ARGS__) |
| #define vxor_vx_i64m8 | ( | ... | ) | __riscv_vxor_vx_i64m8(__VA_ARGS__) |
| #define vxor_vx_i64m8_m | ( | ... | ) | __riscv_vxor_vx_i64m8_tumu(__VA_ARGS__) |
| #define vxor_vx_i8m1 | ( | ... | ) | __riscv_vxor_vx_i8m1(__VA_ARGS__) |
| #define vxor_vx_i8m1_m | ( | ... | ) | __riscv_vxor_vx_i8m1_tumu(__VA_ARGS__) |
| #define vxor_vx_i8m2 | ( | ... | ) | __riscv_vxor_vx_i8m2(__VA_ARGS__) |
| #define vxor_vx_i8m2_m | ( | ... | ) | __riscv_vxor_vx_i8m2_tumu(__VA_ARGS__) |
| #define vxor_vx_i8m4 | ( | ... | ) | __riscv_vxor_vx_i8m4(__VA_ARGS__) |
| #define vxor_vx_i8m4_m | ( | ... | ) | __riscv_vxor_vx_i8m4_tumu(__VA_ARGS__) |
| #define vxor_vx_i8m8 | ( | ... | ) | __riscv_vxor_vx_i8m8(__VA_ARGS__) |
| #define vxor_vx_i8m8_m | ( | ... | ) | __riscv_vxor_vx_i8m8_tumu(__VA_ARGS__) |
| #define vxor_vx_i8mf2 | ( | ... | ) | __riscv_vxor_vx_i8mf2(__VA_ARGS__) |
| #define vxor_vx_i8mf2_m | ( | ... | ) | __riscv_vxor_vx_i8mf2_tumu(__VA_ARGS__) |
| #define vxor_vx_i8mf4 | ( | ... | ) | __riscv_vxor_vx_i8mf4(__VA_ARGS__) |
| #define vxor_vx_i8mf4_m | ( | ... | ) | __riscv_vxor_vx_i8mf4_tumu(__VA_ARGS__) |
| #define vxor_vx_i8mf8 | ( | ... | ) | __riscv_vxor_vx_i8mf8(__VA_ARGS__) |
| #define vxor_vx_i8mf8_m | ( | ... | ) | __riscv_vxor_vx_i8mf8_tumu(__VA_ARGS__) |
| #define vxor_vx_u16m1 | ( | ... | ) | __riscv_vxor_vx_u16m1(__VA_ARGS__) |
| #define vxor_vx_u16m1_m | ( | ... | ) | __riscv_vxor_vx_u16m1_tumu(__VA_ARGS__) |
| #define vxor_vx_u16m2 | ( | ... | ) | __riscv_vxor_vx_u16m2(__VA_ARGS__) |
| #define vxor_vx_u16m2_m | ( | ... | ) | __riscv_vxor_vx_u16m2_tumu(__VA_ARGS__) |
| #define vxor_vx_u16m4 | ( | ... | ) | __riscv_vxor_vx_u16m4(__VA_ARGS__) |
| #define vxor_vx_u16m4_m | ( | ... | ) | __riscv_vxor_vx_u16m4_tumu(__VA_ARGS__) |
| #define vxor_vx_u16m8 | ( | ... | ) | __riscv_vxor_vx_u16m8(__VA_ARGS__) |
| #define vxor_vx_u16m8_m | ( | ... | ) | __riscv_vxor_vx_u16m8_tumu(__VA_ARGS__) |
| #define vxor_vx_u16mf2 | ( | ... | ) | __riscv_vxor_vx_u16mf2(__VA_ARGS__) |
| #define vxor_vx_u16mf2_m | ( | ... | ) | __riscv_vxor_vx_u16mf2_tumu(__VA_ARGS__) |
| #define vxor_vx_u16mf4 | ( | ... | ) | __riscv_vxor_vx_u16mf4(__VA_ARGS__) |
| #define vxor_vx_u16mf4_m | ( | ... | ) | __riscv_vxor_vx_u16mf4_tumu(__VA_ARGS__) |
| #define vxor_vx_u32m1 | ( | ... | ) | __riscv_vxor_vx_u32m1(__VA_ARGS__) |
| #define vxor_vx_u32m1_m | ( | ... | ) | __riscv_vxor_vx_u32m1_tumu(__VA_ARGS__) |
| #define vxor_vx_u32m2 | ( | ... | ) | __riscv_vxor_vx_u32m2(__VA_ARGS__) |
| #define vxor_vx_u32m2_m | ( | ... | ) | __riscv_vxor_vx_u32m2_tumu(__VA_ARGS__) |
| #define vxor_vx_u32m4 | ( | ... | ) | __riscv_vxor_vx_u32m4(__VA_ARGS__) |
| #define vxor_vx_u32m4_m | ( | ... | ) | __riscv_vxor_vx_u32m4_tumu(__VA_ARGS__) |
| #define vxor_vx_u32m8 | ( | ... | ) | __riscv_vxor_vx_u32m8(__VA_ARGS__) |
| #define vxor_vx_u32m8_m | ( | ... | ) | __riscv_vxor_vx_u32m8_tumu(__VA_ARGS__) |
| #define vxor_vx_u32mf2 | ( | ... | ) | __riscv_vxor_vx_u32mf2(__VA_ARGS__) |
| #define vxor_vx_u32mf2_m | ( | ... | ) | __riscv_vxor_vx_u32mf2_tumu(__VA_ARGS__) |
| #define vxor_vx_u64m1 | ( | ... | ) | __riscv_vxor_vx_u64m1(__VA_ARGS__) |
| #define vxor_vx_u64m1_m | ( | ... | ) | __riscv_vxor_vx_u64m1_tumu(__VA_ARGS__) |
| #define vxor_vx_u64m2 | ( | ... | ) | __riscv_vxor_vx_u64m2(__VA_ARGS__) |
| #define vxor_vx_u64m2_m | ( | ... | ) | __riscv_vxor_vx_u64m2_tumu(__VA_ARGS__) |
| #define vxor_vx_u64m4 | ( | ... | ) | __riscv_vxor_vx_u64m4(__VA_ARGS__) |
| #define vxor_vx_u64m4_m | ( | ... | ) | __riscv_vxor_vx_u64m4_tumu(__VA_ARGS__) |
| #define vxor_vx_u64m8 | ( | ... | ) | __riscv_vxor_vx_u64m8(__VA_ARGS__) |
| #define vxor_vx_u64m8_m | ( | ... | ) | __riscv_vxor_vx_u64m8_tumu(__VA_ARGS__) |
| #define vxor_vx_u8m1 | ( | ... | ) | __riscv_vxor_vx_u8m1(__VA_ARGS__) |
| #define vxor_vx_u8m1_m | ( | ... | ) | __riscv_vxor_vx_u8m1_tumu(__VA_ARGS__) |
| #define vxor_vx_u8m2 | ( | ... | ) | __riscv_vxor_vx_u8m2(__VA_ARGS__) |
| #define vxor_vx_u8m2_m | ( | ... | ) | __riscv_vxor_vx_u8m2_tumu(__VA_ARGS__) |
| #define vxor_vx_u8m4 | ( | ... | ) | __riscv_vxor_vx_u8m4(__VA_ARGS__) |
| #define vxor_vx_u8m4_m | ( | ... | ) | __riscv_vxor_vx_u8m4_tumu(__VA_ARGS__) |
| #define vxor_vx_u8m8 | ( | ... | ) | __riscv_vxor_vx_u8m8(__VA_ARGS__) |
| #define vxor_vx_u8m8_m | ( | ... | ) | __riscv_vxor_vx_u8m8_tumu(__VA_ARGS__) |
| #define vxor_vx_u8mf2 | ( | ... | ) | __riscv_vxor_vx_u8mf2(__VA_ARGS__) |
| #define vxor_vx_u8mf2_m | ( | ... | ) | __riscv_vxor_vx_u8mf2_tumu(__VA_ARGS__) |
| #define vxor_vx_u8mf4 | ( | ... | ) | __riscv_vxor_vx_u8mf4(__VA_ARGS__) |
| #define vxor_vx_u8mf4_m | ( | ... | ) | __riscv_vxor_vx_u8mf4_tumu(__VA_ARGS__) |
| #define vxor_vx_u8mf8 | ( | ... | ) | __riscv_vxor_vx_u8mf8(__VA_ARGS__) |
| #define vxor_vx_u8mf8_m | ( | ... | ) | __riscv_vxor_vx_u8mf8_tumu(__VA_ARGS__) |
| #define vzext_vf2_u16m1 | ( | ... | ) | __riscv_vzext_vf2_u16m1(__VA_ARGS__) |
| #define vzext_vf2_u16m1_m | ( | ... | ) | __riscv_vzext_vf2_u16m1_tumu(__VA_ARGS__) |
| #define vzext_vf2_u16m2 | ( | ... | ) | __riscv_vzext_vf2_u16m2(__VA_ARGS__) |
| #define vzext_vf2_u16m2_m | ( | ... | ) | __riscv_vzext_vf2_u16m2_tumu(__VA_ARGS__) |
| #define vzext_vf2_u16m4 | ( | ... | ) | __riscv_vzext_vf2_u16m4(__VA_ARGS__) |
| #define vzext_vf2_u16m4_m | ( | ... | ) | __riscv_vzext_vf2_u16m4_tumu(__VA_ARGS__) |
| #define vzext_vf2_u16m8 | ( | ... | ) | __riscv_vzext_vf2_u16m8(__VA_ARGS__) |
| #define vzext_vf2_u16m8_m | ( | ... | ) | __riscv_vzext_vf2_u16m8_tumu(__VA_ARGS__) |
| #define vzext_vf2_u16mf2 | ( | ... | ) | __riscv_vzext_vf2_u16mf2(__VA_ARGS__) |
| #define vzext_vf2_u16mf2_m | ( | ... | ) | __riscv_vzext_vf2_u16mf2_tumu(__VA_ARGS__) |
| #define vzext_vf2_u16mf4 | ( | ... | ) | __riscv_vzext_vf2_u16mf4(__VA_ARGS__) |
| #define vzext_vf2_u16mf4_m | ( | ... | ) | __riscv_vzext_vf2_u16mf4_tumu(__VA_ARGS__) |
| #define vzext_vf2_u32m1 | ( | ... | ) | __riscv_vzext_vf2_u32m1(__VA_ARGS__) |
| #define vzext_vf2_u32m1_m | ( | ... | ) | __riscv_vzext_vf2_u32m1_tumu(__VA_ARGS__) |
| #define vzext_vf2_u32m2 | ( | ... | ) | __riscv_vzext_vf2_u32m2(__VA_ARGS__) |
| #define vzext_vf2_u32m2_m | ( | ... | ) | __riscv_vzext_vf2_u32m2_tumu(__VA_ARGS__) |
| #define vzext_vf2_u32m4 | ( | ... | ) | __riscv_vzext_vf2_u32m4(__VA_ARGS__) |
| #define vzext_vf2_u32m4_m | ( | ... | ) | __riscv_vzext_vf2_u32m4_tumu(__VA_ARGS__) |
| #define vzext_vf2_u32m8 | ( | ... | ) | __riscv_vzext_vf2_u32m8(__VA_ARGS__) |
| #define vzext_vf2_u32m8_m | ( | ... | ) | __riscv_vzext_vf2_u32m8_tumu(__VA_ARGS__) |
| #define vzext_vf2_u32mf2 | ( | ... | ) | __riscv_vzext_vf2_u32mf2(__VA_ARGS__) |
| #define vzext_vf2_u32mf2_m | ( | ... | ) | __riscv_vzext_vf2_u32mf2_tumu(__VA_ARGS__) |
| #define vzext_vf2_u64m1 | ( | ... | ) | __riscv_vzext_vf2_u64m1(__VA_ARGS__) |
| #define vzext_vf2_u64m1_m | ( | ... | ) | __riscv_vzext_vf2_u64m1_tumu(__VA_ARGS__) |
| #define vzext_vf2_u64m2 | ( | ... | ) | __riscv_vzext_vf2_u64m2(__VA_ARGS__) |
| #define vzext_vf2_u64m2_m | ( | ... | ) | __riscv_vzext_vf2_u64m2_tumu(__VA_ARGS__) |
| #define vzext_vf2_u64m4 | ( | ... | ) | __riscv_vzext_vf2_u64m4(__VA_ARGS__) |
| #define vzext_vf2_u64m4_m | ( | ... | ) | __riscv_vzext_vf2_u64m4_tumu(__VA_ARGS__) |
| #define vzext_vf2_u64m8 | ( | ... | ) | __riscv_vzext_vf2_u64m8(__VA_ARGS__) |
| #define vzext_vf2_u64m8_m | ( | ... | ) | __riscv_vzext_vf2_u64m8_tumu(__VA_ARGS__) |
| #define vzext_vf4_u32m1 | ( | ... | ) | __riscv_vzext_vf4_u32m1(__VA_ARGS__) |
| #define vzext_vf4_u32m1_m | ( | ... | ) | __riscv_vzext_vf4_u32m1_tumu(__VA_ARGS__) |
| #define vzext_vf4_u32m2 | ( | ... | ) | __riscv_vzext_vf4_u32m2(__VA_ARGS__) |
| #define vzext_vf4_u32m2_m | ( | ... | ) | __riscv_vzext_vf4_u32m2_tumu(__VA_ARGS__) |
| #define vzext_vf4_u32m4 | ( | ... | ) | __riscv_vzext_vf4_u32m4(__VA_ARGS__) |
| #define vzext_vf4_u32m4_m | ( | ... | ) | __riscv_vzext_vf4_u32m4_tumu(__VA_ARGS__) |
| #define vzext_vf4_u32m8 | ( | ... | ) | __riscv_vzext_vf4_u32m8(__VA_ARGS__) |
| #define vzext_vf4_u32m8_m | ( | ... | ) | __riscv_vzext_vf4_u32m8_tumu(__VA_ARGS__) |
| #define vzext_vf4_u32mf2 | ( | ... | ) | __riscv_vzext_vf4_u32mf2(__VA_ARGS__) |
| #define vzext_vf4_u32mf2_m | ( | ... | ) | __riscv_vzext_vf4_u32mf2_tumu(__VA_ARGS__) |
| #define vzext_vf4_u64m1 | ( | ... | ) | __riscv_vzext_vf4_u64m1(__VA_ARGS__) |
| #define vzext_vf4_u64m1_m | ( | ... | ) | __riscv_vzext_vf4_u64m1_tumu(__VA_ARGS__) |
| #define vzext_vf4_u64m2 | ( | ... | ) | __riscv_vzext_vf4_u64m2(__VA_ARGS__) |
| #define vzext_vf4_u64m2_m | ( | ... | ) | __riscv_vzext_vf4_u64m2_tumu(__VA_ARGS__) |
| #define vzext_vf4_u64m4 | ( | ... | ) | __riscv_vzext_vf4_u64m4(__VA_ARGS__) |
| #define vzext_vf4_u64m4_m | ( | ... | ) | __riscv_vzext_vf4_u64m4_tumu(__VA_ARGS__) |
| #define vzext_vf4_u64m8 | ( | ... | ) | __riscv_vzext_vf4_u64m8(__VA_ARGS__) |
| #define vzext_vf4_u64m8_m | ( | ... | ) | __riscv_vzext_vf4_u64m8_tumu(__VA_ARGS__) |
| #define vzext_vf8_u64m1 | ( | ... | ) | __riscv_vzext_vf8_u64m1(__VA_ARGS__) |
| #define vzext_vf8_u64m1_m | ( | ... | ) | __riscv_vzext_vf8_u64m1_tumu(__VA_ARGS__) |
| #define vzext_vf8_u64m2 | ( | ... | ) | __riscv_vzext_vf8_u64m2(__VA_ARGS__) |
| #define vzext_vf8_u64m2_m | ( | ... | ) | __riscv_vzext_vf8_u64m2_tumu(__VA_ARGS__) |
| #define vzext_vf8_u64m4 | ( | ... | ) | __riscv_vzext_vf8_u64m4(__VA_ARGS__) |
| #define vzext_vf8_u64m4_m | ( | ... | ) | __riscv_vzext_vf8_u64m4_tumu(__VA_ARGS__) |
| #define vzext_vf8_u64m8 | ( | ... | ) | __riscv_vzext_vf8_u64m8(__VA_ARGS__) |
| #define vzext_vf8_u64m8_m | ( | ... | ) | __riscv_vzext_vf8_u64m8_tumu(__VA_ARGS__) |
1.9.8