view include/vec/impl/generic.h @ 37:4b5a557aa64f

*: turns out extern is a practical joke. rewrite to be always inline again; the sample benchmark performs about 3x as well with optimizations disabled :)
author Paper <paper@tflc.us>
date Sat, 26 Apr 2025 01:04:35 -0400
parents 677c03c382b8
children fd42f9b1b95e
line wrap: on
line source

/**
 * vec - a tiny SIMD vector library in plain C99
 * 
 * Copyright (c) 2024 Paper
 * 
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 * 
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 * 
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
**/

/* This file is automatically generated! Do not edit it directly!
 * Edit the code that generates it in utils/gengeneric.c  --paper */

#ifndef VEC_IMPL_GENERIC_H_
#define VEC_IMPL_GENERIC_H_

#include <string.h>

// -----------------------------------------------------------------

/* Core per-lane loop: evaluates `op` for every lane index `i`, stores the
 * result into vec1.generic[i], and returns vec1.  Meant to be the entire
 * body of a VEC_FUNC_IMPL function whose parameters are named `vec1` (and
 * usually `vec2`).  Note the `return` inside the do/while: the macro must
 * be the last statement of the enclosing function. */
#define VEC_GENERIC_OPERATION(op, sign, bits, size) \
	do { \
		int i; \
	\
		for (i = 0; i < size; i++) \
			vec1.generic[i] = (op); \
	\
		return vec1; \
	} while (0)

/* Per-lane application of a built-in infix operator:
 * vec1[i] = vec1[i] op vec2[i]. */
#define VEC_GENERIC_BUILTIN_OPERATION(op, sign, bits, size) \
	VEC_GENERIC_OPERATION(vec1.generic[i] op vec2.generic[i], sign, bits, size)

/* Per-lane comparison producing SIMD-style masks: each lane becomes
 * all-ones (the max of the unsigned type, converted) when the comparison
 * holds, all-zeros otherwise.  MIN/MAX below rely on this mask shape. */
#define VEC_GENERIC_CMP(op, sign, bits, size) \
	VEC_GENERIC_OPERATION((vec1.generic[i] op vec2.generic[i]) ? (vec_##sign##int##bits)VEC_MAX_OF_TYPE(vec_uint##bits) : 0, sign, bits, size)

/* okay, now we can do this crap: */

/* Broadcast the scalar `x` into every lane of a newly constructed vector. */
#define VEC_GENERIC_SPLAT(sign, bits, size) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_splat(vec_##sign##int##bits x) \
	{ \
		v##sign##int##bits##x##size vec; \
		for (int i = 0; i < size; i++) \
			vec.generic[i] = x; \
		return vec; \
	}

/* Load `size` elements from `in` into a new vector via memcpy.  The generic
 * implementation has no alignment requirement, so both the aligned and the
 * unaligned entry points share this body (see the aliases below). */
#define VEC_GENERIC_LOAD_EX(name, sign, bits, size) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_##name(const vec_##sign##int##bits in[size]) \
	{ \
		v##sign##int##bits##x##size vec; \
		memcpy(&vec, in, sizeof(vec_##sign##int##bits) * size); \
		return vec; \
	}

/* Aligned and unaligned loads are identical for the scalar fallback. */
#define VEC_GENERIC_LOAD_ALIGNED(sign, bits, size) VEC_GENERIC_LOAD_EX(load_aligned, sign, bits, size)
#define VEC_GENERIC_LOAD(sign, bits, size) VEC_GENERIC_LOAD_EX(load, sign, bits, size)

/* Store all `size` lanes of `vec` into `out` via memcpy; no alignment
 * requirement, so aligned/unaligned stores share this body. */
#define VEC_GENERIC_STORE_EX(name, sign, bits, size) \
	VEC_FUNC_IMPL void v##sign##int##bits##x##size##_##name(v##sign##int##bits##x##size vec, vec_##sign##int##bits out[size]) \
	{ \
		memcpy(out, &vec, sizeof(vec_##sign##int##bits) * size); \
	}

/* Aligned and unaligned stores are identical for the scalar fallback. */
#define VEC_GENERIC_STORE_ALIGNED(sign, bits, size) VEC_GENERIC_STORE_EX(store_aligned, sign, bits, size)
#define VEC_GENERIC_STORE(sign, bits, size) VEC_GENERIC_STORE_EX(store, sign, bits, size)

/* Lane-wise addition. */
#define VEC_GENERIC_ADD(sign, bits, size) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_add(v##sign##int##bits##x##size vec1, v##sign##int##bits##x##size vec2) \
	{ \
		VEC_GENERIC_BUILTIN_OPERATION(+, sign, bits, size); \
	}

/* Lane-wise subtraction. */
#define VEC_GENERIC_SUB(sign, bits, size) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_sub(v##sign##int##bits##x##size vec1, v##sign##int##bits##x##size vec2) \
	{ \
		VEC_GENERIC_BUILTIN_OPERATION(-, sign, bits, size); \
	}

/* Lane-wise multiplication. */
#define VEC_GENERIC_MUL(sign, bits, size) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_mul(v##sign##int##bits##x##size vec1, v##sign##int##bits##x##size vec2) \
	{ \
		VEC_GENERIC_BUILTIN_OPERATION(*, sign, bits, size); \
	}

/* Lane-wise division.  A zero divisor yields 0 in that lane rather than
 * invoking undefined behavior — a deliberate guard. */
#define VEC_GENERIC_DIV(sign, bits, size) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_div(v##sign##int##bits##x##size vec1, v##sign##int##bits##x##size vec2) \
	{ \
		VEC_GENERIC_OPERATION(vec2.generic[i] ? (vec1.generic[i] / vec2.generic[i]) : 0, sign, bits, size); \
	}

/* Lane-wise average, delegating to the scalar helper vec_avg / vec_uavg
 * (defined elsewhere in the library).  Rewritten to go through
 * VEC_GENERIC_OPERATION for consistency with the other per-lane macros
 * (DIV, LSHIFT, RSHIFT, LRSHIFT) instead of hand-rolling the same loop;
 * the expansion is behaviorally identical. */
#define VEC_GENERIC_AVG(sign, bits, size) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_avg(v##sign##int##bits##x##size vec1, v##sign##int##bits##x##size vec2) \
	{ \
		VEC_GENERIC_OPERATION(vec_##sign##avg(vec1.generic[i], vec2.generic[i]), sign, bits, size); \
	}

/* Lane-wise bitwise AND. */
#define VEC_GENERIC_AND(sign, bits, size) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_and(v##sign##int##bits##x##size vec1, v##sign##int##bits##x##size vec2) \
	{ \
		VEC_GENERIC_BUILTIN_OPERATION(&, sign, bits, size); \
	}

/* Lane-wise bitwise OR. */
#define VEC_GENERIC_OR(sign, bits, size) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_or(v##sign##int##bits##x##size vec1, v##sign##int##bits##x##size vec2) \
	{ \
		VEC_GENERIC_BUILTIN_OPERATION(|, sign, bits, size); \
	}

/* Lane-wise bitwise XOR. */
#define VEC_GENERIC_XOR(sign, bits, size) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_xor(v##sign##int##bits##x##size vec1, v##sign##int##bits##x##size vec2) \
	{ \
		VEC_GENERIC_BUILTIN_OPERATION(^, sign, bits, size); \
	}

/* Bitwise NOT, implemented as XOR against an all-ones splat so it can
 * reuse the already-defined xor and splat operations. */
#define VEC_GENERIC_NOT(sign, bits, size) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_not(v##sign##int##bits##x##size vec) \
	{ \
		return v##sign##int##bits##x##size##_xor(vec, v##sign##int##bits##x##size##_splat((vec_##sign##int##bits)VEC_MAX_OF_TYPE(vec_uint##bits))); \
	}

/* Lane-wise less-than; produces all-ones/all-zeros masks per lane. */
#define VEC_GENERIC_CMPLT(sign, bits, size) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_cmplt(v##sign##int##bits##x##size vec1, v##sign##int##bits##x##size vec2) \
	{ \
		VEC_GENERIC_CMP(<, sign, bits, size); \
	}

/* Lane-wise less-or-equal via the identity (a <= b) == !(a > b); mask
 * inversion works because comparisons produce all-ones/all-zeros lanes. */
#define VEC_GENERIC_CMPLE(sign, bits, size) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_cmple(v##sign##int##bits##x##size vec1, v##sign##int##bits##x##size vec2) \
	{ \
		return v##sign##int##bits##x##size##_not(v##sign##int##bits##x##size##_cmpgt(vec1, vec2)); \
	}

/* Lane-wise equality; produces all-ones/all-zeros masks per lane. */
#define VEC_GENERIC_CMPEQ(sign, bits, size) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_cmpeq(v##sign##int##bits##x##size vec1, v##sign##int##bits##x##size vec2) \
	{ \
		VEC_GENERIC_CMP(==, sign, bits, size); \
	}

/* Lane-wise greater-or-equal via the identity (a >= b) == !(a < b). */
#define VEC_GENERIC_CMPGE(sign, bits, size) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_cmpge(v##sign##int##bits##x##size vec1, v##sign##int##bits##x##size vec2) \
	{ \
		return v##sign##int##bits##x##size##_not(v##sign##int##bits##x##size##_cmplt(vec1, vec2)); \
	}

/* Lane-wise greater-than; produces all-ones/all-zeros masks per lane. */
#define VEC_GENERIC_CMPGT(sign, bits, size) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_cmpgt(v##sign##int##bits##x##size vec1, v##sign##int##bits##x##size vec2) \
	{ \
		VEC_GENERIC_CMP(>, sign, bits, size); \
	}

/* Lane-wise left shift by per-lane unsigned amounts; delegates to the
 * scalar vec_lshift / vec_ulshift helper (defined elsewhere) so the
 * shift-amount/sign semantics live in one place. */
#define VEC_GENERIC_LSHIFT(sign, bits, size) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_lshift(v##sign##int##bits##x##size vec1, vuint##bits##x##size vec2) \
	{ \
		VEC_GENERIC_OPERATION(vec_##sign##lshift(vec1.generic[i], vec2.generic[i]), sign, bits, size); \
	}

/* Lane-wise (arithmetic for signed) right shift by per-lane unsigned
 * amounts; delegates to the scalar vec_rshift / vec_urshift helper. */
#define VEC_GENERIC_RSHIFT(sign, bits, size) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_rshift(v##sign##int##bits##x##size vec1, vuint##bits##x##size vec2) \
	{ \
		VEC_GENERIC_OPERATION(vec_##sign##rshift(vec1.generic[i], vec2.generic[i]), sign, bits, size); \
	}

/* Lane-wise logical right shift: the lane value is cast to the unsigned
 * type first so sign bits are shifted in as zeros, even for signed vectors. */
#define VEC_GENERIC_LRSHIFT(sign, bits, size) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_lrshift(v##sign##int##bits##x##size vec1, vuint##bits##x##size vec2) \
	{ \
		VEC_GENERIC_OPERATION(vec_urshift((vec_uint##bits)vec1.generic[i], vec2.generic[i]), sign, bits, size); \
	}

/* Lane-wise minimum via mask arithmetic: keep vec1's lanes where
 * vec1 < vec2 (mask is all-ones there), vec2's lanes elsewhere.
 * Relies on the comparison producing all-ones/all-zeros masks. */
#define VEC_GENERIC_MIN(sign, bits, size) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_min(v##sign##int##bits##x##size vec1, v##sign##int##bits##x##size vec2) \
	{ \
		v##sign##int##bits##x##size cmplt = v##sign##int##bits##x##size##_cmplt(vec1, vec2); \
	\
		v##sign##int##bits##x##size a = v##sign##int##bits##x##size##_and(vec1, cmplt); \
		v##sign##int##bits##x##size b = v##sign##int##bits##x##size##_and(vec2, v##sign##int##bits##x##size##_not(cmplt)); \
	\
		return v##sign##int##bits##x##size##_or(a, b); \
	}

/* Lane-wise maximum via mask arithmetic: keep vec1's lanes where
 * vec1 > vec2 (mask is all-ones there), vec2's lanes elsewhere.
 * Fix: the mask local was misleadingly named `cmplt` although it holds
 * the result of _cmpgt (copy-paste from VEC_GENERIC_MIN); renamed to
 * `cmpgt`.  Macro-local rename only — the expansion is behaviorally
 * identical. */
#define VEC_GENERIC_MAX(sign, bits, size) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_max(v##sign##int##bits##x##size vec1, v##sign##int##bits##x##size vec2) \
	{ \
		v##sign##int##bits##x##size cmpgt = v##sign##int##bits##x##size##_cmpgt(vec1, vec2); \
	\
		v##sign##int##bits##x##size a = v##sign##int##bits##x##size##_and(vec1, cmpgt); \
		v##sign##int##bits##x##size b = v##sign##int##bits##x##size##_and(vec2, v##sign##int##bits##x##size##_not(cmpgt)); \
	\
		return v##sign##int##bits##x##size##_or(a, b); \
	}

/* "Double" implementations: a size-N vector is represented as two
 * half-size vectors stored in .generic[0] and .generic[1]; each op
 * delegates to the corresponding half-size op.  Splat fills both halves. */
#define VEC_GENERIC_DBL_SPLAT(sign, bits, size, halfsize) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_splat(vec_##sign##int##bits x) \
	{ \
		v##sign##int##bits##x##size vec; \
	\
		vec.generic[0] = v##sign##int##bits##x##halfsize##_splat(x); \
		vec.generic[1] = v##sign##int##bits##x##halfsize##_splat(x); \
	\
		return vec; \
	}

/* Double-size load: the first halfsize elements go into generic[0], the
 * next halfsize into generic[1], each via the half-size load. */
#define VEC_GENERIC_DBL_LOAD_EX(name, sign, bits, size, halfsize) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_##name(const vec_##sign##int##bits x[size]) \
	{ \
		v##sign##int##bits##x##size vec; \
	\
		vec.generic[0] = v##sign##int##bits##x##halfsize##_##name(x); \
		vec.generic[1] = v##sign##int##bits##x##halfsize##_##name(x + halfsize); \
	\
		return vec; \
	}

/* Aligned/unaligned double-size loads share VEC_GENERIC_DBL_LOAD_EX. */
#define VEC_GENERIC_DBL_LOAD(sign, bits, size, halfsize) VEC_GENERIC_DBL_LOAD_EX(load, sign, bits, size, halfsize)
#define VEC_GENERIC_DBL_LOAD_ALIGNED(sign, bits, size, halfsize) VEC_GENERIC_DBL_LOAD_EX(load_aligned, sign, bits, size, halfsize)

/* Double-size store: generic[0] writes the first halfsize elements,
 * generic[1] the next halfsize, each via the half-size store. */
#define VEC_GENERIC_DBL_STORE_EX(name, sign, bits, size, halfsize) \
	VEC_FUNC_IMPL void v##sign##int##bits##x##size##_##name(v##sign##int##bits##x##size vec, vec_##sign##int##bits x[size]) \
	{ \
		v##sign##int##bits##x##halfsize##_##name(vec.generic[0], x); \
		v##sign##int##bits##x##halfsize##_##name(vec.generic[1], x + halfsize); \
	}

/* Aligned/unaligned double-size stores share VEC_GENERIC_DBL_STORE_EX. */
#define VEC_GENERIC_DBL_STORE(sign, bits, size, halfsize) VEC_GENERIC_DBL_STORE_EX(store, sign, bits, size, halfsize)
#define VEC_GENERIC_DBL_STORE_ALIGNED(sign, bits, size, halfsize) VEC_GENERIC_DBL_STORE_EX(store_aligned, sign, bits, size, halfsize)

/* Generic double-size binary op: apply the half-size op to each half.
 * `secondsign` is the sign prefix of the SECOND operand's type — it is
 * `u` for the shift ops (shift counts are unsigned vectors) and equal to
 * `sign` for everything else; see the wrappers below. */
#define VEC_GENERIC_DBL_OP(name, sign, bits, size, halfsize, secondsign) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_##name(v##sign##int##bits##x##size vec1, v##secondsign##int##bits##x##size vec2) \
	{ \
		vec1.generic[0] = v##sign##int##bits##x##halfsize##_##name(vec1.generic[0], vec2.generic[0]); \
		vec1.generic[1] = v##sign##int##bits##x##halfsize##_##name(vec1.generic[1], vec2.generic[1]); \
	\
		return vec1; \
	}

/* Per-operation wrappers around VEC_GENERIC_DBL_OP.  Note the shift ops
 * pass `u` as secondsign: their second operand is an unsigned vector of
 * per-lane shift amounts. */
#define VEC_GENERIC_DBL_ADD(sign, bits, size, halfsize) VEC_GENERIC_DBL_OP(add, sign, bits, size, halfsize, sign)
#define VEC_GENERIC_DBL_SUB(sign, bits, size, halfsize) VEC_GENERIC_DBL_OP(sub, sign, bits, size, halfsize, sign)
#define VEC_GENERIC_DBL_MUL(sign, bits, size, halfsize) VEC_GENERIC_DBL_OP(mul, sign, bits, size, halfsize, sign)
#define VEC_GENERIC_DBL_DIV(sign, bits, size, halfsize) VEC_GENERIC_DBL_OP(div, sign, bits, size, halfsize, sign)
#define VEC_GENERIC_DBL_AVG(sign, bits, size, halfsize) VEC_GENERIC_DBL_OP(avg, sign, bits, size, halfsize, sign)
#define VEC_GENERIC_DBL_LSHIFT(sign, bits, size, halfsize) VEC_GENERIC_DBL_OP(lshift, sign, bits, size, halfsize, u)
#define VEC_GENERIC_DBL_RSHIFT(sign, bits, size, halfsize) VEC_GENERIC_DBL_OP(rshift, sign, bits, size, halfsize, u)
#define VEC_GENERIC_DBL_LRSHIFT(sign, bits, size, halfsize) VEC_GENERIC_DBL_OP(lrshift, sign, bits, size, halfsize, u)
#define VEC_GENERIC_DBL_AND(sign, bits, size, halfsize) VEC_GENERIC_DBL_OP(and, sign, bits, size, halfsize, sign)
#define VEC_GENERIC_DBL_OR(sign, bits, size, halfsize) VEC_GENERIC_DBL_OP(or, sign, bits, size, halfsize, sign)
#define VEC_GENERIC_DBL_XOR(sign, bits, size, halfsize) VEC_GENERIC_DBL_OP(xor, sign, bits, size, halfsize, sign)
#define VEC_GENERIC_DBL_MIN(sign, bits, size, halfsize) VEC_GENERIC_DBL_OP(min, sign, bits, size, halfsize, sign)
#define VEC_GENERIC_DBL_MAX(sign, bits, size, halfsize) VEC_GENERIC_DBL_OP(max, sign, bits, size, halfsize, sign)
#define VEC_GENERIC_DBL_CMPLT(sign, bits, size, halfsize) VEC_GENERIC_DBL_OP(cmplt, sign, bits, size, halfsize, sign)
#define VEC_GENERIC_DBL_CMPLE(sign, bits, size, halfsize) VEC_GENERIC_DBL_OP(cmple, sign, bits, size, halfsize, sign)
#define VEC_GENERIC_DBL_CMPEQ(sign, bits, size, halfsize) VEC_GENERIC_DBL_OP(cmpeq, sign, bits, size, halfsize, sign)
#define VEC_GENERIC_DBL_CMPGE(sign, bits, size, halfsize) VEC_GENERIC_DBL_OP(cmpge, sign, bits, size, halfsize, sign)
#define VEC_GENERIC_DBL_CMPGT(sign, bits, size, halfsize) VEC_GENERIC_DBL_OP(cmpgt, sign, bits, size, halfsize, sign)

/* Double-size bitwise NOT: apply the half-size NOT to each half (unary,
 * so it cannot use VEC_GENERIC_DBL_OP). */
#define VEC_GENERIC_DBL_NOT(sign, bits, size, halfsize) \
	VEC_FUNC_IMPL v##sign##int##bits##x##size v##sign##int##bits##x##size##_not(v##sign##int##bits##x##size vec) \
	{ \
		vec.generic[0] = v##sign##int##bits##x##halfsize##_not(vec.generic[0]); \
		vec.generic[1] = v##sign##int##bits##x##halfsize##_not(vec.generic[1]); \
	\
		return vec; \
	}

/* ------------------------------------------------------------------------ */
/* PREPROCESSOR HELL INCOMING */



/* vint8x2 -- signed 8-bit x2, scalar-loop fallbacks.
 * NOTE(review): this header comment previously read "vuint8x2"; the
 * signed/unsigned section labels were swapped throughout the file. */

#ifndef VINT8x2_SPLAT_DEFINED
VEC_GENERIC_SPLAT(/* nothing */, 8, 2)
# define VINT8x2_SPLAT_DEFINED
#endif
#ifndef VINT8x2_LOAD_ALIGNED_DEFINED
VEC_GENERIC_LOAD_ALIGNED(/* nothing */, 8, 2)
# define VINT8x2_LOAD_ALIGNED_DEFINED
#endif
#ifndef VINT8x2_LOAD_DEFINED
VEC_GENERIC_LOAD(/* nothing */, 8, 2)
# define VINT8x2_LOAD_DEFINED
#endif
#ifndef VINT8x2_STORE_ALIGNED_DEFINED
VEC_GENERIC_STORE_ALIGNED(/* nothing */, 8, 2)
# define VINT8x2_STORE_ALIGNED_DEFINED
#endif
#ifndef VINT8x2_STORE_DEFINED
VEC_GENERIC_STORE(/* nothing */, 8, 2)
# define VINT8x2_STORE_DEFINED
#endif
#ifndef VINT8x2_ADD_DEFINED
VEC_GENERIC_ADD(/* nothing */, 8, 2)
# define VINT8x2_ADD_DEFINED
#endif
#ifndef VINT8x2_SUB_DEFINED
VEC_GENERIC_SUB(/* nothing */, 8, 2)
# define VINT8x2_SUB_DEFINED
#endif
#ifndef VINT8x2_MUL_DEFINED
VEC_GENERIC_MUL(/* nothing */, 8, 2)
# define VINT8x2_MUL_DEFINED
#endif
#ifndef VINT8x2_DIV_DEFINED
VEC_GENERIC_DIV(/* nothing */, 8, 2)
# define VINT8x2_DIV_DEFINED
#endif
#ifndef VINT8x2_AVG_DEFINED
VEC_GENERIC_AVG(/* nothing */, 8, 2)
# define VINT8x2_AVG_DEFINED
#endif
#ifndef VINT8x2_AND_DEFINED
VEC_GENERIC_AND(/* nothing */, 8, 2)
# define VINT8x2_AND_DEFINED
#endif
#ifndef VINT8x2_OR_DEFINED
VEC_GENERIC_OR(/* nothing */, 8, 2)
# define VINT8x2_OR_DEFINED
#endif
#ifndef VINT8x2_XOR_DEFINED
VEC_GENERIC_XOR(/* nothing */, 8, 2)
# define VINT8x2_XOR_DEFINED
#endif
#ifndef VINT8x2_NOT_DEFINED
VEC_GENERIC_NOT(/* nothing */, 8, 2)
# define VINT8x2_NOT_DEFINED
#endif
#ifndef VINT8x2_CMPLT_DEFINED
VEC_GENERIC_CMPLT(/* nothing */, 8, 2)
# define VINT8x2_CMPLT_DEFINED
#endif
#ifndef VINT8x2_CMPEQ_DEFINED
VEC_GENERIC_CMPEQ(/* nothing */, 8, 2)
# define VINT8x2_CMPEQ_DEFINED
#endif
#ifndef VINT8x2_CMPGT_DEFINED
VEC_GENERIC_CMPGT(/* nothing */, 8, 2)
# define VINT8x2_CMPGT_DEFINED
#endif
#ifndef VINT8x2_CMPLE_DEFINED
VEC_GENERIC_CMPLE(/* nothing */, 8, 2)
# define VINT8x2_CMPLE_DEFINED
#endif
#ifndef VINT8x2_CMPGE_DEFINED
VEC_GENERIC_CMPGE(/* nothing */, 8, 2)
# define VINT8x2_CMPGE_DEFINED
#endif
#ifndef VINT8x2_MIN_DEFINED
VEC_GENERIC_MIN(/* nothing */, 8, 2)
# define VINT8x2_MIN_DEFINED
#endif
#ifndef VINT8x2_MAX_DEFINED
VEC_GENERIC_MAX(/* nothing */, 8, 2)
# define VINT8x2_MAX_DEFINED
#endif
#ifndef VINT8x2_RSHIFT_DEFINED
VEC_GENERIC_RSHIFT(/* nothing */, 8, 2)
# define VINT8x2_RSHIFT_DEFINED
#endif
#ifndef VINT8x2_LRSHIFT_DEFINED
VEC_GENERIC_LRSHIFT(/* nothing */, 8, 2)
# define VINT8x2_LRSHIFT_DEFINED
#endif
#ifndef VINT8x2_LSHIFT_DEFINED
VEC_GENERIC_LSHIFT(/* nothing */, 8, 2)
# define VINT8x2_LSHIFT_DEFINED
#endif


/* vuint8x2 -- unsigned 8-bit x2, scalar-loop fallbacks.
 * NOTE(review): this header comment previously read "vint8x2"; the
 * signed/unsigned section labels were swapped throughout the file. */

#ifndef VUINT8x2_SPLAT_DEFINED
VEC_GENERIC_SPLAT(u, 8, 2)
# define VUINT8x2_SPLAT_DEFINED
#endif
#ifndef VUINT8x2_LOAD_ALIGNED_DEFINED
VEC_GENERIC_LOAD_ALIGNED(u, 8, 2)
# define VUINT8x2_LOAD_ALIGNED_DEFINED
#endif
#ifndef VUINT8x2_LOAD_DEFINED
VEC_GENERIC_LOAD(u, 8, 2)
# define VUINT8x2_LOAD_DEFINED
#endif
#ifndef VUINT8x2_STORE_ALIGNED_DEFINED
VEC_GENERIC_STORE_ALIGNED(u, 8, 2)
# define VUINT8x2_STORE_ALIGNED_DEFINED
#endif
#ifndef VUINT8x2_STORE_DEFINED
VEC_GENERIC_STORE(u, 8, 2)
# define VUINT8x2_STORE_DEFINED
#endif
#ifndef VUINT8x2_ADD_DEFINED
VEC_GENERIC_ADD(u, 8, 2)
# define VUINT8x2_ADD_DEFINED
#endif
#ifndef VUINT8x2_SUB_DEFINED
VEC_GENERIC_SUB(u, 8, 2)
# define VUINT8x2_SUB_DEFINED
#endif
#ifndef VUINT8x2_MUL_DEFINED
VEC_GENERIC_MUL(u, 8, 2)
# define VUINT8x2_MUL_DEFINED
#endif
#ifndef VUINT8x2_DIV_DEFINED
VEC_GENERIC_DIV(u, 8, 2)
# define VUINT8x2_DIV_DEFINED
#endif
#ifndef VUINT8x2_AVG_DEFINED
VEC_GENERIC_AVG(u, 8, 2)
# define VUINT8x2_AVG_DEFINED
#endif
#ifndef VUINT8x2_AND_DEFINED
VEC_GENERIC_AND(u, 8, 2)
# define VUINT8x2_AND_DEFINED
#endif
#ifndef VUINT8x2_OR_DEFINED
VEC_GENERIC_OR(u, 8, 2)
# define VUINT8x2_OR_DEFINED
#endif
#ifndef VUINT8x2_XOR_DEFINED
VEC_GENERIC_XOR(u, 8, 2)
# define VUINT8x2_XOR_DEFINED
#endif
#ifndef VUINT8x2_NOT_DEFINED
VEC_GENERIC_NOT(u, 8, 2)
# define VUINT8x2_NOT_DEFINED
#endif
#ifndef VUINT8x2_CMPLT_DEFINED
VEC_GENERIC_CMPLT(u, 8, 2)
# define VUINT8x2_CMPLT_DEFINED
#endif
#ifndef VUINT8x2_CMPEQ_DEFINED
VEC_GENERIC_CMPEQ(u, 8, 2)
# define VUINT8x2_CMPEQ_DEFINED
#endif
#ifndef VUINT8x2_CMPGT_DEFINED
VEC_GENERIC_CMPGT(u, 8, 2)
# define VUINT8x2_CMPGT_DEFINED
#endif
#ifndef VUINT8x2_CMPLE_DEFINED
VEC_GENERIC_CMPLE(u, 8, 2)
# define VUINT8x2_CMPLE_DEFINED
#endif
#ifndef VUINT8x2_CMPGE_DEFINED
VEC_GENERIC_CMPGE(u, 8, 2)
# define VUINT8x2_CMPGE_DEFINED
#endif
#ifndef VUINT8x2_MIN_DEFINED
VEC_GENERIC_MIN(u, 8, 2)
# define VUINT8x2_MIN_DEFINED
#endif
#ifndef VUINT8x2_MAX_DEFINED
VEC_GENERIC_MAX(u, 8, 2)
# define VUINT8x2_MAX_DEFINED
#endif
#ifndef VUINT8x2_RSHIFT_DEFINED
VEC_GENERIC_RSHIFT(u, 8, 2)
# define VUINT8x2_RSHIFT_DEFINED
#endif
#ifndef VUINT8x2_LRSHIFT_DEFINED
VEC_GENERIC_LRSHIFT(u, 8, 2)
# define VUINT8x2_LRSHIFT_DEFINED
#endif
#ifndef VUINT8x2_LSHIFT_DEFINED
VEC_GENERIC_LSHIFT(u, 8, 2)
# define VUINT8x2_LSHIFT_DEFINED
#endif


/* vint8x4 -- signed 8-bit x4, built from two vint8x2 halves.
 * NOTE(review): this header comment previously read "vuint8x4"; the
 * signed/unsigned section labels were swapped throughout the file. */

#ifndef VINT8x4_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(/* nothing */, 8, 4, 2)
# define VINT8x4_SPLAT_DEFINED
#endif

#ifndef VINT8x4_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(/* nothing */, 8, 4, 2)
# define VINT8x4_LOAD_ALIGNED_DEFINED
#endif

#ifndef VINT8x4_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(/* nothing */, 8, 4, 2)
# define VINT8x4_LOAD_DEFINED
#endif

#ifndef VINT8x4_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(/* nothing */, 8, 4, 2)
# define VINT8x4_STORE_ALIGNED_DEFINED
#endif

#ifndef VINT8x4_STORE_DEFINED
VEC_GENERIC_DBL_STORE(/* nothing */, 8, 4, 2)
# define VINT8x4_STORE_DEFINED
#endif

#ifndef VINT8x4_ADD_DEFINED
VEC_GENERIC_DBL_ADD(/* nothing */, 8, 4, 2)
# define VINT8x4_ADD_DEFINED
#endif

#ifndef VINT8x4_SUB_DEFINED
VEC_GENERIC_DBL_SUB(/* nothing */, 8, 4, 2)
# define VINT8x4_SUB_DEFINED
#endif

#ifndef VINT8x4_MUL_DEFINED
VEC_GENERIC_DBL_MUL(/* nothing */, 8, 4, 2)
# define VINT8x4_MUL_DEFINED
#endif

#ifndef VINT8x4_DIV_DEFINED
VEC_GENERIC_DBL_DIV(/* nothing */, 8, 4, 2)
# define VINT8x4_DIV_DEFINED
#endif

#ifndef VINT8x4_AVG_DEFINED
VEC_GENERIC_DBL_AVG(/* nothing */, 8, 4, 2)
# define VINT8x4_AVG_DEFINED
#endif

#ifndef VINT8x4_AND_DEFINED
VEC_GENERIC_DBL_AND(/* nothing */, 8, 4, 2)
# define VINT8x4_AND_DEFINED
#endif

#ifndef VINT8x4_OR_DEFINED
VEC_GENERIC_DBL_OR(/* nothing */, 8, 4, 2)
# define VINT8x4_OR_DEFINED
#endif

#ifndef VINT8x4_XOR_DEFINED
VEC_GENERIC_DBL_XOR(/* nothing */, 8, 4, 2)
# define VINT8x4_XOR_DEFINED
#endif

#ifndef VINT8x4_NOT_DEFINED
VEC_GENERIC_DBL_NOT(/* nothing */, 8, 4, 2)
# define VINT8x4_NOT_DEFINED
#endif

#ifndef VINT8x4_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(/* nothing */, 8, 4, 2)
# define VINT8x4_CMPLT_DEFINED
#endif

#ifndef VINT8x4_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(/* nothing */, 8, 4, 2)
# define VINT8x4_CMPEQ_DEFINED
#endif

#ifndef VINT8x4_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(/* nothing */, 8, 4, 2)
# define VINT8x4_CMPGT_DEFINED
#endif

#ifndef VINT8x4_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(/* nothing */, 8, 4, 2)
# define VINT8x4_CMPLE_DEFINED
#endif

#ifndef VINT8x4_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(/* nothing */, 8, 4, 2)
# define VINT8x4_CMPGE_DEFINED
#endif

#ifndef VINT8x4_MIN_DEFINED
VEC_GENERIC_DBL_MIN(/* nothing */, 8, 4, 2)
# define VINT8x4_MIN_DEFINED
#endif

#ifndef VINT8x4_MAX_DEFINED
VEC_GENERIC_DBL_MAX(/* nothing */, 8, 4, 2)
# define VINT8x4_MAX_DEFINED
#endif

#ifndef VINT8x4_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(/* nothing */, 8, 4, 2)
# define VINT8x4_RSHIFT_DEFINED
#endif

#ifndef VINT8x4_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(/* nothing */, 8, 4, 2)
# define VINT8x4_LRSHIFT_DEFINED
#endif

#ifndef VINT8x4_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(/* nothing */, 8, 4, 2)
# define VINT8x4_LSHIFT_DEFINED
#endif



/* vuint8x4 -- unsigned 8-bit x4, built from two vuint8x2 halves.
 * NOTE(review): this header comment previously read "vint8x4"; the
 * signed/unsigned section labels were swapped throughout the file. */

#ifndef VUINT8x4_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(u, 8, 4, 2)
# define VUINT8x4_SPLAT_DEFINED
#endif

#ifndef VUINT8x4_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(u, 8, 4, 2)
# define VUINT8x4_LOAD_ALIGNED_DEFINED
#endif

#ifndef VUINT8x4_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(u, 8, 4, 2)
# define VUINT8x4_LOAD_DEFINED
#endif

#ifndef VUINT8x4_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(u, 8, 4, 2)
# define VUINT8x4_STORE_ALIGNED_DEFINED
#endif

#ifndef VUINT8x4_STORE_DEFINED
VEC_GENERIC_DBL_STORE(u, 8, 4, 2)
# define VUINT8x4_STORE_DEFINED
#endif

#ifndef VUINT8x4_ADD_DEFINED
VEC_GENERIC_DBL_ADD(u, 8, 4, 2)
# define VUINT8x4_ADD_DEFINED
#endif

#ifndef VUINT8x4_SUB_DEFINED
VEC_GENERIC_DBL_SUB(u, 8, 4, 2)
# define VUINT8x4_SUB_DEFINED
#endif

#ifndef VUINT8x4_MUL_DEFINED
VEC_GENERIC_DBL_MUL(u, 8, 4, 2)
# define VUINT8x4_MUL_DEFINED
#endif

#ifndef VUINT8x4_DIV_DEFINED
VEC_GENERIC_DBL_DIV(u, 8, 4, 2)
# define VUINT8x4_DIV_DEFINED
#endif

#ifndef VUINT8x4_AVG_DEFINED
VEC_GENERIC_DBL_AVG(u, 8, 4, 2)
# define VUINT8x4_AVG_DEFINED
#endif

#ifndef VUINT8x4_AND_DEFINED
VEC_GENERIC_DBL_AND(u, 8, 4, 2)
# define VUINT8x4_AND_DEFINED
#endif

#ifndef VUINT8x4_OR_DEFINED
VEC_GENERIC_DBL_OR(u, 8, 4, 2)
# define VUINT8x4_OR_DEFINED
#endif

#ifndef VUINT8x4_XOR_DEFINED
VEC_GENERIC_DBL_XOR(u, 8, 4, 2)
# define VUINT8x4_XOR_DEFINED
#endif

#ifndef VUINT8x4_NOT_DEFINED
VEC_GENERIC_DBL_NOT(u, 8, 4, 2)
# define VUINT8x4_NOT_DEFINED
#endif

#ifndef VUINT8x4_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(u, 8, 4, 2)
# define VUINT8x4_CMPLT_DEFINED
#endif

#ifndef VUINT8x4_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(u, 8, 4, 2)
# define VUINT8x4_CMPEQ_DEFINED
#endif

#ifndef VUINT8x4_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(u, 8, 4, 2)
# define VUINT8x4_CMPGT_DEFINED
#endif

#ifndef VUINT8x4_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(u, 8, 4, 2)
# define VUINT8x4_CMPLE_DEFINED
#endif

#ifndef VUINT8x4_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(u, 8, 4, 2)
# define VUINT8x4_CMPGE_DEFINED
#endif

#ifndef VUINT8x4_MIN_DEFINED
VEC_GENERIC_DBL_MIN(u, 8, 4, 2)
# define VUINT8x4_MIN_DEFINED
#endif

#ifndef VUINT8x4_MAX_DEFINED
VEC_GENERIC_DBL_MAX(u, 8, 4, 2)
# define VUINT8x4_MAX_DEFINED
#endif

#ifndef VUINT8x4_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(u, 8, 4, 2)
# define VUINT8x4_RSHIFT_DEFINED
#endif

#ifndef VUINT8x4_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(u, 8, 4, 2)
# define VUINT8x4_LRSHIFT_DEFINED
#endif

#ifndef VUINT8x4_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(u, 8, 4, 2)
# define VUINT8x4_LSHIFT_DEFINED
#endif



/* vint8x8 -- signed 8-bit x8, built from two vint8x4 halves.
 * NOTE(review): this header comment previously read "vuint8x8"; the
 * signed/unsigned section labels were swapped throughout the file. */

#ifndef VINT8x8_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(/* nothing */, 8, 8, 4)
# define VINT8x8_SPLAT_DEFINED
#endif

#ifndef VINT8x8_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(/* nothing */, 8, 8, 4)
# define VINT8x8_LOAD_ALIGNED_DEFINED
#endif

#ifndef VINT8x8_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(/* nothing */, 8, 8, 4)
# define VINT8x8_LOAD_DEFINED
#endif

#ifndef VINT8x8_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(/* nothing */, 8, 8, 4)
# define VINT8x8_STORE_ALIGNED_DEFINED
#endif

#ifndef VINT8x8_STORE_DEFINED
VEC_GENERIC_DBL_STORE(/* nothing */, 8, 8, 4)
# define VINT8x8_STORE_DEFINED
#endif

#ifndef VINT8x8_ADD_DEFINED
VEC_GENERIC_DBL_ADD(/* nothing */, 8, 8, 4)
# define VINT8x8_ADD_DEFINED
#endif

#ifndef VINT8x8_SUB_DEFINED
VEC_GENERIC_DBL_SUB(/* nothing */, 8, 8, 4)
# define VINT8x8_SUB_DEFINED
#endif

#ifndef VINT8x8_MUL_DEFINED
VEC_GENERIC_DBL_MUL(/* nothing */, 8, 8, 4)
# define VINT8x8_MUL_DEFINED
#endif

#ifndef VINT8x8_DIV_DEFINED
VEC_GENERIC_DBL_DIV(/* nothing */, 8, 8, 4)
# define VINT8x8_DIV_DEFINED
#endif

#ifndef VINT8x8_AVG_DEFINED
VEC_GENERIC_DBL_AVG(/* nothing */, 8, 8, 4)
# define VINT8x8_AVG_DEFINED
#endif

#ifndef VINT8x8_AND_DEFINED
VEC_GENERIC_DBL_AND(/* nothing */, 8, 8, 4)
# define VINT8x8_AND_DEFINED
#endif

#ifndef VINT8x8_OR_DEFINED
VEC_GENERIC_DBL_OR(/* nothing */, 8, 8, 4)
# define VINT8x8_OR_DEFINED
#endif

#ifndef VINT8x8_XOR_DEFINED
VEC_GENERIC_DBL_XOR(/* nothing */, 8, 8, 4)
# define VINT8x8_XOR_DEFINED
#endif

#ifndef VINT8x8_NOT_DEFINED
VEC_GENERIC_DBL_NOT(/* nothing */, 8, 8, 4)
# define VINT8x8_NOT_DEFINED
#endif

#ifndef VINT8x8_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(/* nothing */, 8, 8, 4)
# define VINT8x8_CMPLT_DEFINED
#endif

#ifndef VINT8x8_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(/* nothing */, 8, 8, 4)
# define VINT8x8_CMPEQ_DEFINED
#endif

#ifndef VINT8x8_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(/* nothing */, 8, 8, 4)
# define VINT8x8_CMPGT_DEFINED
#endif

#ifndef VINT8x8_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(/* nothing */, 8, 8, 4)
# define VINT8x8_CMPLE_DEFINED
#endif

#ifndef VINT8x8_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(/* nothing */, 8, 8, 4)
# define VINT8x8_CMPGE_DEFINED
#endif

#ifndef VINT8x8_MIN_DEFINED
VEC_GENERIC_DBL_MIN(/* nothing */, 8, 8, 4)
# define VINT8x8_MIN_DEFINED
#endif

#ifndef VINT8x8_MAX_DEFINED
VEC_GENERIC_DBL_MAX(/* nothing */, 8, 8, 4)
# define VINT8x8_MAX_DEFINED
#endif

#ifndef VINT8x8_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(/* nothing */, 8, 8, 4)
# define VINT8x8_RSHIFT_DEFINED
#endif

#ifndef VINT8x8_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(/* nothing */, 8, 8, 4)
# define VINT8x8_LRSHIFT_DEFINED
#endif

#ifndef VINT8x8_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(/* nothing */, 8, 8, 4)
# define VINT8x8_LSHIFT_DEFINED
#endif



/* vuint8x8 -- unsigned 8-bit x8, built from two vuint8x4 halves.
 * NOTE(review): this header comment previously read "vint8x8"; the
 * signed/unsigned section labels were swapped throughout the file. */

#ifndef VUINT8x8_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(u, 8, 8, 4)
# define VUINT8x8_SPLAT_DEFINED
#endif

#ifndef VUINT8x8_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(u, 8, 8, 4)
# define VUINT8x8_LOAD_ALIGNED_DEFINED
#endif

#ifndef VUINT8x8_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(u, 8, 8, 4)
# define VUINT8x8_LOAD_DEFINED
#endif

#ifndef VUINT8x8_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(u, 8, 8, 4)
# define VUINT8x8_STORE_ALIGNED_DEFINED
#endif

#ifndef VUINT8x8_STORE_DEFINED
VEC_GENERIC_DBL_STORE(u, 8, 8, 4)
# define VUINT8x8_STORE_DEFINED
#endif

#ifndef VUINT8x8_ADD_DEFINED
VEC_GENERIC_DBL_ADD(u, 8, 8, 4)
# define VUINT8x8_ADD_DEFINED
#endif

#ifndef VUINT8x8_SUB_DEFINED
VEC_GENERIC_DBL_SUB(u, 8, 8, 4)
# define VUINT8x8_SUB_DEFINED
#endif

#ifndef VUINT8x8_MUL_DEFINED
VEC_GENERIC_DBL_MUL(u, 8, 8, 4)
# define VUINT8x8_MUL_DEFINED
#endif

#ifndef VUINT8x8_DIV_DEFINED
VEC_GENERIC_DBL_DIV(u, 8, 8, 4)
# define VUINT8x8_DIV_DEFINED
#endif

#ifndef VUINT8x8_AVG_DEFINED
VEC_GENERIC_DBL_AVG(u, 8, 8, 4)
# define VUINT8x8_AVG_DEFINED
#endif

#ifndef VUINT8x8_AND_DEFINED
VEC_GENERIC_DBL_AND(u, 8, 8, 4)
# define VUINT8x8_AND_DEFINED
#endif

#ifndef VUINT8x8_OR_DEFINED
VEC_GENERIC_DBL_OR(u, 8, 8, 4)
# define VUINT8x8_OR_DEFINED
#endif

#ifndef VUINT8x8_XOR_DEFINED
VEC_GENERIC_DBL_XOR(u, 8, 8, 4)
# define VUINT8x8_XOR_DEFINED
#endif

#ifndef VUINT8x8_NOT_DEFINED
VEC_GENERIC_DBL_NOT(u, 8, 8, 4)
# define VUINT8x8_NOT_DEFINED
#endif

#ifndef VUINT8x8_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(u, 8, 8, 4)
# define VUINT8x8_CMPLT_DEFINED
#endif

#ifndef VUINT8x8_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(u, 8, 8, 4)
# define VUINT8x8_CMPEQ_DEFINED
#endif

#ifndef VUINT8x8_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(u, 8, 8, 4)
# define VUINT8x8_CMPGT_DEFINED
#endif

#ifndef VUINT8x8_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(u, 8, 8, 4)
# define VUINT8x8_CMPLE_DEFINED
#endif

#ifndef VUINT8x8_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(u, 8, 8, 4)
# define VUINT8x8_CMPGE_DEFINED
#endif

#ifndef VUINT8x8_MIN_DEFINED
VEC_GENERIC_DBL_MIN(u, 8, 8, 4)
# define VUINT8x8_MIN_DEFINED
#endif

#ifndef VUINT8x8_MAX_DEFINED
VEC_GENERIC_DBL_MAX(u, 8, 8, 4)
# define VUINT8x8_MAX_DEFINED
#endif

#ifndef VUINT8x8_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(u, 8, 8, 4)
# define VUINT8x8_RSHIFT_DEFINED
#endif

#ifndef VUINT8x8_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(u, 8, 8, 4)
# define VUINT8x8_LRSHIFT_DEFINED
#endif

#ifndef VUINT8x8_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(u, 8, 8, 4)
# define VUINT8x8_LSHIFT_DEFINED
#endif



/* vint8x16 -- signed 8-bit x16, built from two vint8x8 halves.
 * NOTE(review): this header comment previously read "vuint8x16"; the
 * signed/unsigned section labels were swapped throughout the file. */

#ifndef VINT8x16_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(/* nothing */, 8, 16, 8)
# define VINT8x16_SPLAT_DEFINED
#endif

#ifndef VINT8x16_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(/* nothing */, 8, 16, 8)
# define VINT8x16_LOAD_ALIGNED_DEFINED
#endif

#ifndef VINT8x16_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(/* nothing */, 8, 16, 8)
# define VINT8x16_LOAD_DEFINED
#endif

#ifndef VINT8x16_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(/* nothing */, 8, 16, 8)
# define VINT8x16_STORE_ALIGNED_DEFINED
#endif

#ifndef VINT8x16_STORE_DEFINED
VEC_GENERIC_DBL_STORE(/* nothing */, 8, 16, 8)
# define VINT8x16_STORE_DEFINED
#endif

#ifndef VINT8x16_ADD_DEFINED
VEC_GENERIC_DBL_ADD(/* nothing */, 8, 16, 8)
# define VINT8x16_ADD_DEFINED
#endif

#ifndef VINT8x16_SUB_DEFINED
VEC_GENERIC_DBL_SUB(/* nothing */, 8, 16, 8)
# define VINT8x16_SUB_DEFINED
#endif

#ifndef VINT8x16_MUL_DEFINED
VEC_GENERIC_DBL_MUL(/* nothing */, 8, 16, 8)
# define VINT8x16_MUL_DEFINED
#endif

#ifndef VINT8x16_DIV_DEFINED
VEC_GENERIC_DBL_DIV(/* nothing */, 8, 16, 8)
# define VINT8x16_DIV_DEFINED
#endif

#ifndef VINT8x16_AVG_DEFINED
VEC_GENERIC_DBL_AVG(/* nothing */, 8, 16, 8)
# define VINT8x16_AVG_DEFINED
#endif

#ifndef VINT8x16_AND_DEFINED
VEC_GENERIC_DBL_AND(/* nothing */, 8, 16, 8)
# define VINT8x16_AND_DEFINED
#endif

#ifndef VINT8x16_OR_DEFINED
VEC_GENERIC_DBL_OR(/* nothing */, 8, 16, 8)
# define VINT8x16_OR_DEFINED
#endif

#ifndef VINT8x16_XOR_DEFINED
VEC_GENERIC_DBL_XOR(/* nothing */, 8, 16, 8)
# define VINT8x16_XOR_DEFINED
#endif

#ifndef VINT8x16_NOT_DEFINED
VEC_GENERIC_DBL_NOT(/* nothing */, 8, 16, 8)
# define VINT8x16_NOT_DEFINED
#endif

#ifndef VINT8x16_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(/* nothing */, 8, 16, 8)
# define VINT8x16_CMPLT_DEFINED
#endif

#ifndef VINT8x16_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(/* nothing */, 8, 16, 8)
# define VINT8x16_CMPEQ_DEFINED
#endif

#ifndef VINT8x16_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(/* nothing */, 8, 16, 8)
# define VINT8x16_CMPGT_DEFINED
#endif

#ifndef VINT8x16_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(/* nothing */, 8, 16, 8)
# define VINT8x16_CMPLE_DEFINED
#endif

#ifndef VINT8x16_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(/* nothing */, 8, 16, 8)
# define VINT8x16_CMPGE_DEFINED
#endif

#ifndef VINT8x16_MIN_DEFINED
VEC_GENERIC_DBL_MIN(/* nothing */, 8, 16, 8)
# define VINT8x16_MIN_DEFINED
#endif

#ifndef VINT8x16_MAX_DEFINED
VEC_GENERIC_DBL_MAX(/* nothing */, 8, 16, 8)
# define VINT8x16_MAX_DEFINED
#endif

#ifndef VINT8x16_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(/* nothing */, 8, 16, 8)
# define VINT8x16_RSHIFT_DEFINED
#endif

#ifndef VINT8x16_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(/* nothing */, 8, 16, 8)
# define VINT8x16_LRSHIFT_DEFINED
#endif

#ifndef VINT8x16_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(/* nothing */, 8, 16, 8)
# define VINT8x16_LSHIFT_DEFINED
#endif



/* vuint8x16 */

#ifndef VUINT8x16_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(u, 8, 16, 8)
# define VUINT8x16_SPLAT_DEFINED
#endif

#ifndef VUINT8x16_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(u, 8, 16, 8)
# define VUINT8x16_LOAD_ALIGNED_DEFINED
#endif

#ifndef VUINT8x16_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(u, 8, 16, 8)
# define VUINT8x16_LOAD_DEFINED
#endif

#ifndef VUINT8x16_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(u, 8, 16, 8)
# define VUINT8x16_STORE_ALIGNED_DEFINED
#endif

#ifndef VUINT8x16_STORE_DEFINED
VEC_GENERIC_DBL_STORE(u, 8, 16, 8)
# define VUINT8x16_STORE_DEFINED
#endif

#ifndef VUINT8x16_ADD_DEFINED
VEC_GENERIC_DBL_ADD(u, 8, 16, 8)
# define VUINT8x16_ADD_DEFINED
#endif

#ifndef VUINT8x16_SUB_DEFINED
VEC_GENERIC_DBL_SUB(u, 8, 16, 8)
# define VUINT8x16_SUB_DEFINED
#endif

#ifndef VUINT8x16_MUL_DEFINED
VEC_GENERIC_DBL_MUL(u, 8, 16, 8)
# define VUINT8x16_MUL_DEFINED
#endif

#ifndef VUINT8x16_DIV_DEFINED
VEC_GENERIC_DBL_DIV(u, 8, 16, 8)
# define VUINT8x16_DIV_DEFINED
#endif

#ifndef VUINT8x16_AVG_DEFINED
VEC_GENERIC_DBL_AVG(u, 8, 16, 8)
# define VUINT8x16_AVG_DEFINED
#endif

#ifndef VUINT8x16_AND_DEFINED
VEC_GENERIC_DBL_AND(u, 8, 16, 8)
# define VUINT8x16_AND_DEFINED
#endif

#ifndef VUINT8x16_OR_DEFINED
VEC_GENERIC_DBL_OR(u, 8, 16, 8)
# define VUINT8x16_OR_DEFINED
#endif

#ifndef VUINT8x16_XOR_DEFINED
VEC_GENERIC_DBL_XOR(u, 8, 16, 8)
# define VUINT8x16_XOR_DEFINED
#endif

#ifndef VUINT8x16_NOT_DEFINED
VEC_GENERIC_DBL_NOT(u, 8, 16, 8)
# define VUINT8x16_NOT_DEFINED
#endif

#ifndef VUINT8x16_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(u, 8, 16, 8)
# define VUINT8x16_CMPLT_DEFINED
#endif

#ifndef VUINT8x16_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(u, 8, 16, 8)
# define VUINT8x16_CMPEQ_DEFINED
#endif

#ifndef VUINT8x16_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(u, 8, 16, 8)
# define VUINT8x16_CMPGT_DEFINED
#endif

#ifndef VUINT8x16_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(u, 8, 16, 8)
# define VUINT8x16_CMPLE_DEFINED
#endif

#ifndef VUINT8x16_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(u, 8, 16, 8)
# define VUINT8x16_CMPGE_DEFINED
#endif

#ifndef VUINT8x16_MIN_DEFINED
VEC_GENERIC_DBL_MIN(u, 8, 16, 8)
# define VUINT8x16_MIN_DEFINED
#endif

#ifndef VUINT8x16_MAX_DEFINED
VEC_GENERIC_DBL_MAX(u, 8, 16, 8)
# define VUINT8x16_MAX_DEFINED
#endif

#ifndef VUINT8x16_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(u, 8, 16, 8)
# define VUINT8x16_RSHIFT_DEFINED
#endif

#ifndef VUINT8x16_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(u, 8, 16, 8)
# define VUINT8x16_LRSHIFT_DEFINED
#endif

#ifndef VUINT8x16_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(u, 8, 16, 8)
# define VUINT8x16_LSHIFT_DEFINED
#endif



/* vint8x32 */

#ifndef VINT8x32_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(/* nothing */, 8, 32, 16)
# define VINT8x32_SPLAT_DEFINED
#endif

#ifndef VINT8x32_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(/* nothing */, 8, 32, 16)
# define VINT8x32_LOAD_ALIGNED_DEFINED
#endif

#ifndef VINT8x32_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(/* nothing */, 8, 32, 16)
# define VINT8x32_LOAD_DEFINED
#endif

#ifndef VINT8x32_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(/* nothing */, 8, 32, 16)
# define VINT8x32_STORE_ALIGNED_DEFINED
#endif

#ifndef VINT8x32_STORE_DEFINED
VEC_GENERIC_DBL_STORE(/* nothing */, 8, 32, 16)
# define VINT8x32_STORE_DEFINED
#endif

#ifndef VINT8x32_ADD_DEFINED
VEC_GENERIC_DBL_ADD(/* nothing */, 8, 32, 16)
# define VINT8x32_ADD_DEFINED
#endif

#ifndef VINT8x32_SUB_DEFINED
VEC_GENERIC_DBL_SUB(/* nothing */, 8, 32, 16)
# define VINT8x32_SUB_DEFINED
#endif

#ifndef VINT8x32_MUL_DEFINED
VEC_GENERIC_DBL_MUL(/* nothing */, 8, 32, 16)
# define VINT8x32_MUL_DEFINED
#endif

#ifndef VINT8x32_DIV_DEFINED
VEC_GENERIC_DBL_DIV(/* nothing */, 8, 32, 16)
# define VINT8x32_DIV_DEFINED
#endif

#ifndef VINT8x32_AVG_DEFINED
VEC_GENERIC_DBL_AVG(/* nothing */, 8, 32, 16)
# define VINT8x32_AVG_DEFINED
#endif

#ifndef VINT8x32_AND_DEFINED
VEC_GENERIC_DBL_AND(/* nothing */, 8, 32, 16)
# define VINT8x32_AND_DEFINED
#endif

#ifndef VINT8x32_OR_DEFINED
VEC_GENERIC_DBL_OR(/* nothing */, 8, 32, 16)
# define VINT8x32_OR_DEFINED
#endif

#ifndef VINT8x32_XOR_DEFINED
VEC_GENERIC_DBL_XOR(/* nothing */, 8, 32, 16)
# define VINT8x32_XOR_DEFINED
#endif

#ifndef VINT8x32_NOT_DEFINED
VEC_GENERIC_DBL_NOT(/* nothing */, 8, 32, 16)
# define VINT8x32_NOT_DEFINED
#endif

#ifndef VINT8x32_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(/* nothing */, 8, 32, 16)
# define VINT8x32_CMPLT_DEFINED
#endif

#ifndef VINT8x32_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(/* nothing */, 8, 32, 16)
# define VINT8x32_CMPEQ_DEFINED
#endif

#ifndef VINT8x32_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(/* nothing */, 8, 32, 16)
# define VINT8x32_CMPGT_DEFINED
#endif

#ifndef VINT8x32_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(/* nothing */, 8, 32, 16)
# define VINT8x32_CMPLE_DEFINED
#endif

#ifndef VINT8x32_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(/* nothing */, 8, 32, 16)
# define VINT8x32_CMPGE_DEFINED
#endif

#ifndef VINT8x32_MIN_DEFINED
VEC_GENERIC_DBL_MIN(/* nothing */, 8, 32, 16)
# define VINT8x32_MIN_DEFINED
#endif

#ifndef VINT8x32_MAX_DEFINED
VEC_GENERIC_DBL_MAX(/* nothing */, 8, 32, 16)
# define VINT8x32_MAX_DEFINED
#endif

#ifndef VINT8x32_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(/* nothing */, 8, 32, 16)
# define VINT8x32_RSHIFT_DEFINED
#endif

#ifndef VINT8x32_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(/* nothing */, 8, 32, 16)
# define VINT8x32_LRSHIFT_DEFINED
#endif

#ifndef VINT8x32_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(/* nothing */, 8, 32, 16)
# define VINT8x32_LSHIFT_DEFINED
#endif



/* vuint8x32 */

#ifndef VUINT8x32_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(u, 8, 32, 16)
# define VUINT8x32_SPLAT_DEFINED
#endif

#ifndef VUINT8x32_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(u, 8, 32, 16)
# define VUINT8x32_LOAD_ALIGNED_DEFINED
#endif

#ifndef VUINT8x32_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(u, 8, 32, 16)
# define VUINT8x32_LOAD_DEFINED
#endif

#ifndef VUINT8x32_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(u, 8, 32, 16)
# define VUINT8x32_STORE_ALIGNED_DEFINED
#endif

#ifndef VUINT8x32_STORE_DEFINED
VEC_GENERIC_DBL_STORE(u, 8, 32, 16)
# define VUINT8x32_STORE_DEFINED
#endif

#ifndef VUINT8x32_ADD_DEFINED
VEC_GENERIC_DBL_ADD(u, 8, 32, 16)
# define VUINT8x32_ADD_DEFINED
#endif

#ifndef VUINT8x32_SUB_DEFINED
VEC_GENERIC_DBL_SUB(u, 8, 32, 16)
# define VUINT8x32_SUB_DEFINED
#endif

#ifndef VUINT8x32_MUL_DEFINED
VEC_GENERIC_DBL_MUL(u, 8, 32, 16)
# define VUINT8x32_MUL_DEFINED
#endif

#ifndef VUINT8x32_DIV_DEFINED
VEC_GENERIC_DBL_DIV(u, 8, 32, 16)
# define VUINT8x32_DIV_DEFINED
#endif

#ifndef VUINT8x32_AVG_DEFINED
VEC_GENERIC_DBL_AVG(u, 8, 32, 16)
# define VUINT8x32_AVG_DEFINED
#endif

#ifndef VUINT8x32_AND_DEFINED
VEC_GENERIC_DBL_AND(u, 8, 32, 16)
# define VUINT8x32_AND_DEFINED
#endif

#ifndef VUINT8x32_OR_DEFINED
VEC_GENERIC_DBL_OR(u, 8, 32, 16)
# define VUINT8x32_OR_DEFINED
#endif

#ifndef VUINT8x32_XOR_DEFINED
VEC_GENERIC_DBL_XOR(u, 8, 32, 16)
# define VUINT8x32_XOR_DEFINED
#endif

#ifndef VUINT8x32_NOT_DEFINED
VEC_GENERIC_DBL_NOT(u, 8, 32, 16)
# define VUINT8x32_NOT_DEFINED
#endif

#ifndef VUINT8x32_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(u, 8, 32, 16)
# define VUINT8x32_CMPLT_DEFINED
#endif

#ifndef VUINT8x32_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(u, 8, 32, 16)
# define VUINT8x32_CMPEQ_DEFINED
#endif

#ifndef VUINT8x32_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(u, 8, 32, 16)
# define VUINT8x32_CMPGT_DEFINED
#endif

#ifndef VUINT8x32_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(u, 8, 32, 16)
# define VUINT8x32_CMPLE_DEFINED
#endif

#ifndef VUINT8x32_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(u, 8, 32, 16)
# define VUINT8x32_CMPGE_DEFINED
#endif

#ifndef VUINT8x32_MIN_DEFINED
VEC_GENERIC_DBL_MIN(u, 8, 32, 16)
# define VUINT8x32_MIN_DEFINED
#endif

#ifndef VUINT8x32_MAX_DEFINED
VEC_GENERIC_DBL_MAX(u, 8, 32, 16)
# define VUINT8x32_MAX_DEFINED
#endif

#ifndef VUINT8x32_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(u, 8, 32, 16)
# define VUINT8x32_RSHIFT_DEFINED
#endif

#ifndef VUINT8x32_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(u, 8, 32, 16)
# define VUINT8x32_LRSHIFT_DEFINED
#endif

#ifndef VUINT8x32_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(u, 8, 32, 16)
# define VUINT8x32_LSHIFT_DEFINED
#endif



/* vint8x64 */

#ifndef VINT8x64_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(/* nothing */, 8, 64, 32)
# define VINT8x64_SPLAT_DEFINED
#endif

#ifndef VINT8x64_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(/* nothing */, 8, 64, 32)
# define VINT8x64_LOAD_ALIGNED_DEFINED
#endif

#ifndef VINT8x64_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(/* nothing */, 8, 64, 32)
# define VINT8x64_LOAD_DEFINED
#endif

#ifndef VINT8x64_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(/* nothing */, 8, 64, 32)
# define VINT8x64_STORE_ALIGNED_DEFINED
#endif

#ifndef VINT8x64_STORE_DEFINED
VEC_GENERIC_DBL_STORE(/* nothing */, 8, 64, 32)
# define VINT8x64_STORE_DEFINED
#endif

#ifndef VINT8x64_ADD_DEFINED
VEC_GENERIC_DBL_ADD(/* nothing */, 8, 64, 32)
# define VINT8x64_ADD_DEFINED
#endif

#ifndef VINT8x64_SUB_DEFINED
VEC_GENERIC_DBL_SUB(/* nothing */, 8, 64, 32)
# define VINT8x64_SUB_DEFINED
#endif

#ifndef VINT8x64_MUL_DEFINED
VEC_GENERIC_DBL_MUL(/* nothing */, 8, 64, 32)
# define VINT8x64_MUL_DEFINED
#endif

#ifndef VINT8x64_DIV_DEFINED
VEC_GENERIC_DBL_DIV(/* nothing */, 8, 64, 32)
# define VINT8x64_DIV_DEFINED
#endif

#ifndef VINT8x64_AVG_DEFINED
VEC_GENERIC_DBL_AVG(/* nothing */, 8, 64, 32)
# define VINT8x64_AVG_DEFINED
#endif

#ifndef VINT8x64_AND_DEFINED
VEC_GENERIC_DBL_AND(/* nothing */, 8, 64, 32)
# define VINT8x64_AND_DEFINED
#endif

#ifndef VINT8x64_OR_DEFINED
VEC_GENERIC_DBL_OR(/* nothing */, 8, 64, 32)
# define VINT8x64_OR_DEFINED
#endif

#ifndef VINT8x64_XOR_DEFINED
VEC_GENERIC_DBL_XOR(/* nothing */, 8, 64, 32)
# define VINT8x64_XOR_DEFINED
#endif

#ifndef VINT8x64_NOT_DEFINED
VEC_GENERIC_DBL_NOT(/* nothing */, 8, 64, 32)
# define VINT8x64_NOT_DEFINED
#endif

#ifndef VINT8x64_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(/* nothing */, 8, 64, 32)
# define VINT8x64_CMPLT_DEFINED
#endif

#ifndef VINT8x64_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(/* nothing */, 8, 64, 32)
# define VINT8x64_CMPEQ_DEFINED
#endif

#ifndef VINT8x64_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(/* nothing */, 8, 64, 32)
# define VINT8x64_CMPGT_DEFINED
#endif

#ifndef VINT8x64_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(/* nothing */, 8, 64, 32)
# define VINT8x64_CMPLE_DEFINED
#endif

#ifndef VINT8x64_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(/* nothing */, 8, 64, 32)
# define VINT8x64_CMPGE_DEFINED
#endif

#ifndef VINT8x64_MIN_DEFINED
VEC_GENERIC_DBL_MIN(/* nothing */, 8, 64, 32)
# define VINT8x64_MIN_DEFINED
#endif

#ifndef VINT8x64_MAX_DEFINED
VEC_GENERIC_DBL_MAX(/* nothing */, 8, 64, 32)
# define VINT8x64_MAX_DEFINED
#endif

#ifndef VINT8x64_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(/* nothing */, 8, 64, 32)
# define VINT8x64_RSHIFT_DEFINED
#endif

#ifndef VINT8x64_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(/* nothing */, 8, 64, 32)
# define VINT8x64_LRSHIFT_DEFINED
#endif

#ifndef VINT8x64_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(/* nothing */, 8, 64, 32)
# define VINT8x64_LSHIFT_DEFINED
#endif



/* vuint8x64 */

#ifndef VUINT8x64_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(u, 8, 64, 32)
# define VUINT8x64_SPLAT_DEFINED
#endif

#ifndef VUINT8x64_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(u, 8, 64, 32)
# define VUINT8x64_LOAD_ALIGNED_DEFINED
#endif

#ifndef VUINT8x64_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(u, 8, 64, 32)
# define VUINT8x64_LOAD_DEFINED
#endif

#ifndef VUINT8x64_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(u, 8, 64, 32)
# define VUINT8x64_STORE_ALIGNED_DEFINED
#endif

#ifndef VUINT8x64_STORE_DEFINED
VEC_GENERIC_DBL_STORE(u, 8, 64, 32)
# define VUINT8x64_STORE_DEFINED
#endif

#ifndef VUINT8x64_ADD_DEFINED
VEC_GENERIC_DBL_ADD(u, 8, 64, 32)
# define VUINT8x64_ADD_DEFINED
#endif

#ifndef VUINT8x64_SUB_DEFINED
VEC_GENERIC_DBL_SUB(u, 8, 64, 32)
# define VUINT8x64_SUB_DEFINED
#endif

#ifndef VUINT8x64_MUL_DEFINED
VEC_GENERIC_DBL_MUL(u, 8, 64, 32)
# define VUINT8x64_MUL_DEFINED
#endif

#ifndef VUINT8x64_DIV_DEFINED
VEC_GENERIC_DBL_DIV(u, 8, 64, 32)
# define VUINT8x64_DIV_DEFINED
#endif

#ifndef VUINT8x64_AVG_DEFINED
VEC_GENERIC_DBL_AVG(u, 8, 64, 32)
# define VUINT8x64_AVG_DEFINED
#endif

#ifndef VUINT8x64_AND_DEFINED
VEC_GENERIC_DBL_AND(u, 8, 64, 32)
# define VUINT8x64_AND_DEFINED
#endif

#ifndef VUINT8x64_OR_DEFINED
VEC_GENERIC_DBL_OR(u, 8, 64, 32)
# define VUINT8x64_OR_DEFINED
#endif

#ifndef VUINT8x64_XOR_DEFINED
VEC_GENERIC_DBL_XOR(u, 8, 64, 32)
# define VUINT8x64_XOR_DEFINED
#endif

#ifndef VUINT8x64_NOT_DEFINED
VEC_GENERIC_DBL_NOT(u, 8, 64, 32)
# define VUINT8x64_NOT_DEFINED
#endif

#ifndef VUINT8x64_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(u, 8, 64, 32)
# define VUINT8x64_CMPLT_DEFINED
#endif

#ifndef VUINT8x64_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(u, 8, 64, 32)
# define VUINT8x64_CMPEQ_DEFINED
#endif

#ifndef VUINT8x64_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(u, 8, 64, 32)
# define VUINT8x64_CMPGT_DEFINED
#endif

#ifndef VUINT8x64_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(u, 8, 64, 32)
# define VUINT8x64_CMPLE_DEFINED
#endif

#ifndef VUINT8x64_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(u, 8, 64, 32)
# define VUINT8x64_CMPGE_DEFINED
#endif

#ifndef VUINT8x64_MIN_DEFINED
VEC_GENERIC_DBL_MIN(u, 8, 64, 32)
# define VUINT8x64_MIN_DEFINED
#endif

#ifndef VUINT8x64_MAX_DEFINED
VEC_GENERIC_DBL_MAX(u, 8, 64, 32)
# define VUINT8x64_MAX_DEFINED
#endif

#ifndef VUINT8x64_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(u, 8, 64, 32)
# define VUINT8x64_RSHIFT_DEFINED
#endif

#ifndef VUINT8x64_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(u, 8, 64, 32)
# define VUINT8x64_LRSHIFT_DEFINED
#endif

#ifndef VUINT8x64_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(u, 8, 64, 32)
# define VUINT8x64_LSHIFT_DEFINED
#endif



/* vint16x2 */

#ifndef VINT16x2_SPLAT_DEFINED
VEC_GENERIC_SPLAT(/* nothing */, 16, 2)
# define VINT16x2_SPLAT_DEFINED
#endif
#ifndef VINT16x2_LOAD_ALIGNED_DEFINED
VEC_GENERIC_LOAD_ALIGNED(/* nothing */, 16, 2)
# define VINT16x2_LOAD_ALIGNED_DEFINED
#endif
#ifndef VINT16x2_LOAD_DEFINED
VEC_GENERIC_LOAD(/* nothing */, 16, 2)
# define VINT16x2_LOAD_DEFINED
#endif
#ifndef VINT16x2_STORE_ALIGNED_DEFINED
VEC_GENERIC_STORE_ALIGNED(/* nothing */, 16, 2)
# define VINT16x2_STORE_ALIGNED_DEFINED
#endif
#ifndef VINT16x2_STORE_DEFINED
VEC_GENERIC_STORE(/* nothing */, 16, 2)
# define VINT16x2_STORE_DEFINED
#endif
#ifndef VINT16x2_ADD_DEFINED
VEC_GENERIC_ADD(/* nothing */, 16, 2)
# define VINT16x2_ADD_DEFINED
#endif
#ifndef VINT16x2_SUB_DEFINED
VEC_GENERIC_SUB(/* nothing */, 16, 2)
# define VINT16x2_SUB_DEFINED
#endif
#ifndef VINT16x2_MUL_DEFINED
VEC_GENERIC_MUL(/* nothing */, 16, 2)
# define VINT16x2_MUL_DEFINED
#endif
#ifndef VINT16x2_DIV_DEFINED
VEC_GENERIC_DIV(/* nothing */, 16, 2)
# define VINT16x2_DIV_DEFINED
#endif
#ifndef VINT16x2_AVG_DEFINED
VEC_GENERIC_AVG(/* nothing */, 16, 2)
# define VINT16x2_AVG_DEFINED
#endif
#ifndef VINT16x2_AND_DEFINED
VEC_GENERIC_AND(/* nothing */, 16, 2)
# define VINT16x2_AND_DEFINED
#endif
#ifndef VINT16x2_OR_DEFINED
VEC_GENERIC_OR(/* nothing */, 16, 2)
# define VINT16x2_OR_DEFINED
#endif
#ifndef VINT16x2_XOR_DEFINED
VEC_GENERIC_XOR(/* nothing */, 16, 2)
# define VINT16x2_XOR_DEFINED
#endif
#ifndef VINT16x2_NOT_DEFINED
VEC_GENERIC_NOT(/* nothing */, 16, 2)
# define VINT16x2_NOT_DEFINED
#endif
#ifndef VINT16x2_CMPLT_DEFINED
VEC_GENERIC_CMPLT(/* nothing */, 16, 2)
# define VINT16x2_CMPLT_DEFINED
#endif
#ifndef VINT16x2_CMPEQ_DEFINED
VEC_GENERIC_CMPEQ(/* nothing */, 16, 2)
# define VINT16x2_CMPEQ_DEFINED
#endif
#ifndef VINT16x2_CMPGT_DEFINED
VEC_GENERIC_CMPGT(/* nothing */, 16, 2)
# define VINT16x2_CMPGT_DEFINED
#endif
#ifndef VINT16x2_CMPLE_DEFINED
VEC_GENERIC_CMPLE(/* nothing */, 16, 2)
# define VINT16x2_CMPLE_DEFINED
#endif
#ifndef VINT16x2_CMPGE_DEFINED
VEC_GENERIC_CMPGE(/* nothing */, 16, 2)
# define VINT16x2_CMPGE_DEFINED
#endif
#ifndef VINT16x2_MIN_DEFINED
VEC_GENERIC_MIN(/* nothing */, 16, 2)
# define VINT16x2_MIN_DEFINED
#endif
#ifndef VINT16x2_MAX_DEFINED
VEC_GENERIC_MAX(/* nothing */, 16, 2)
# define VINT16x2_MAX_DEFINED
#endif
#ifndef VINT16x2_RSHIFT_DEFINED
VEC_GENERIC_RSHIFT(/* nothing */, 16, 2)
# define VINT16x2_RSHIFT_DEFINED
#endif
#ifndef VINT16x2_LRSHIFT_DEFINED
VEC_GENERIC_LRSHIFT(/* nothing */, 16, 2)
# define VINT16x2_LRSHIFT_DEFINED
#endif
#ifndef VINT16x2_LSHIFT_DEFINED
VEC_GENERIC_LSHIFT(/* nothing */, 16, 2)
# define VINT16x2_LSHIFT_DEFINED
#endif


/* vuint16x2 */

#ifndef VUINT16x2_SPLAT_DEFINED
VEC_GENERIC_SPLAT(u, 16, 2)
# define VUINT16x2_SPLAT_DEFINED
#endif
#ifndef VUINT16x2_LOAD_ALIGNED_DEFINED
VEC_GENERIC_LOAD_ALIGNED(u, 16, 2)
# define VUINT16x2_LOAD_ALIGNED_DEFINED
#endif
#ifndef VUINT16x2_LOAD_DEFINED
VEC_GENERIC_LOAD(u, 16, 2)
# define VUINT16x2_LOAD_DEFINED
#endif
#ifndef VUINT16x2_STORE_ALIGNED_DEFINED
VEC_GENERIC_STORE_ALIGNED(u, 16, 2)
# define VUINT16x2_STORE_ALIGNED_DEFINED
#endif
#ifndef VUINT16x2_STORE_DEFINED
VEC_GENERIC_STORE(u, 16, 2)
# define VUINT16x2_STORE_DEFINED
#endif
#ifndef VUINT16x2_ADD_DEFINED
VEC_GENERIC_ADD(u, 16, 2)
# define VUINT16x2_ADD_DEFINED
#endif
#ifndef VUINT16x2_SUB_DEFINED
VEC_GENERIC_SUB(u, 16, 2)
# define VUINT16x2_SUB_DEFINED
#endif
#ifndef VUINT16x2_MUL_DEFINED
VEC_GENERIC_MUL(u, 16, 2)
# define VUINT16x2_MUL_DEFINED
#endif
#ifndef VUINT16x2_DIV_DEFINED
VEC_GENERIC_DIV(u, 16, 2)
# define VUINT16x2_DIV_DEFINED
#endif
#ifndef VUINT16x2_AVG_DEFINED
VEC_GENERIC_AVG(u, 16, 2)
# define VUINT16x2_AVG_DEFINED
#endif
#ifndef VUINT16x2_AND_DEFINED
VEC_GENERIC_AND(u, 16, 2)
# define VUINT16x2_AND_DEFINED
#endif
#ifndef VUINT16x2_OR_DEFINED
VEC_GENERIC_OR(u, 16, 2)
# define VUINT16x2_OR_DEFINED
#endif
#ifndef VUINT16x2_XOR_DEFINED
VEC_GENERIC_XOR(u, 16, 2)
# define VUINT16x2_XOR_DEFINED
#endif
#ifndef VUINT16x2_NOT_DEFINED
VEC_GENERIC_NOT(u, 16, 2)
# define VUINT16x2_NOT_DEFINED
#endif
#ifndef VUINT16x2_CMPLT_DEFINED
VEC_GENERIC_CMPLT(u, 16, 2)
# define VUINT16x2_CMPLT_DEFINED
#endif
#ifndef VUINT16x2_CMPEQ_DEFINED
VEC_GENERIC_CMPEQ(u, 16, 2)
# define VUINT16x2_CMPEQ_DEFINED
#endif
#ifndef VUINT16x2_CMPGT_DEFINED
VEC_GENERIC_CMPGT(u, 16, 2)
# define VUINT16x2_CMPGT_DEFINED
#endif
#ifndef VUINT16x2_CMPLE_DEFINED
VEC_GENERIC_CMPLE(u, 16, 2)
# define VUINT16x2_CMPLE_DEFINED
#endif
#ifndef VUINT16x2_CMPGE_DEFINED
VEC_GENERIC_CMPGE(u, 16, 2)
# define VUINT16x2_CMPGE_DEFINED
#endif
#ifndef VUINT16x2_MIN_DEFINED
VEC_GENERIC_MIN(u, 16, 2)
# define VUINT16x2_MIN_DEFINED
#endif
#ifndef VUINT16x2_MAX_DEFINED
VEC_GENERIC_MAX(u, 16, 2)
# define VUINT16x2_MAX_DEFINED
#endif
#ifndef VUINT16x2_RSHIFT_DEFINED
VEC_GENERIC_RSHIFT(u, 16, 2)
# define VUINT16x2_RSHIFT_DEFINED
#endif
#ifndef VUINT16x2_LRSHIFT_DEFINED
VEC_GENERIC_LRSHIFT(u, 16, 2)
# define VUINT16x2_LRSHIFT_DEFINED
#endif
#ifndef VUINT16x2_LSHIFT_DEFINED
VEC_GENERIC_LSHIFT(u, 16, 2)
# define VUINT16x2_LSHIFT_DEFINED
#endif


/* vint16x4 */

#ifndef VINT16x4_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(/* nothing */, 16, 4, 2)
# define VINT16x4_SPLAT_DEFINED
#endif

#ifndef VINT16x4_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(/* nothing */, 16, 4, 2)
# define VINT16x4_LOAD_ALIGNED_DEFINED
#endif

#ifndef VINT16x4_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(/* nothing */, 16, 4, 2)
# define VINT16x4_LOAD_DEFINED
#endif

#ifndef VINT16x4_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(/* nothing */, 16, 4, 2)
# define VINT16x4_STORE_ALIGNED_DEFINED
#endif

#ifndef VINT16x4_STORE_DEFINED
VEC_GENERIC_DBL_STORE(/* nothing */, 16, 4, 2)
# define VINT16x4_STORE_DEFINED
#endif

#ifndef VINT16x4_ADD_DEFINED
VEC_GENERIC_DBL_ADD(/* nothing */, 16, 4, 2)
# define VINT16x4_ADD_DEFINED
#endif

#ifndef VINT16x4_SUB_DEFINED
VEC_GENERIC_DBL_SUB(/* nothing */, 16, 4, 2)
# define VINT16x4_SUB_DEFINED
#endif

#ifndef VINT16x4_MUL_DEFINED
VEC_GENERIC_DBL_MUL(/* nothing */, 16, 4, 2)
# define VINT16x4_MUL_DEFINED
#endif

#ifndef VINT16x4_DIV_DEFINED
VEC_GENERIC_DBL_DIV(/* nothing */, 16, 4, 2)
# define VINT16x4_DIV_DEFINED
#endif

#ifndef VINT16x4_AVG_DEFINED
VEC_GENERIC_DBL_AVG(/* nothing */, 16, 4, 2)
# define VINT16x4_AVG_DEFINED
#endif

#ifndef VINT16x4_AND_DEFINED
VEC_GENERIC_DBL_AND(/* nothing */, 16, 4, 2)
# define VINT16x4_AND_DEFINED
#endif

#ifndef VINT16x4_OR_DEFINED
VEC_GENERIC_DBL_OR(/* nothing */, 16, 4, 2)
# define VINT16x4_OR_DEFINED
#endif

#ifndef VINT16x4_XOR_DEFINED
VEC_GENERIC_DBL_XOR(/* nothing */, 16, 4, 2)
# define VINT16x4_XOR_DEFINED
#endif

#ifndef VINT16x4_NOT_DEFINED
VEC_GENERIC_DBL_NOT(/* nothing */, 16, 4, 2)
# define VINT16x4_NOT_DEFINED
#endif

#ifndef VINT16x4_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(/* nothing */, 16, 4, 2)
# define VINT16x4_CMPLT_DEFINED
#endif

#ifndef VINT16x4_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(/* nothing */, 16, 4, 2)
# define VINT16x4_CMPEQ_DEFINED
#endif

#ifndef VINT16x4_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(/* nothing */, 16, 4, 2)
# define VINT16x4_CMPGT_DEFINED
#endif

#ifndef VINT16x4_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(/* nothing */, 16, 4, 2)
# define VINT16x4_CMPLE_DEFINED
#endif

#ifndef VINT16x4_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(/* nothing */, 16, 4, 2)
# define VINT16x4_CMPGE_DEFINED
#endif

#ifndef VINT16x4_MIN_DEFINED
VEC_GENERIC_DBL_MIN(/* nothing */, 16, 4, 2)
# define VINT16x4_MIN_DEFINED
#endif

#ifndef VINT16x4_MAX_DEFINED
VEC_GENERIC_DBL_MAX(/* nothing */, 16, 4, 2)
# define VINT16x4_MAX_DEFINED
#endif

#ifndef VINT16x4_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(/* nothing */, 16, 4, 2)
# define VINT16x4_RSHIFT_DEFINED
#endif

#ifndef VINT16x4_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(/* nothing */, 16, 4, 2)
# define VINT16x4_LRSHIFT_DEFINED
#endif

#ifndef VINT16x4_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(/* nothing */, 16, 4, 2)
# define VINT16x4_LSHIFT_DEFINED
#endif



/* vuint16x4 */

#ifndef VUINT16x4_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(u, 16, 4, 2)
# define VUINT16x4_SPLAT_DEFINED
#endif

#ifndef VUINT16x4_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(u, 16, 4, 2)
# define VUINT16x4_LOAD_ALIGNED_DEFINED
#endif

#ifndef VUINT16x4_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(u, 16, 4, 2)
# define VUINT16x4_LOAD_DEFINED
#endif

#ifndef VUINT16x4_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(u, 16, 4, 2)
# define VUINT16x4_STORE_ALIGNED_DEFINED
#endif

#ifndef VUINT16x4_STORE_DEFINED
VEC_GENERIC_DBL_STORE(u, 16, 4, 2)
# define VUINT16x4_STORE_DEFINED
#endif

#ifndef VUINT16x4_ADD_DEFINED
VEC_GENERIC_DBL_ADD(u, 16, 4, 2)
# define VUINT16x4_ADD_DEFINED
#endif

#ifndef VUINT16x4_SUB_DEFINED
VEC_GENERIC_DBL_SUB(u, 16, 4, 2)
# define VUINT16x4_SUB_DEFINED
#endif

#ifndef VUINT16x4_MUL_DEFINED
VEC_GENERIC_DBL_MUL(u, 16, 4, 2)
# define VUINT16x4_MUL_DEFINED
#endif

#ifndef VUINT16x4_DIV_DEFINED
VEC_GENERIC_DBL_DIV(u, 16, 4, 2)
# define VUINT16x4_DIV_DEFINED
#endif

#ifndef VUINT16x4_AVG_DEFINED
VEC_GENERIC_DBL_AVG(u, 16, 4, 2)
# define VUINT16x4_AVG_DEFINED
#endif

#ifndef VUINT16x4_AND_DEFINED
VEC_GENERIC_DBL_AND(u, 16, 4, 2)
# define VUINT16x4_AND_DEFINED
#endif

#ifndef VUINT16x4_OR_DEFINED
VEC_GENERIC_DBL_OR(u, 16, 4, 2)
# define VUINT16x4_OR_DEFINED
#endif

#ifndef VUINT16x4_XOR_DEFINED
VEC_GENERIC_DBL_XOR(u, 16, 4, 2)
# define VUINT16x4_XOR_DEFINED
#endif

#ifndef VUINT16x4_NOT_DEFINED
VEC_GENERIC_DBL_NOT(u, 16, 4, 2)
# define VUINT16x4_NOT_DEFINED
#endif

#ifndef VUINT16x4_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(u, 16, 4, 2)
# define VUINT16x4_CMPLT_DEFINED
#endif

#ifndef VUINT16x4_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(u, 16, 4, 2)
# define VUINT16x4_CMPEQ_DEFINED
#endif

#ifndef VUINT16x4_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(u, 16, 4, 2)
# define VUINT16x4_CMPGT_DEFINED
#endif

#ifndef VUINT16x4_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(u, 16, 4, 2)
# define VUINT16x4_CMPLE_DEFINED
#endif

#ifndef VUINT16x4_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(u, 16, 4, 2)
# define VUINT16x4_CMPGE_DEFINED
#endif

#ifndef VUINT16x4_MIN_DEFINED
VEC_GENERIC_DBL_MIN(u, 16, 4, 2)
# define VUINT16x4_MIN_DEFINED
#endif

#ifndef VUINT16x4_MAX_DEFINED
VEC_GENERIC_DBL_MAX(u, 16, 4, 2)
# define VUINT16x4_MAX_DEFINED
#endif

#ifndef VUINT16x4_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(u, 16, 4, 2)
# define VUINT16x4_RSHIFT_DEFINED
#endif

#ifndef VUINT16x4_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(u, 16, 4, 2)
# define VUINT16x4_LRSHIFT_DEFINED
#endif

#ifndef VUINT16x4_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(u, 16, 4, 2)
# define VUINT16x4_LSHIFT_DEFINED
#endif



/* vint16x8 */

#ifndef VINT16x8_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(/* nothing */, 16, 8, 4)
# define VINT16x8_SPLAT_DEFINED
#endif

#ifndef VINT16x8_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(/* nothing */, 16, 8, 4)
# define VINT16x8_LOAD_ALIGNED_DEFINED
#endif

#ifndef VINT16x8_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(/* nothing */, 16, 8, 4)
# define VINT16x8_LOAD_DEFINED
#endif

#ifndef VINT16x8_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(/* nothing */, 16, 8, 4)
# define VINT16x8_STORE_ALIGNED_DEFINED
#endif

#ifndef VINT16x8_STORE_DEFINED
VEC_GENERIC_DBL_STORE(/* nothing */, 16, 8, 4)
# define VINT16x8_STORE_DEFINED
#endif

#ifndef VINT16x8_ADD_DEFINED
VEC_GENERIC_DBL_ADD(/* nothing */, 16, 8, 4)
# define VINT16x8_ADD_DEFINED
#endif

#ifndef VINT16x8_SUB_DEFINED
VEC_GENERIC_DBL_SUB(/* nothing */, 16, 8, 4)
# define VINT16x8_SUB_DEFINED
#endif

#ifndef VINT16x8_MUL_DEFINED
VEC_GENERIC_DBL_MUL(/* nothing */, 16, 8, 4)
# define VINT16x8_MUL_DEFINED
#endif

#ifndef VINT16x8_DIV_DEFINED
VEC_GENERIC_DBL_DIV(/* nothing */, 16, 8, 4)
# define VINT16x8_DIV_DEFINED
#endif

#ifndef VINT16x8_AVG_DEFINED
VEC_GENERIC_DBL_AVG(/* nothing */, 16, 8, 4)
# define VINT16x8_AVG_DEFINED
#endif

#ifndef VINT16x8_AND_DEFINED
VEC_GENERIC_DBL_AND(/* nothing */, 16, 8, 4)
# define VINT16x8_AND_DEFINED
#endif

#ifndef VINT16x8_OR_DEFINED
VEC_GENERIC_DBL_OR(/* nothing */, 16, 8, 4)
# define VINT16x8_OR_DEFINED
#endif

#ifndef VINT16x8_XOR_DEFINED
VEC_GENERIC_DBL_XOR(/* nothing */, 16, 8, 4)
# define VINT16x8_XOR_DEFINED
#endif

#ifndef VINT16x8_NOT_DEFINED
VEC_GENERIC_DBL_NOT(/* nothing */, 16, 8, 4)
# define VINT16x8_NOT_DEFINED
#endif

#ifndef VINT16x8_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(/* nothing */, 16, 8, 4)
# define VINT16x8_CMPLT_DEFINED
#endif

#ifndef VINT16x8_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(/* nothing */, 16, 8, 4)
# define VINT16x8_CMPEQ_DEFINED
#endif

#ifndef VINT16x8_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(/* nothing */, 16, 8, 4)
# define VINT16x8_CMPGT_DEFINED
#endif

#ifndef VINT16x8_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(/* nothing */, 16, 8, 4)
# define VINT16x8_CMPLE_DEFINED
#endif

#ifndef VINT16x8_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(/* nothing */, 16, 8, 4)
# define VINT16x8_CMPGE_DEFINED
#endif

#ifndef VINT16x8_MIN_DEFINED
VEC_GENERIC_DBL_MIN(/* nothing */, 16, 8, 4)
# define VINT16x8_MIN_DEFINED
#endif

#ifndef VINT16x8_MAX_DEFINED
VEC_GENERIC_DBL_MAX(/* nothing */, 16, 8, 4)
# define VINT16x8_MAX_DEFINED
#endif

#ifndef VINT16x8_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(/* nothing */, 16, 8, 4)
# define VINT16x8_RSHIFT_DEFINED
#endif

#ifndef VINT16x8_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(/* nothing */, 16, 8, 4)
# define VINT16x8_LRSHIFT_DEFINED
#endif

#ifndef VINT16x8_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(/* nothing */, 16, 8, 4)
# define VINT16x8_LSHIFT_DEFINED
#endif



/* vuint16x8 */

#ifndef VUINT16x8_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(u, 16, 8, 4)
# define VUINT16x8_SPLAT_DEFINED
#endif

#ifndef VUINT16x8_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(u, 16, 8, 4)
# define VUINT16x8_LOAD_ALIGNED_DEFINED
#endif

#ifndef VUINT16x8_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(u, 16, 8, 4)
# define VUINT16x8_LOAD_DEFINED
#endif

#ifndef VUINT16x8_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(u, 16, 8, 4)
# define VUINT16x8_STORE_ALIGNED_DEFINED
#endif

#ifndef VUINT16x8_STORE_DEFINED
VEC_GENERIC_DBL_STORE(u, 16, 8, 4)
# define VUINT16x8_STORE_DEFINED
#endif

#ifndef VUINT16x8_ADD_DEFINED
VEC_GENERIC_DBL_ADD(u, 16, 8, 4)
# define VUINT16x8_ADD_DEFINED
#endif

#ifndef VUINT16x8_SUB_DEFINED
VEC_GENERIC_DBL_SUB(u, 16, 8, 4)
# define VUINT16x8_SUB_DEFINED
#endif

#ifndef VUINT16x8_MUL_DEFINED
VEC_GENERIC_DBL_MUL(u, 16, 8, 4)
# define VUINT16x8_MUL_DEFINED
#endif

#ifndef VUINT16x8_DIV_DEFINED
VEC_GENERIC_DBL_DIV(u, 16, 8, 4)
# define VUINT16x8_DIV_DEFINED
#endif

#ifndef VUINT16x8_AVG_DEFINED
VEC_GENERIC_DBL_AVG(u, 16, 8, 4)
# define VUINT16x8_AVG_DEFINED
#endif

#ifndef VUINT16x8_AND_DEFINED
VEC_GENERIC_DBL_AND(u, 16, 8, 4)
# define VUINT16x8_AND_DEFINED
#endif

#ifndef VUINT16x8_OR_DEFINED
VEC_GENERIC_DBL_OR(u, 16, 8, 4)
# define VUINT16x8_OR_DEFINED
#endif

#ifndef VUINT16x8_XOR_DEFINED
VEC_GENERIC_DBL_XOR(u, 16, 8, 4)
# define VUINT16x8_XOR_DEFINED
#endif

#ifndef VUINT16x8_NOT_DEFINED
VEC_GENERIC_DBL_NOT(u, 16, 8, 4)
# define VUINT16x8_NOT_DEFINED
#endif

#ifndef VUINT16x8_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(u, 16, 8, 4)
# define VUINT16x8_CMPLT_DEFINED
#endif

#ifndef VUINT16x8_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(u, 16, 8, 4)
# define VUINT16x8_CMPEQ_DEFINED
#endif

#ifndef VUINT16x8_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(u, 16, 8, 4)
# define VUINT16x8_CMPGT_DEFINED
#endif

#ifndef VUINT16x8_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(u, 16, 8, 4)
# define VUINT16x8_CMPLE_DEFINED
#endif

#ifndef VUINT16x8_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(u, 16, 8, 4)
# define VUINT16x8_CMPGE_DEFINED
#endif

#ifndef VUINT16x8_MIN_DEFINED
VEC_GENERIC_DBL_MIN(u, 16, 8, 4)
# define VUINT16x8_MIN_DEFINED
#endif

#ifndef VUINT16x8_MAX_DEFINED
VEC_GENERIC_DBL_MAX(u, 16, 8, 4)
# define VUINT16x8_MAX_DEFINED
#endif

#ifndef VUINT16x8_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(u, 16, 8, 4)
# define VUINT16x8_RSHIFT_DEFINED
#endif

#ifndef VUINT16x8_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(u, 16, 8, 4)
# define VUINT16x8_LRSHIFT_DEFINED
#endif

#ifndef VUINT16x8_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(u, 16, 8, 4)
# define VUINT16x8_LSHIFT_DEFINED
#endif



/* vint16x16 */

#ifndef VINT16x16_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(/* nothing */, 16, 16, 8)
# define VINT16x16_SPLAT_DEFINED
#endif

#ifndef VINT16x16_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(/* nothing */, 16, 16, 8)
# define VINT16x16_LOAD_ALIGNED_DEFINED
#endif

/* Remainder of the vint16x16 generic-fallback section (the SPLAT and
 * LOAD_ALIGNED guards for this type appear before this point).
 * VEC_GENERIC_DBL_*(sign, bits, size, halfsize) macros are defined earlier
 * in this file; the trailing argument (8) is half the lane count, which
 * suggests each op is composed from two vint16x8 halves — confirm against
 * the macro definitions. Each op is emitted only if no platform backend
 * already provided it (guarded by the *_DEFINED macros). */
#ifndef VINT16x16_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(/* nothing */, 16, 16, 8)
# define VINT16x16_LOAD_DEFINED
#endif

#ifndef VINT16x16_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(/* nothing */, 16, 16, 8)
# define VINT16x16_STORE_ALIGNED_DEFINED
#endif

#ifndef VINT16x16_STORE_DEFINED
VEC_GENERIC_DBL_STORE(/* nothing */, 16, 16, 8)
# define VINT16x16_STORE_DEFINED
#endif

#ifndef VINT16x16_ADD_DEFINED
VEC_GENERIC_DBL_ADD(/* nothing */, 16, 16, 8)
# define VINT16x16_ADD_DEFINED
#endif

#ifndef VINT16x16_SUB_DEFINED
VEC_GENERIC_DBL_SUB(/* nothing */, 16, 16, 8)
# define VINT16x16_SUB_DEFINED
#endif

#ifndef VINT16x16_MUL_DEFINED
VEC_GENERIC_DBL_MUL(/* nothing */, 16, 16, 8)
# define VINT16x16_MUL_DEFINED
#endif

#ifndef VINT16x16_DIV_DEFINED
VEC_GENERIC_DBL_DIV(/* nothing */, 16, 16, 8)
# define VINT16x16_DIV_DEFINED
#endif

#ifndef VINT16x16_AVG_DEFINED
VEC_GENERIC_DBL_AVG(/* nothing */, 16, 16, 8)
# define VINT16x16_AVG_DEFINED
#endif

#ifndef VINT16x16_AND_DEFINED
VEC_GENERIC_DBL_AND(/* nothing */, 16, 16, 8)
# define VINT16x16_AND_DEFINED
#endif

#ifndef VINT16x16_OR_DEFINED
VEC_GENERIC_DBL_OR(/* nothing */, 16, 16, 8)
# define VINT16x16_OR_DEFINED
#endif

#ifndef VINT16x16_XOR_DEFINED
VEC_GENERIC_DBL_XOR(/* nothing */, 16, 16, 8)
# define VINT16x16_XOR_DEFINED
#endif

#ifndef VINT16x16_NOT_DEFINED
VEC_GENERIC_DBL_NOT(/* nothing */, 16, 16, 8)
# define VINT16x16_NOT_DEFINED
#endif

#ifndef VINT16x16_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(/* nothing */, 16, 16, 8)
# define VINT16x16_CMPLT_DEFINED
#endif

#ifndef VINT16x16_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(/* nothing */, 16, 16, 8)
# define VINT16x16_CMPEQ_DEFINED
#endif

#ifndef VINT16x16_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(/* nothing */, 16, 16, 8)
# define VINT16x16_CMPGT_DEFINED
#endif

#ifndef VINT16x16_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(/* nothing */, 16, 16, 8)
# define VINT16x16_CMPLE_DEFINED
#endif

#ifndef VINT16x16_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(/* nothing */, 16, 16, 8)
# define VINT16x16_CMPGE_DEFINED
#endif

#ifndef VINT16x16_MIN_DEFINED
VEC_GENERIC_DBL_MIN(/* nothing */, 16, 16, 8)
# define VINT16x16_MIN_DEFINED
#endif

#ifndef VINT16x16_MAX_DEFINED
VEC_GENERIC_DBL_MAX(/* nothing */, 16, 16, 8)
# define VINT16x16_MAX_DEFINED
#endif

#ifndef VINT16x16_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(/* nothing */, 16, 16, 8)
# define VINT16x16_RSHIFT_DEFINED
#endif

#ifndef VINT16x16_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(/* nothing */, 16, 16, 8)
# define VINT16x16_LRSHIFT_DEFINED
#endif

#ifndef VINT16x16_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(/* nothing */, 16, 16, 8)
# define VINT16x16_LSHIFT_DEFINED
#endif



/* vuint16x16 */

/* Generic fallbacks for vuint16x16 (the 'u' argument selects the unsigned
 * variant in the generated function names). VEC_GENERIC_DBL_*(sign, bits,
 * size, halfsize) macros are defined earlier in this file; the trailing
 * argument (8) is half the lane count, which suggests each op is composed
 * from two vuint16x8 halves — confirm against the macro definitions. Each
 * op is guarded by its *_DEFINED macro so a platform backend that already
 * provided it is not overridden. */
#ifndef VUINT16x16_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(u, 16, 16, 8)
# define VUINT16x16_SPLAT_DEFINED
#endif

#ifndef VUINT16x16_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(u, 16, 16, 8)
# define VUINT16x16_LOAD_ALIGNED_DEFINED
#endif

#ifndef VUINT16x16_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(u, 16, 16, 8)
# define VUINT16x16_LOAD_DEFINED
#endif

#ifndef VUINT16x16_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(u, 16, 16, 8)
# define VUINT16x16_STORE_ALIGNED_DEFINED
#endif

#ifndef VUINT16x16_STORE_DEFINED
VEC_GENERIC_DBL_STORE(u, 16, 16, 8)
# define VUINT16x16_STORE_DEFINED
#endif

#ifndef VUINT16x16_ADD_DEFINED
VEC_GENERIC_DBL_ADD(u, 16, 16, 8)
# define VUINT16x16_ADD_DEFINED
#endif

#ifndef VUINT16x16_SUB_DEFINED
VEC_GENERIC_DBL_SUB(u, 16, 16, 8)
# define VUINT16x16_SUB_DEFINED
#endif

#ifndef VUINT16x16_MUL_DEFINED
VEC_GENERIC_DBL_MUL(u, 16, 16, 8)
# define VUINT16x16_MUL_DEFINED
#endif

#ifndef VUINT16x16_DIV_DEFINED
VEC_GENERIC_DBL_DIV(u, 16, 16, 8)
# define VUINT16x16_DIV_DEFINED
#endif

#ifndef VUINT16x16_AVG_DEFINED
VEC_GENERIC_DBL_AVG(u, 16, 16, 8)
# define VUINT16x16_AVG_DEFINED
#endif

#ifndef VUINT16x16_AND_DEFINED
VEC_GENERIC_DBL_AND(u, 16, 16, 8)
# define VUINT16x16_AND_DEFINED
#endif

#ifndef VUINT16x16_OR_DEFINED
VEC_GENERIC_DBL_OR(u, 16, 16, 8)
# define VUINT16x16_OR_DEFINED
#endif

#ifndef VUINT16x16_XOR_DEFINED
VEC_GENERIC_DBL_XOR(u, 16, 16, 8)
# define VUINT16x16_XOR_DEFINED
#endif

#ifndef VUINT16x16_NOT_DEFINED
VEC_GENERIC_DBL_NOT(u, 16, 16, 8)
# define VUINT16x16_NOT_DEFINED
#endif

#ifndef VUINT16x16_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(u, 16, 16, 8)
# define VUINT16x16_CMPLT_DEFINED
#endif

#ifndef VUINT16x16_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(u, 16, 16, 8)
# define VUINT16x16_CMPEQ_DEFINED
#endif

#ifndef VUINT16x16_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(u, 16, 16, 8)
# define VUINT16x16_CMPGT_DEFINED
#endif

#ifndef VUINT16x16_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(u, 16, 16, 8)
# define VUINT16x16_CMPLE_DEFINED
#endif

#ifndef VUINT16x16_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(u, 16, 16, 8)
# define VUINT16x16_CMPGE_DEFINED
#endif

#ifndef VUINT16x16_MIN_DEFINED
VEC_GENERIC_DBL_MIN(u, 16, 16, 8)
# define VUINT16x16_MIN_DEFINED
#endif

#ifndef VUINT16x16_MAX_DEFINED
VEC_GENERIC_DBL_MAX(u, 16, 16, 8)
# define VUINT16x16_MAX_DEFINED
#endif

#ifndef VUINT16x16_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(u, 16, 16, 8)
# define VUINT16x16_RSHIFT_DEFINED
#endif

#ifndef VUINT16x16_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(u, 16, 16, 8)
# define VUINT16x16_LRSHIFT_DEFINED
#endif

#ifndef VUINT16x16_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(u, 16, 16, 8)
# define VUINT16x16_LSHIFT_DEFINED
#endif



/* vint16x32 */

/* Generic fallbacks for vint16x32. VEC_GENERIC_DBL_*(sign, bits, size,
 * halfsize) macros are defined earlier in this file; the trailing argument
 * (16) is half the lane count, which suggests each op is composed from two
 * vint16x16 halves — confirm against the macro definitions. Each op is
 * guarded by its *_DEFINED macro so a platform backend that already
 * provided it is not overridden. */
#ifndef VINT16x32_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(/* nothing */, 16, 32, 16)
# define VINT16x32_SPLAT_DEFINED
#endif

#ifndef VINT16x32_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(/* nothing */, 16, 32, 16)
# define VINT16x32_LOAD_ALIGNED_DEFINED
#endif

#ifndef VINT16x32_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(/* nothing */, 16, 32, 16)
# define VINT16x32_LOAD_DEFINED
#endif

#ifndef VINT16x32_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(/* nothing */, 16, 32, 16)
# define VINT16x32_STORE_ALIGNED_DEFINED
#endif

#ifndef VINT16x32_STORE_DEFINED
VEC_GENERIC_DBL_STORE(/* nothing */, 16, 32, 16)
# define VINT16x32_STORE_DEFINED
#endif

#ifndef VINT16x32_ADD_DEFINED
VEC_GENERIC_DBL_ADD(/* nothing */, 16, 32, 16)
# define VINT16x32_ADD_DEFINED
#endif

#ifndef VINT16x32_SUB_DEFINED
VEC_GENERIC_DBL_SUB(/* nothing */, 16, 32, 16)
# define VINT16x32_SUB_DEFINED
#endif

#ifndef VINT16x32_MUL_DEFINED
VEC_GENERIC_DBL_MUL(/* nothing */, 16, 32, 16)
# define VINT16x32_MUL_DEFINED
#endif

#ifndef VINT16x32_DIV_DEFINED
VEC_GENERIC_DBL_DIV(/* nothing */, 16, 32, 16)
# define VINT16x32_DIV_DEFINED
#endif

#ifndef VINT16x32_AVG_DEFINED
VEC_GENERIC_DBL_AVG(/* nothing */, 16, 32, 16)
# define VINT16x32_AVG_DEFINED
#endif

#ifndef VINT16x32_AND_DEFINED
VEC_GENERIC_DBL_AND(/* nothing */, 16, 32, 16)
# define VINT16x32_AND_DEFINED
#endif

#ifndef VINT16x32_OR_DEFINED
VEC_GENERIC_DBL_OR(/* nothing */, 16, 32, 16)
# define VINT16x32_OR_DEFINED
#endif

#ifndef VINT16x32_XOR_DEFINED
VEC_GENERIC_DBL_XOR(/* nothing */, 16, 32, 16)
# define VINT16x32_XOR_DEFINED
#endif

#ifndef VINT16x32_NOT_DEFINED
VEC_GENERIC_DBL_NOT(/* nothing */, 16, 32, 16)
# define VINT16x32_NOT_DEFINED
#endif

#ifndef VINT16x32_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(/* nothing */, 16, 32, 16)
# define VINT16x32_CMPLT_DEFINED
#endif

#ifndef VINT16x32_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(/* nothing */, 16, 32, 16)
# define VINT16x32_CMPEQ_DEFINED
#endif

#ifndef VINT16x32_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(/* nothing */, 16, 32, 16)
# define VINT16x32_CMPGT_DEFINED
#endif

#ifndef VINT16x32_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(/* nothing */, 16, 32, 16)
# define VINT16x32_CMPLE_DEFINED
#endif

#ifndef VINT16x32_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(/* nothing */, 16, 32, 16)
# define VINT16x32_CMPGE_DEFINED
#endif

#ifndef VINT16x32_MIN_DEFINED
VEC_GENERIC_DBL_MIN(/* nothing */, 16, 32, 16)
# define VINT16x32_MIN_DEFINED
#endif

#ifndef VINT16x32_MAX_DEFINED
VEC_GENERIC_DBL_MAX(/* nothing */, 16, 32, 16)
# define VINT16x32_MAX_DEFINED
#endif

#ifndef VINT16x32_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(/* nothing */, 16, 32, 16)
# define VINT16x32_RSHIFT_DEFINED
#endif

#ifndef VINT16x32_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(/* nothing */, 16, 32, 16)
# define VINT16x32_LRSHIFT_DEFINED
#endif

#ifndef VINT16x32_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(/* nothing */, 16, 32, 16)
# define VINT16x32_LSHIFT_DEFINED
#endif



/* vuint16x32 */

/* Generic fallbacks for vuint16x32 (the 'u' argument selects the unsigned
 * variant). VEC_GENERIC_DBL_*(sign, bits, size, halfsize) macros are
 * defined earlier in this file; the trailing argument (16) is half the
 * lane count, which suggests each op is composed from two vuint16x16
 * halves — confirm against the macro definitions. Each op is guarded by
 * its *_DEFINED macro so platform backends are not overridden. */
#ifndef VUINT16x32_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(u, 16, 32, 16)
# define VUINT16x32_SPLAT_DEFINED
#endif

#ifndef VUINT16x32_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(u, 16, 32, 16)
# define VUINT16x32_LOAD_ALIGNED_DEFINED
#endif

#ifndef VUINT16x32_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(u, 16, 32, 16)
# define VUINT16x32_LOAD_DEFINED
#endif

#ifndef VUINT16x32_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(u, 16, 32, 16)
# define VUINT16x32_STORE_ALIGNED_DEFINED
#endif

#ifndef VUINT16x32_STORE_DEFINED
VEC_GENERIC_DBL_STORE(u, 16, 32, 16)
# define VUINT16x32_STORE_DEFINED
#endif

#ifndef VUINT16x32_ADD_DEFINED
VEC_GENERIC_DBL_ADD(u, 16, 32, 16)
# define VUINT16x32_ADD_DEFINED
#endif

#ifndef VUINT16x32_SUB_DEFINED
VEC_GENERIC_DBL_SUB(u, 16, 32, 16)
# define VUINT16x32_SUB_DEFINED
#endif

#ifndef VUINT16x32_MUL_DEFINED
VEC_GENERIC_DBL_MUL(u, 16, 32, 16)
# define VUINT16x32_MUL_DEFINED
#endif

#ifndef VUINT16x32_DIV_DEFINED
VEC_GENERIC_DBL_DIV(u, 16, 32, 16)
# define VUINT16x32_DIV_DEFINED
#endif

#ifndef VUINT16x32_AVG_DEFINED
VEC_GENERIC_DBL_AVG(u, 16, 32, 16)
# define VUINT16x32_AVG_DEFINED
#endif

#ifndef VUINT16x32_AND_DEFINED
VEC_GENERIC_DBL_AND(u, 16, 32, 16)
# define VUINT16x32_AND_DEFINED
#endif

#ifndef VUINT16x32_OR_DEFINED
VEC_GENERIC_DBL_OR(u, 16, 32, 16)
# define VUINT16x32_OR_DEFINED
#endif

#ifndef VUINT16x32_XOR_DEFINED
VEC_GENERIC_DBL_XOR(u, 16, 32, 16)
# define VUINT16x32_XOR_DEFINED
#endif

#ifndef VUINT16x32_NOT_DEFINED
VEC_GENERIC_DBL_NOT(u, 16, 32, 16)
# define VUINT16x32_NOT_DEFINED
#endif

#ifndef VUINT16x32_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(u, 16, 32, 16)
# define VUINT16x32_CMPLT_DEFINED
#endif

#ifndef VUINT16x32_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(u, 16, 32, 16)
# define VUINT16x32_CMPEQ_DEFINED
#endif

#ifndef VUINT16x32_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(u, 16, 32, 16)
# define VUINT16x32_CMPGT_DEFINED
#endif

#ifndef VUINT16x32_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(u, 16, 32, 16)
# define VUINT16x32_CMPLE_DEFINED
#endif

#ifndef VUINT16x32_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(u, 16, 32, 16)
# define VUINT16x32_CMPGE_DEFINED
#endif

#ifndef VUINT16x32_MIN_DEFINED
VEC_GENERIC_DBL_MIN(u, 16, 32, 16)
# define VUINT16x32_MIN_DEFINED
#endif

#ifndef VUINT16x32_MAX_DEFINED
VEC_GENERIC_DBL_MAX(u, 16, 32, 16)
# define VUINT16x32_MAX_DEFINED
#endif

#ifndef VUINT16x32_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(u, 16, 32, 16)
# define VUINT16x32_RSHIFT_DEFINED
#endif

#ifndef VUINT16x32_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(u, 16, 32, 16)
# define VUINT16x32_LRSHIFT_DEFINED
#endif

#ifndef VUINT16x32_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(u, 16, 32, 16)
# define VUINT16x32_LSHIFT_DEFINED
#endif



/* vint32x2 */

/* Generic fallbacks for vint32x2, the base (non-DBL) case: the
 * VEC_GENERIC_*(sign, bits, size) macros defined at the top of this file
 * expand to plain per-element loops over vec.generic[] (see
 * VEC_GENERIC_OPERATION). Each op is guarded by its *_DEFINED macro so a
 * platform backend that already provided it is not overridden. */
#ifndef VINT32x2_SPLAT_DEFINED
VEC_GENERIC_SPLAT(/* nothing */, 32, 2)
# define VINT32x2_SPLAT_DEFINED
#endif
#ifndef VINT32x2_LOAD_ALIGNED_DEFINED
VEC_GENERIC_LOAD_ALIGNED(/* nothing */, 32, 2)
# define VINT32x2_LOAD_ALIGNED_DEFINED
#endif
#ifndef VINT32x2_LOAD_DEFINED
VEC_GENERIC_LOAD(/* nothing */, 32, 2)
# define VINT32x2_LOAD_DEFINED
#endif
#ifndef VINT32x2_STORE_ALIGNED_DEFINED
VEC_GENERIC_STORE_ALIGNED(/* nothing */, 32, 2)
# define VINT32x2_STORE_ALIGNED_DEFINED
#endif
#ifndef VINT32x2_STORE_DEFINED
VEC_GENERIC_STORE(/* nothing */, 32, 2)
# define VINT32x2_STORE_DEFINED
#endif
#ifndef VINT32x2_ADD_DEFINED
VEC_GENERIC_ADD(/* nothing */, 32, 2)
# define VINT32x2_ADD_DEFINED
#endif
#ifndef VINT32x2_SUB_DEFINED
VEC_GENERIC_SUB(/* nothing */, 32, 2)
# define VINT32x2_SUB_DEFINED
#endif
#ifndef VINT32x2_MUL_DEFINED
VEC_GENERIC_MUL(/* nothing */, 32, 2)
# define VINT32x2_MUL_DEFINED
#endif
#ifndef VINT32x2_DIV_DEFINED
VEC_GENERIC_DIV(/* nothing */, 32, 2)
# define VINT32x2_DIV_DEFINED
#endif
#ifndef VINT32x2_AVG_DEFINED
VEC_GENERIC_AVG(/* nothing */, 32, 2)
# define VINT32x2_AVG_DEFINED
#endif
#ifndef VINT32x2_AND_DEFINED
VEC_GENERIC_AND(/* nothing */, 32, 2)
# define VINT32x2_AND_DEFINED
#endif
#ifndef VINT32x2_OR_DEFINED
VEC_GENERIC_OR(/* nothing */, 32, 2)
# define VINT32x2_OR_DEFINED
#endif
#ifndef VINT32x2_XOR_DEFINED
VEC_GENERIC_XOR(/* nothing */, 32, 2)
# define VINT32x2_XOR_DEFINED
#endif
#ifndef VINT32x2_NOT_DEFINED
VEC_GENERIC_NOT(/* nothing */, 32, 2)
# define VINT32x2_NOT_DEFINED
#endif
#ifndef VINT32x2_CMPLT_DEFINED
VEC_GENERIC_CMPLT(/* nothing */, 32, 2)
# define VINT32x2_CMPLT_DEFINED
#endif
#ifndef VINT32x2_CMPEQ_DEFINED
VEC_GENERIC_CMPEQ(/* nothing */, 32, 2)
# define VINT32x2_CMPEQ_DEFINED
#endif
#ifndef VINT32x2_CMPGT_DEFINED
VEC_GENERIC_CMPGT(/* nothing */, 32, 2)
# define VINT32x2_CMPGT_DEFINED
#endif
#ifndef VINT32x2_CMPLE_DEFINED
VEC_GENERIC_CMPLE(/* nothing */, 32, 2)
# define VINT32x2_CMPLE_DEFINED
#endif
#ifndef VINT32x2_CMPGE_DEFINED
VEC_GENERIC_CMPGE(/* nothing */, 32, 2)
# define VINT32x2_CMPGE_DEFINED
#endif
#ifndef VINT32x2_MIN_DEFINED
VEC_GENERIC_MIN(/* nothing */, 32, 2)
# define VINT32x2_MIN_DEFINED
#endif
#ifndef VINT32x2_MAX_DEFINED
VEC_GENERIC_MAX(/* nothing */, 32, 2)
# define VINT32x2_MAX_DEFINED
#endif
#ifndef VINT32x2_RSHIFT_DEFINED
VEC_GENERIC_RSHIFT(/* nothing */, 32, 2)
# define VINT32x2_RSHIFT_DEFINED
#endif
#ifndef VINT32x2_LRSHIFT_DEFINED
VEC_GENERIC_LRSHIFT(/* nothing */, 32, 2)
# define VINT32x2_LRSHIFT_DEFINED
#endif
#ifndef VINT32x2_LSHIFT_DEFINED
VEC_GENERIC_LSHIFT(/* nothing */, 32, 2)
# define VINT32x2_LSHIFT_DEFINED
#endif


/* vuint32x2 */

/* Generic fallbacks for vuint32x2, the base (non-DBL) case: the
 * VEC_GENERIC_*(sign, bits, size) macros defined at the top of this file
 * expand to plain per-element loops over vec.generic[] (see
 * VEC_GENERIC_OPERATION); the 'u' argument selects the unsigned variant.
 * Each op is guarded by its *_DEFINED macro so a platform backend that
 * already provided it is not overridden. */
#ifndef VUINT32x2_SPLAT_DEFINED
VEC_GENERIC_SPLAT(u, 32, 2)
# define VUINT32x2_SPLAT_DEFINED
#endif
#ifndef VUINT32x2_LOAD_ALIGNED_DEFINED
VEC_GENERIC_LOAD_ALIGNED(u, 32, 2)
# define VUINT32x2_LOAD_ALIGNED_DEFINED
#endif
#ifndef VUINT32x2_LOAD_DEFINED
VEC_GENERIC_LOAD(u, 32, 2)
# define VUINT32x2_LOAD_DEFINED
#endif
#ifndef VUINT32x2_STORE_ALIGNED_DEFINED
VEC_GENERIC_STORE_ALIGNED(u, 32, 2)
# define VUINT32x2_STORE_ALIGNED_DEFINED
#endif
#ifndef VUINT32x2_STORE_DEFINED
VEC_GENERIC_STORE(u, 32, 2)
# define VUINT32x2_STORE_DEFINED
#endif
#ifndef VUINT32x2_ADD_DEFINED
VEC_GENERIC_ADD(u, 32, 2)
# define VUINT32x2_ADD_DEFINED
#endif
#ifndef VUINT32x2_SUB_DEFINED
VEC_GENERIC_SUB(u, 32, 2)
# define VUINT32x2_SUB_DEFINED
#endif
#ifndef VUINT32x2_MUL_DEFINED
VEC_GENERIC_MUL(u, 32, 2)
# define VUINT32x2_MUL_DEFINED
#endif
#ifndef VUINT32x2_DIV_DEFINED
VEC_GENERIC_DIV(u, 32, 2)
# define VUINT32x2_DIV_DEFINED
#endif
#ifndef VUINT32x2_AVG_DEFINED
VEC_GENERIC_AVG(u, 32, 2)
# define VUINT32x2_AVG_DEFINED
#endif
#ifndef VUINT32x2_AND_DEFINED
VEC_GENERIC_AND(u, 32, 2)
# define VUINT32x2_AND_DEFINED
#endif
#ifndef VUINT32x2_OR_DEFINED
VEC_GENERIC_OR(u, 32, 2)
# define VUINT32x2_OR_DEFINED
#endif
#ifndef VUINT32x2_XOR_DEFINED
VEC_GENERIC_XOR(u, 32, 2)
# define VUINT32x2_XOR_DEFINED
#endif
#ifndef VUINT32x2_NOT_DEFINED
VEC_GENERIC_NOT(u, 32, 2)
# define VUINT32x2_NOT_DEFINED
#endif
#ifndef VUINT32x2_CMPLT_DEFINED
VEC_GENERIC_CMPLT(u, 32, 2)
# define VUINT32x2_CMPLT_DEFINED
#endif
#ifndef VUINT32x2_CMPEQ_DEFINED
VEC_GENERIC_CMPEQ(u, 32, 2)
# define VUINT32x2_CMPEQ_DEFINED
#endif
#ifndef VUINT32x2_CMPGT_DEFINED
VEC_GENERIC_CMPGT(u, 32, 2)
# define VUINT32x2_CMPGT_DEFINED
#endif
#ifndef VUINT32x2_CMPLE_DEFINED
VEC_GENERIC_CMPLE(u, 32, 2)
# define VUINT32x2_CMPLE_DEFINED
#endif
#ifndef VUINT32x2_CMPGE_DEFINED
VEC_GENERIC_CMPGE(u, 32, 2)
# define VUINT32x2_CMPGE_DEFINED
#endif
#ifndef VUINT32x2_MIN_DEFINED
VEC_GENERIC_MIN(u, 32, 2)
# define VUINT32x2_MIN_DEFINED
#endif
#ifndef VUINT32x2_MAX_DEFINED
VEC_GENERIC_MAX(u, 32, 2)
# define VUINT32x2_MAX_DEFINED
#endif
#ifndef VUINT32x2_RSHIFT_DEFINED
VEC_GENERIC_RSHIFT(u, 32, 2)
# define VUINT32x2_RSHIFT_DEFINED
#endif
#ifndef VUINT32x2_LRSHIFT_DEFINED
VEC_GENERIC_LRSHIFT(u, 32, 2)
# define VUINT32x2_LRSHIFT_DEFINED
#endif
#ifndef VUINT32x2_LSHIFT_DEFINED
VEC_GENERIC_LSHIFT(u, 32, 2)
# define VUINT32x2_LSHIFT_DEFINED
#endif


/* vint32x4 */

/* Generic fallbacks for vint32x4. VEC_GENERIC_DBL_*(sign, bits, size,
 * halfsize) macros are defined earlier in this file; the trailing argument
 * (2) is half the lane count, which suggests each op is composed from two
 * vint32x2 halves — confirm against the macro definitions. Each op is
 * guarded by its *_DEFINED macro so a platform backend that already
 * provided it is not overridden. */
#ifndef VINT32x4_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(/* nothing */, 32, 4, 2)
# define VINT32x4_SPLAT_DEFINED
#endif

#ifndef VINT32x4_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(/* nothing */, 32, 4, 2)
# define VINT32x4_LOAD_ALIGNED_DEFINED
#endif

#ifndef VINT32x4_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(/* nothing */, 32, 4, 2)
# define VINT32x4_LOAD_DEFINED
#endif

#ifndef VINT32x4_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(/* nothing */, 32, 4, 2)
# define VINT32x4_STORE_ALIGNED_DEFINED
#endif

#ifndef VINT32x4_STORE_DEFINED
VEC_GENERIC_DBL_STORE(/* nothing */, 32, 4, 2)
# define VINT32x4_STORE_DEFINED
#endif

#ifndef VINT32x4_ADD_DEFINED
VEC_GENERIC_DBL_ADD(/* nothing */, 32, 4, 2)
# define VINT32x4_ADD_DEFINED
#endif

#ifndef VINT32x4_SUB_DEFINED
VEC_GENERIC_DBL_SUB(/* nothing */, 32, 4, 2)
# define VINT32x4_SUB_DEFINED
#endif

#ifndef VINT32x4_MUL_DEFINED
VEC_GENERIC_DBL_MUL(/* nothing */, 32, 4, 2)
# define VINT32x4_MUL_DEFINED
#endif

#ifndef VINT32x4_DIV_DEFINED
VEC_GENERIC_DBL_DIV(/* nothing */, 32, 4, 2)
# define VINT32x4_DIV_DEFINED
#endif

#ifndef VINT32x4_AVG_DEFINED
VEC_GENERIC_DBL_AVG(/* nothing */, 32, 4, 2)
# define VINT32x4_AVG_DEFINED
#endif

#ifndef VINT32x4_AND_DEFINED
VEC_GENERIC_DBL_AND(/* nothing */, 32, 4, 2)
# define VINT32x4_AND_DEFINED
#endif

#ifndef VINT32x4_OR_DEFINED
VEC_GENERIC_DBL_OR(/* nothing */, 32, 4, 2)
# define VINT32x4_OR_DEFINED
#endif

#ifndef VINT32x4_XOR_DEFINED
VEC_GENERIC_DBL_XOR(/* nothing */, 32, 4, 2)
# define VINT32x4_XOR_DEFINED
#endif

#ifndef VINT32x4_NOT_DEFINED
VEC_GENERIC_DBL_NOT(/* nothing */, 32, 4, 2)
# define VINT32x4_NOT_DEFINED
#endif

#ifndef VINT32x4_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(/* nothing */, 32, 4, 2)
# define VINT32x4_CMPLT_DEFINED
#endif

#ifndef VINT32x4_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(/* nothing */, 32, 4, 2)
# define VINT32x4_CMPEQ_DEFINED
#endif

#ifndef VINT32x4_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(/* nothing */, 32, 4, 2)
# define VINT32x4_CMPGT_DEFINED
#endif

#ifndef VINT32x4_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(/* nothing */, 32, 4, 2)
# define VINT32x4_CMPLE_DEFINED
#endif

#ifndef VINT32x4_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(/* nothing */, 32, 4, 2)
# define VINT32x4_CMPGE_DEFINED
#endif

#ifndef VINT32x4_MIN_DEFINED
VEC_GENERIC_DBL_MIN(/* nothing */, 32, 4, 2)
# define VINT32x4_MIN_DEFINED
#endif

#ifndef VINT32x4_MAX_DEFINED
VEC_GENERIC_DBL_MAX(/* nothing */, 32, 4, 2)
# define VINT32x4_MAX_DEFINED
#endif

#ifndef VINT32x4_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(/* nothing */, 32, 4, 2)
# define VINT32x4_RSHIFT_DEFINED
#endif

#ifndef VINT32x4_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(/* nothing */, 32, 4, 2)
# define VINT32x4_LRSHIFT_DEFINED
#endif

#ifndef VINT32x4_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(/* nothing */, 32, 4, 2)
# define VINT32x4_LSHIFT_DEFINED
#endif



/* vuint32x4 */

/* Generic fallbacks for vuint32x4 (the 'u' argument selects the unsigned
 * variant). VEC_GENERIC_DBL_*(sign, bits, size, halfsize) macros are
 * defined earlier in this file; the trailing argument (2) is half the lane
 * count, which suggests each op is composed from two vuint32x2 halves —
 * confirm against the macro definitions. Each op is guarded by its
 * *_DEFINED macro so platform backends are not overridden. */
#ifndef VUINT32x4_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(u, 32, 4, 2)
# define VUINT32x4_SPLAT_DEFINED
#endif

#ifndef VUINT32x4_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(u, 32, 4, 2)
# define VUINT32x4_LOAD_ALIGNED_DEFINED
#endif

#ifndef VUINT32x4_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(u, 32, 4, 2)
# define VUINT32x4_LOAD_DEFINED
#endif

#ifndef VUINT32x4_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(u, 32, 4, 2)
# define VUINT32x4_STORE_ALIGNED_DEFINED
#endif

#ifndef VUINT32x4_STORE_DEFINED
VEC_GENERIC_DBL_STORE(u, 32, 4, 2)
# define VUINT32x4_STORE_DEFINED
#endif

#ifndef VUINT32x4_ADD_DEFINED
VEC_GENERIC_DBL_ADD(u, 32, 4, 2)
# define VUINT32x4_ADD_DEFINED
#endif

#ifndef VUINT32x4_SUB_DEFINED
VEC_GENERIC_DBL_SUB(u, 32, 4, 2)
# define VUINT32x4_SUB_DEFINED
#endif

#ifndef VUINT32x4_MUL_DEFINED
VEC_GENERIC_DBL_MUL(u, 32, 4, 2)
# define VUINT32x4_MUL_DEFINED
#endif

#ifndef VUINT32x4_DIV_DEFINED
VEC_GENERIC_DBL_DIV(u, 32, 4, 2)
# define VUINT32x4_DIV_DEFINED
#endif

#ifndef VUINT32x4_AVG_DEFINED
VEC_GENERIC_DBL_AVG(u, 32, 4, 2)
# define VUINT32x4_AVG_DEFINED
#endif

#ifndef VUINT32x4_AND_DEFINED
VEC_GENERIC_DBL_AND(u, 32, 4, 2)
# define VUINT32x4_AND_DEFINED
#endif

#ifndef VUINT32x4_OR_DEFINED
VEC_GENERIC_DBL_OR(u, 32, 4, 2)
# define VUINT32x4_OR_DEFINED
#endif

#ifndef VUINT32x4_XOR_DEFINED
VEC_GENERIC_DBL_XOR(u, 32, 4, 2)
# define VUINT32x4_XOR_DEFINED
#endif

#ifndef VUINT32x4_NOT_DEFINED
VEC_GENERIC_DBL_NOT(u, 32, 4, 2)
# define VUINT32x4_NOT_DEFINED
#endif

#ifndef VUINT32x4_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(u, 32, 4, 2)
# define VUINT32x4_CMPLT_DEFINED
#endif

#ifndef VUINT32x4_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(u, 32, 4, 2)
# define VUINT32x4_CMPEQ_DEFINED
#endif

#ifndef VUINT32x4_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(u, 32, 4, 2)
# define VUINT32x4_CMPGT_DEFINED
#endif

#ifndef VUINT32x4_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(u, 32, 4, 2)
# define VUINT32x4_CMPLE_DEFINED
#endif

#ifndef VUINT32x4_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(u, 32, 4, 2)
# define VUINT32x4_CMPGE_DEFINED
#endif

#ifndef VUINT32x4_MIN_DEFINED
VEC_GENERIC_DBL_MIN(u, 32, 4, 2)
# define VUINT32x4_MIN_DEFINED
#endif

#ifndef VUINT32x4_MAX_DEFINED
VEC_GENERIC_DBL_MAX(u, 32, 4, 2)
# define VUINT32x4_MAX_DEFINED
#endif

#ifndef VUINT32x4_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(u, 32, 4, 2)
# define VUINT32x4_RSHIFT_DEFINED
#endif

#ifndef VUINT32x4_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(u, 32, 4, 2)
# define VUINT32x4_LRSHIFT_DEFINED
#endif

#ifndef VUINT32x4_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(u, 32, 4, 2)
# define VUINT32x4_LSHIFT_DEFINED
#endif



/* vint32x8 */

/* Generic fallbacks for vint32x8. VEC_GENERIC_DBL_*(sign, bits, size,
 * halfsize) macros are defined earlier in this file; the trailing argument
 * (4) is half the lane count, which suggests each op is composed from two
 * vint32x4 halves — confirm against the macro definitions. Each op is
 * guarded by its *_DEFINED macro so a platform backend that already
 * provided it is not overridden. */
#ifndef VINT32x8_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(/* nothing */, 32, 8, 4)
# define VINT32x8_SPLAT_DEFINED
#endif

#ifndef VINT32x8_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(/* nothing */, 32, 8, 4)
# define VINT32x8_LOAD_ALIGNED_DEFINED
#endif

#ifndef VINT32x8_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(/* nothing */, 32, 8, 4)
# define VINT32x8_LOAD_DEFINED
#endif

#ifndef VINT32x8_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(/* nothing */, 32, 8, 4)
# define VINT32x8_STORE_ALIGNED_DEFINED
#endif

#ifndef VINT32x8_STORE_DEFINED
VEC_GENERIC_DBL_STORE(/* nothing */, 32, 8, 4)
# define VINT32x8_STORE_DEFINED
#endif

#ifndef VINT32x8_ADD_DEFINED
VEC_GENERIC_DBL_ADD(/* nothing */, 32, 8, 4)
# define VINT32x8_ADD_DEFINED
#endif

#ifndef VINT32x8_SUB_DEFINED
VEC_GENERIC_DBL_SUB(/* nothing */, 32, 8, 4)
# define VINT32x8_SUB_DEFINED
#endif

#ifndef VINT32x8_MUL_DEFINED
VEC_GENERIC_DBL_MUL(/* nothing */, 32, 8, 4)
# define VINT32x8_MUL_DEFINED
#endif

#ifndef VINT32x8_DIV_DEFINED
VEC_GENERIC_DBL_DIV(/* nothing */, 32, 8, 4)
# define VINT32x8_DIV_DEFINED
#endif

#ifndef VINT32x8_AVG_DEFINED
VEC_GENERIC_DBL_AVG(/* nothing */, 32, 8, 4)
# define VINT32x8_AVG_DEFINED
#endif

#ifndef VINT32x8_AND_DEFINED
VEC_GENERIC_DBL_AND(/* nothing */, 32, 8, 4)
# define VINT32x8_AND_DEFINED
#endif

#ifndef VINT32x8_OR_DEFINED
VEC_GENERIC_DBL_OR(/* nothing */, 32, 8, 4)
# define VINT32x8_OR_DEFINED
#endif

#ifndef VINT32x8_XOR_DEFINED
VEC_GENERIC_DBL_XOR(/* nothing */, 32, 8, 4)
# define VINT32x8_XOR_DEFINED
#endif

#ifndef VINT32x8_NOT_DEFINED
VEC_GENERIC_DBL_NOT(/* nothing */, 32, 8, 4)
# define VINT32x8_NOT_DEFINED
#endif

#ifndef VINT32x8_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(/* nothing */, 32, 8, 4)
# define VINT32x8_CMPLT_DEFINED
#endif

#ifndef VINT32x8_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(/* nothing */, 32, 8, 4)
# define VINT32x8_CMPEQ_DEFINED
#endif

#ifndef VINT32x8_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(/* nothing */, 32, 8, 4)
# define VINT32x8_CMPGT_DEFINED
#endif

#ifndef VINT32x8_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(/* nothing */, 32, 8, 4)
# define VINT32x8_CMPLE_DEFINED
#endif

#ifndef VINT32x8_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(/* nothing */, 32, 8, 4)
# define VINT32x8_CMPGE_DEFINED
#endif

#ifndef VINT32x8_MIN_DEFINED
VEC_GENERIC_DBL_MIN(/* nothing */, 32, 8, 4)
# define VINT32x8_MIN_DEFINED
#endif

#ifndef VINT32x8_MAX_DEFINED
VEC_GENERIC_DBL_MAX(/* nothing */, 32, 8, 4)
# define VINT32x8_MAX_DEFINED
#endif

#ifndef VINT32x8_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(/* nothing */, 32, 8, 4)
# define VINT32x8_RSHIFT_DEFINED
#endif

#ifndef VINT32x8_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(/* nothing */, 32, 8, 4)
# define VINT32x8_LRSHIFT_DEFINED
#endif

#ifndef VINT32x8_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(/* nothing */, 32, 8, 4)
# define VINT32x8_LSHIFT_DEFINED
#endif



/* vuint32x8 */

/* Generic fallbacks for vuint32x8 (the 'u' argument selects the unsigned
 * variant). VEC_GENERIC_DBL_*(sign, bits, size, halfsize) macros are
 * defined earlier in this file; the trailing argument (4) is half the lane
 * count, which suggests each op is composed from two vuint32x4 halves —
 * confirm against the macro definitions. Each op is guarded by its
 * *_DEFINED macro so platform backends are not overridden. */
#ifndef VUINT32x8_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(u, 32, 8, 4)
# define VUINT32x8_SPLAT_DEFINED
#endif

#ifndef VUINT32x8_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(u, 32, 8, 4)
# define VUINT32x8_LOAD_ALIGNED_DEFINED
#endif

#ifndef VUINT32x8_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(u, 32, 8, 4)
# define VUINT32x8_LOAD_DEFINED
#endif

#ifndef VUINT32x8_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(u, 32, 8, 4)
# define VUINT32x8_STORE_ALIGNED_DEFINED
#endif

#ifndef VUINT32x8_STORE_DEFINED
VEC_GENERIC_DBL_STORE(u, 32, 8, 4)
# define VUINT32x8_STORE_DEFINED
#endif

#ifndef VUINT32x8_ADD_DEFINED
VEC_GENERIC_DBL_ADD(u, 32, 8, 4)
# define VUINT32x8_ADD_DEFINED
#endif

#ifndef VUINT32x8_SUB_DEFINED
VEC_GENERIC_DBL_SUB(u, 32, 8, 4)
# define VUINT32x8_SUB_DEFINED
#endif

#ifndef VUINT32x8_MUL_DEFINED
VEC_GENERIC_DBL_MUL(u, 32, 8, 4)
# define VUINT32x8_MUL_DEFINED
#endif

#ifndef VUINT32x8_DIV_DEFINED
VEC_GENERIC_DBL_DIV(u, 32, 8, 4)
# define VUINT32x8_DIV_DEFINED
#endif

#ifndef VUINT32x8_AVG_DEFINED
VEC_GENERIC_DBL_AVG(u, 32, 8, 4)
# define VUINT32x8_AVG_DEFINED
#endif

#ifndef VUINT32x8_AND_DEFINED
VEC_GENERIC_DBL_AND(u, 32, 8, 4)
# define VUINT32x8_AND_DEFINED
#endif

#ifndef VUINT32x8_OR_DEFINED
VEC_GENERIC_DBL_OR(u, 32, 8, 4)
# define VUINT32x8_OR_DEFINED
#endif

#ifndef VUINT32x8_XOR_DEFINED
VEC_GENERIC_DBL_XOR(u, 32, 8, 4)
# define VUINT32x8_XOR_DEFINED
#endif

#ifndef VUINT32x8_NOT_DEFINED
VEC_GENERIC_DBL_NOT(u, 32, 8, 4)
# define VUINT32x8_NOT_DEFINED
#endif

#ifndef VUINT32x8_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(u, 32, 8, 4)
# define VUINT32x8_CMPLT_DEFINED
#endif

#ifndef VUINT32x8_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(u, 32, 8, 4)
# define VUINT32x8_CMPEQ_DEFINED
#endif

#ifndef VUINT32x8_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(u, 32, 8, 4)
# define VUINT32x8_CMPGT_DEFINED
#endif

#ifndef VUINT32x8_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(u, 32, 8, 4)
# define VUINT32x8_CMPLE_DEFINED
#endif

#ifndef VUINT32x8_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(u, 32, 8, 4)
# define VUINT32x8_CMPGE_DEFINED
#endif

#ifndef VUINT32x8_MIN_DEFINED
VEC_GENERIC_DBL_MIN(u, 32, 8, 4)
# define VUINT32x8_MIN_DEFINED
#endif

#ifndef VUINT32x8_MAX_DEFINED
VEC_GENERIC_DBL_MAX(u, 32, 8, 4)
# define VUINT32x8_MAX_DEFINED
#endif

#ifndef VUINT32x8_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(u, 32, 8, 4)
# define VUINT32x8_RSHIFT_DEFINED
#endif

#ifndef VUINT32x8_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(u, 32, 8, 4)
# define VUINT32x8_LRSHIFT_DEFINED
#endif

#ifndef VUINT32x8_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(u, 32, 8, 4)
# define VUINT32x8_LSHIFT_DEFINED
#endif



/* vint32x16 */

/* Generic fallbacks for vint32x16. VEC_GENERIC_DBL_*(sign, bits, size,
 * halfsize) macros are defined earlier in this file; the trailing argument
 * (8) is half the lane count, which suggests each op is composed from two
 * vint32x8 halves — confirm against the macro definitions. Each op is
 * guarded by its *_DEFINED macro so a platform backend that already
 * provided it is not overridden. */
#ifndef VINT32x16_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(/* nothing */, 32, 16, 8)
# define VINT32x16_SPLAT_DEFINED
#endif

#ifndef VINT32x16_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(/* nothing */, 32, 16, 8)
# define VINT32x16_LOAD_ALIGNED_DEFINED
#endif

#ifndef VINT32x16_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(/* nothing */, 32, 16, 8)
# define VINT32x16_LOAD_DEFINED
#endif

#ifndef VINT32x16_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(/* nothing */, 32, 16, 8)
# define VINT32x16_STORE_ALIGNED_DEFINED
#endif

#ifndef VINT32x16_STORE_DEFINED
VEC_GENERIC_DBL_STORE(/* nothing */, 32, 16, 8)
# define VINT32x16_STORE_DEFINED
#endif

#ifndef VINT32x16_ADD_DEFINED
VEC_GENERIC_DBL_ADD(/* nothing */, 32, 16, 8)
# define VINT32x16_ADD_DEFINED
#endif

#ifndef VINT32x16_SUB_DEFINED
VEC_GENERIC_DBL_SUB(/* nothing */, 32, 16, 8)
# define VINT32x16_SUB_DEFINED
#endif

#ifndef VINT32x16_MUL_DEFINED
VEC_GENERIC_DBL_MUL(/* nothing */, 32, 16, 8)
# define VINT32x16_MUL_DEFINED
#endif

#ifndef VINT32x16_DIV_DEFINED
VEC_GENERIC_DBL_DIV(/* nothing */, 32, 16, 8)
# define VINT32x16_DIV_DEFINED
#endif

#ifndef VINT32x16_AVG_DEFINED
VEC_GENERIC_DBL_AVG(/* nothing */, 32, 16, 8)
# define VINT32x16_AVG_DEFINED
#endif

#ifndef VINT32x16_AND_DEFINED
VEC_GENERIC_DBL_AND(/* nothing */, 32, 16, 8)
# define VINT32x16_AND_DEFINED
#endif

#ifndef VINT32x16_OR_DEFINED
VEC_GENERIC_DBL_OR(/* nothing */, 32, 16, 8)
# define VINT32x16_OR_DEFINED
#endif

#ifndef VINT32x16_XOR_DEFINED
VEC_GENERIC_DBL_XOR(/* nothing */, 32, 16, 8)
# define VINT32x16_XOR_DEFINED
#endif

#ifndef VINT32x16_NOT_DEFINED
VEC_GENERIC_DBL_NOT(/* nothing */, 32, 16, 8)
# define VINT32x16_NOT_DEFINED
#endif

#ifndef VINT32x16_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(/* nothing */, 32, 16, 8)
# define VINT32x16_CMPLT_DEFINED
#endif

#ifndef VINT32x16_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(/* nothing */, 32, 16, 8)
# define VINT32x16_CMPEQ_DEFINED
#endif

#ifndef VINT32x16_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(/* nothing */, 32, 16, 8)
# define VINT32x16_CMPGT_DEFINED
#endif

#ifndef VINT32x16_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(/* nothing */, 32, 16, 8)
# define VINT32x16_CMPLE_DEFINED
#endif

#ifndef VINT32x16_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(/* nothing */, 32, 16, 8)
# define VINT32x16_CMPGE_DEFINED
#endif

#ifndef VINT32x16_MIN_DEFINED
VEC_GENERIC_DBL_MIN(/* nothing */, 32, 16, 8)
# define VINT32x16_MIN_DEFINED
#endif

#ifndef VINT32x16_MAX_DEFINED
VEC_GENERIC_DBL_MAX(/* nothing */, 32, 16, 8)
# define VINT32x16_MAX_DEFINED
#endif

#ifndef VINT32x16_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(/* nothing */, 32, 16, 8)
# define VINT32x16_RSHIFT_DEFINED
#endif

#ifndef VINT32x16_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(/* nothing */, 32, 16, 8)
# define VINT32x16_LRSHIFT_DEFINED
#endif

#ifndef VINT32x16_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(/* nothing */, 32, 16, 8)
# define VINT32x16_LSHIFT_DEFINED
#endif



/* vuint32x16 */

/* Generic fallbacks for vuint32x16 (the 'u' argument selects the unsigned
 * variant). VEC_GENERIC_DBL_*(sign, bits, size, halfsize) macros are
 * defined earlier in this file; the trailing argument (8) is half the lane
 * count, which suggests each op is composed from two vuint32x8 halves —
 * confirm against the macro definitions. Each op is guarded by its
 * *_DEFINED macro so platform backends are not overridden. */
#ifndef VUINT32x16_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(u, 32, 16, 8)
# define VUINT32x16_SPLAT_DEFINED
#endif

#ifndef VUINT32x16_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(u, 32, 16, 8)
# define VUINT32x16_LOAD_ALIGNED_DEFINED
#endif

#ifndef VUINT32x16_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(u, 32, 16, 8)
# define VUINT32x16_LOAD_DEFINED
#endif

#ifndef VUINT32x16_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(u, 32, 16, 8)
# define VUINT32x16_STORE_ALIGNED_DEFINED
#endif

#ifndef VUINT32x16_STORE_DEFINED
VEC_GENERIC_DBL_STORE(u, 32, 16, 8)
# define VUINT32x16_STORE_DEFINED
#endif

#ifndef VUINT32x16_ADD_DEFINED
VEC_GENERIC_DBL_ADD(u, 32, 16, 8)
# define VUINT32x16_ADD_DEFINED
#endif

#ifndef VUINT32x16_SUB_DEFINED
VEC_GENERIC_DBL_SUB(u, 32, 16, 8)
# define VUINT32x16_SUB_DEFINED
#endif

#ifndef VUINT32x16_MUL_DEFINED
VEC_GENERIC_DBL_MUL(u, 32, 16, 8)
# define VUINT32x16_MUL_DEFINED
#endif

#ifndef VUINT32x16_DIV_DEFINED
VEC_GENERIC_DBL_DIV(u, 32, 16, 8)
# define VUINT32x16_DIV_DEFINED
#endif

#ifndef VUINT32x16_AVG_DEFINED
VEC_GENERIC_DBL_AVG(u, 32, 16, 8)
# define VUINT32x16_AVG_DEFINED
#endif

#ifndef VUINT32x16_AND_DEFINED
VEC_GENERIC_DBL_AND(u, 32, 16, 8)
# define VUINT32x16_AND_DEFINED
#endif

#ifndef VUINT32x16_OR_DEFINED
VEC_GENERIC_DBL_OR(u, 32, 16, 8)
# define VUINT32x16_OR_DEFINED
#endif

#ifndef VUINT32x16_XOR_DEFINED
VEC_GENERIC_DBL_XOR(u, 32, 16, 8)
# define VUINT32x16_XOR_DEFINED
#endif

#ifndef VUINT32x16_NOT_DEFINED
VEC_GENERIC_DBL_NOT(u, 32, 16, 8)
# define VUINT32x16_NOT_DEFINED
#endif

#ifndef VUINT32x16_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(u, 32, 16, 8)
# define VUINT32x16_CMPLT_DEFINED
#endif

#ifndef VUINT32x16_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(u, 32, 16, 8)
# define VUINT32x16_CMPEQ_DEFINED
#endif

#ifndef VUINT32x16_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(u, 32, 16, 8)
# define VUINT32x16_CMPGT_DEFINED
#endif

#ifndef VUINT32x16_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(u, 32, 16, 8)
# define VUINT32x16_CMPLE_DEFINED
#endif

#ifndef VUINT32x16_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(u, 32, 16, 8)
# define VUINT32x16_CMPGE_DEFINED
#endif

#ifndef VUINT32x16_MIN_DEFINED
VEC_GENERIC_DBL_MIN(u, 32, 16, 8)
# define VUINT32x16_MIN_DEFINED
#endif

#ifndef VUINT32x16_MAX_DEFINED
VEC_GENERIC_DBL_MAX(u, 32, 16, 8)
# define VUINT32x16_MAX_DEFINED
#endif

#ifndef VUINT32x16_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(u, 32, 16, 8)
# define VUINT32x16_RSHIFT_DEFINED
#endif

#ifndef VUINT32x16_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(u, 32, 16, 8)
# define VUINT32x16_LRSHIFT_DEFINED
#endif

#ifndef VUINT32x16_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(u, 32, 16, 8)
# define VUINT32x16_LSHIFT_DEFINED
#endif



/* vint64x2 */

/* Generic fallbacks for vint64x2, the base (non-DBL) case: the
 * VEC_GENERIC_*(sign, bits, size) macros defined at the top of this file
 * expand to plain per-element loops over vec.generic[] (see
 * VEC_GENERIC_OPERATION). Each op is guarded by its *_DEFINED macro so a
 * platform backend that already provided it is not overridden. */
#ifndef VINT64x2_SPLAT_DEFINED
VEC_GENERIC_SPLAT(/* nothing */, 64, 2)
# define VINT64x2_SPLAT_DEFINED
#endif
#ifndef VINT64x2_LOAD_ALIGNED_DEFINED
VEC_GENERIC_LOAD_ALIGNED(/* nothing */, 64, 2)
# define VINT64x2_LOAD_ALIGNED_DEFINED
#endif
#ifndef VINT64x2_LOAD_DEFINED
VEC_GENERIC_LOAD(/* nothing */, 64, 2)
# define VINT64x2_LOAD_DEFINED
#endif
#ifndef VINT64x2_STORE_ALIGNED_DEFINED
VEC_GENERIC_STORE_ALIGNED(/* nothing */, 64, 2)
# define VINT64x2_STORE_ALIGNED_DEFINED
#endif
#ifndef VINT64x2_STORE_DEFINED
VEC_GENERIC_STORE(/* nothing */, 64, 2)
# define VINT64x2_STORE_DEFINED
#endif
#ifndef VINT64x2_ADD_DEFINED
VEC_GENERIC_ADD(/* nothing */, 64, 2)
# define VINT64x2_ADD_DEFINED
#endif
#ifndef VINT64x2_SUB_DEFINED
VEC_GENERIC_SUB(/* nothing */, 64, 2)
# define VINT64x2_SUB_DEFINED
#endif
#ifndef VINT64x2_MUL_DEFINED
VEC_GENERIC_MUL(/* nothing */, 64, 2)
# define VINT64x2_MUL_DEFINED
#endif
#ifndef VINT64x2_DIV_DEFINED
VEC_GENERIC_DIV(/* nothing */, 64, 2)
# define VINT64x2_DIV_DEFINED
#endif
#ifndef VINT64x2_AVG_DEFINED
VEC_GENERIC_AVG(/* nothing */, 64, 2)
# define VINT64x2_AVG_DEFINED
#endif
#ifndef VINT64x2_AND_DEFINED
VEC_GENERIC_AND(/* nothing */, 64, 2)
# define VINT64x2_AND_DEFINED
#endif
#ifndef VINT64x2_OR_DEFINED
VEC_GENERIC_OR(/* nothing */, 64, 2)
# define VINT64x2_OR_DEFINED
#endif
#ifndef VINT64x2_XOR_DEFINED
VEC_GENERIC_XOR(/* nothing */, 64, 2)
# define VINT64x2_XOR_DEFINED
#endif
#ifndef VINT64x2_NOT_DEFINED
VEC_GENERIC_NOT(/* nothing */, 64, 2)
# define VINT64x2_NOT_DEFINED
#endif
#ifndef VINT64x2_CMPLT_DEFINED
VEC_GENERIC_CMPLT(/* nothing */, 64, 2)
# define VINT64x2_CMPLT_DEFINED
#endif
#ifndef VINT64x2_CMPEQ_DEFINED
VEC_GENERIC_CMPEQ(/* nothing */, 64, 2)
# define VINT64x2_CMPEQ_DEFINED
#endif
#ifndef VINT64x2_CMPGT_DEFINED
VEC_GENERIC_CMPGT(/* nothing */, 64, 2)
# define VINT64x2_CMPGT_DEFINED
#endif
#ifndef VINT64x2_CMPLE_DEFINED
VEC_GENERIC_CMPLE(/* nothing */, 64, 2)
# define VINT64x2_CMPLE_DEFINED
#endif
#ifndef VINT64x2_CMPGE_DEFINED
VEC_GENERIC_CMPGE(/* nothing */, 64, 2)
# define VINT64x2_CMPGE_DEFINED
#endif
#ifndef VINT64x2_MIN_DEFINED
VEC_GENERIC_MIN(/* nothing */, 64, 2)
# define VINT64x2_MIN_DEFINED
#endif
#ifndef VINT64x2_MAX_DEFINED
VEC_GENERIC_MAX(/* nothing */, 64, 2)
# define VINT64x2_MAX_DEFINED
#endif
#ifndef VINT64x2_RSHIFT_DEFINED
VEC_GENERIC_RSHIFT(/* nothing */, 64, 2)
# define VINT64x2_RSHIFT_DEFINED
#endif
#ifndef VINT64x2_LRSHIFT_DEFINED
VEC_GENERIC_LRSHIFT(/* nothing */, 64, 2)
# define VINT64x2_LRSHIFT_DEFINED
#endif
#ifndef VINT64x2_LSHIFT_DEFINED
VEC_GENERIC_LSHIFT(/* nothing */, 64, 2)
# define VINT64x2_LSHIFT_DEFINED
#endif


/* vuint64x2 (unsigned; generic scalar fallbacks behind *_DEFINED guards).
 * NOTE(review): generated label wrongly said "vint64x2" -- labels swapped
 * with the signed section above; fix in utils/gengeneric.c too. */

#ifndef VUINT64x2_SPLAT_DEFINED
VEC_GENERIC_SPLAT(u, 64, 2)
# define VUINT64x2_SPLAT_DEFINED
#endif
#ifndef VUINT64x2_LOAD_ALIGNED_DEFINED
VEC_GENERIC_LOAD_ALIGNED(u, 64, 2)
# define VUINT64x2_LOAD_ALIGNED_DEFINED
#endif
#ifndef VUINT64x2_LOAD_DEFINED
VEC_GENERIC_LOAD(u, 64, 2)
# define VUINT64x2_LOAD_DEFINED
#endif
#ifndef VUINT64x2_STORE_ALIGNED_DEFINED
VEC_GENERIC_STORE_ALIGNED(u, 64, 2)
# define VUINT64x2_STORE_ALIGNED_DEFINED
#endif
#ifndef VUINT64x2_STORE_DEFINED
VEC_GENERIC_STORE(u, 64, 2)
# define VUINT64x2_STORE_DEFINED
#endif
#ifndef VUINT64x2_ADD_DEFINED
VEC_GENERIC_ADD(u, 64, 2)
# define VUINT64x2_ADD_DEFINED
#endif
#ifndef VUINT64x2_SUB_DEFINED
VEC_GENERIC_SUB(u, 64, 2)
# define VUINT64x2_SUB_DEFINED
#endif
#ifndef VUINT64x2_MUL_DEFINED
VEC_GENERIC_MUL(u, 64, 2)
# define VUINT64x2_MUL_DEFINED
#endif
#ifndef VUINT64x2_DIV_DEFINED
VEC_GENERIC_DIV(u, 64, 2)
# define VUINT64x2_DIV_DEFINED
#endif
#ifndef VUINT64x2_AVG_DEFINED
VEC_GENERIC_AVG(u, 64, 2)
# define VUINT64x2_AVG_DEFINED
#endif
#ifndef VUINT64x2_AND_DEFINED
VEC_GENERIC_AND(u, 64, 2)
# define VUINT64x2_AND_DEFINED
#endif
#ifndef VUINT64x2_OR_DEFINED
VEC_GENERIC_OR(u, 64, 2)
# define VUINT64x2_OR_DEFINED
#endif
#ifndef VUINT64x2_XOR_DEFINED
VEC_GENERIC_XOR(u, 64, 2)
# define VUINT64x2_XOR_DEFINED
#endif
#ifndef VUINT64x2_NOT_DEFINED
VEC_GENERIC_NOT(u, 64, 2)
# define VUINT64x2_NOT_DEFINED
#endif
#ifndef VUINT64x2_CMPLT_DEFINED
VEC_GENERIC_CMPLT(u, 64, 2)
# define VUINT64x2_CMPLT_DEFINED
#endif
#ifndef VUINT64x2_CMPEQ_DEFINED
VEC_GENERIC_CMPEQ(u, 64, 2)
# define VUINT64x2_CMPEQ_DEFINED
#endif
#ifndef VUINT64x2_CMPGT_DEFINED
VEC_GENERIC_CMPGT(u, 64, 2)
# define VUINT64x2_CMPGT_DEFINED
#endif
#ifndef VUINT64x2_CMPLE_DEFINED
VEC_GENERIC_CMPLE(u, 64, 2)
# define VUINT64x2_CMPLE_DEFINED
#endif
#ifndef VUINT64x2_CMPGE_DEFINED
VEC_GENERIC_CMPGE(u, 64, 2)
# define VUINT64x2_CMPGE_DEFINED
#endif
#ifndef VUINT64x2_MIN_DEFINED
VEC_GENERIC_MIN(u, 64, 2)
# define VUINT64x2_MIN_DEFINED
#endif
#ifndef VUINT64x2_MAX_DEFINED
VEC_GENERIC_MAX(u, 64, 2)
# define VUINT64x2_MAX_DEFINED
#endif
#ifndef VUINT64x2_RSHIFT_DEFINED
VEC_GENERIC_RSHIFT(u, 64, 2)
# define VUINT64x2_RSHIFT_DEFINED
#endif
#ifndef VUINT64x2_LRSHIFT_DEFINED
VEC_GENERIC_LRSHIFT(u, 64, 2)
# define VUINT64x2_LRSHIFT_DEFINED
#endif
#ifndef VUINT64x2_LSHIFT_DEFINED
VEC_GENERIC_LSHIFT(u, 64, 2)
# define VUINT64x2_LSHIFT_DEFINED
#endif


/* vint64x4 (signed; built as two vint64x2 halves via the DBL macros).
 * NOTE(review): generated label wrongly said "vuint64x4" -- labels swapped
 * with the unsigned section below; fix in utils/gengeneric.c too. */

#ifndef VINT64x4_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(/* nothing */, 64, 4, 2)
# define VINT64x4_SPLAT_DEFINED
#endif

#ifndef VINT64x4_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(/* nothing */, 64, 4, 2)
# define VINT64x4_LOAD_ALIGNED_DEFINED
#endif

#ifndef VINT64x4_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(/* nothing */, 64, 4, 2)
# define VINT64x4_LOAD_DEFINED
#endif

#ifndef VINT64x4_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(/* nothing */, 64, 4, 2)
# define VINT64x4_STORE_ALIGNED_DEFINED
#endif

#ifndef VINT64x4_STORE_DEFINED
VEC_GENERIC_DBL_STORE(/* nothing */, 64, 4, 2)
# define VINT64x4_STORE_DEFINED
#endif

#ifndef VINT64x4_ADD_DEFINED
VEC_GENERIC_DBL_ADD(/* nothing */, 64, 4, 2)
# define VINT64x4_ADD_DEFINED
#endif

#ifndef VINT64x4_SUB_DEFINED
VEC_GENERIC_DBL_SUB(/* nothing */, 64, 4, 2)
# define VINT64x4_SUB_DEFINED
#endif

#ifndef VINT64x4_MUL_DEFINED
VEC_GENERIC_DBL_MUL(/* nothing */, 64, 4, 2)
# define VINT64x4_MUL_DEFINED
#endif

#ifndef VINT64x4_DIV_DEFINED
VEC_GENERIC_DBL_DIV(/* nothing */, 64, 4, 2)
# define VINT64x4_DIV_DEFINED
#endif

#ifndef VINT64x4_AVG_DEFINED
VEC_GENERIC_DBL_AVG(/* nothing */, 64, 4, 2)
# define VINT64x4_AVG_DEFINED
#endif

#ifndef VINT64x4_AND_DEFINED
VEC_GENERIC_DBL_AND(/* nothing */, 64, 4, 2)
# define VINT64x4_AND_DEFINED
#endif

#ifndef VINT64x4_OR_DEFINED
VEC_GENERIC_DBL_OR(/* nothing */, 64, 4, 2)
# define VINT64x4_OR_DEFINED
#endif

#ifndef VINT64x4_XOR_DEFINED
VEC_GENERIC_DBL_XOR(/* nothing */, 64, 4, 2)
# define VINT64x4_XOR_DEFINED
#endif

#ifndef VINT64x4_NOT_DEFINED
VEC_GENERIC_DBL_NOT(/* nothing */, 64, 4, 2)
# define VINT64x4_NOT_DEFINED
#endif

#ifndef VINT64x4_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(/* nothing */, 64, 4, 2)
# define VINT64x4_CMPLT_DEFINED
#endif

#ifndef VINT64x4_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(/* nothing */, 64, 4, 2)
# define VINT64x4_CMPEQ_DEFINED
#endif

#ifndef VINT64x4_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(/* nothing */, 64, 4, 2)
# define VINT64x4_CMPGT_DEFINED
#endif

#ifndef VINT64x4_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(/* nothing */, 64, 4, 2)
# define VINT64x4_CMPLE_DEFINED
#endif

#ifndef VINT64x4_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(/* nothing */, 64, 4, 2)
# define VINT64x4_CMPGE_DEFINED
#endif

#ifndef VINT64x4_MIN_DEFINED
VEC_GENERIC_DBL_MIN(/* nothing */, 64, 4, 2)
# define VINT64x4_MIN_DEFINED
#endif

#ifndef VINT64x4_MAX_DEFINED
VEC_GENERIC_DBL_MAX(/* nothing */, 64, 4, 2)
# define VINT64x4_MAX_DEFINED
#endif

#ifndef VINT64x4_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(/* nothing */, 64, 4, 2)
# define VINT64x4_RSHIFT_DEFINED
#endif

#ifndef VINT64x4_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(/* nothing */, 64, 4, 2)
# define VINT64x4_LRSHIFT_DEFINED
#endif

#ifndef VINT64x4_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(/* nothing */, 64, 4, 2)
# define VINT64x4_LSHIFT_DEFINED
#endif



/* vuint64x4 (unsigned; built as two vuint64x2 halves via the DBL macros).
 * NOTE(review): generated label wrongly said "vint64x4" -- labels swapped
 * with the signed section above; fix in utils/gengeneric.c too. */

#ifndef VUINT64x4_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(u, 64, 4, 2)
# define VUINT64x4_SPLAT_DEFINED
#endif

#ifndef VUINT64x4_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(u, 64, 4, 2)
# define VUINT64x4_LOAD_ALIGNED_DEFINED
#endif

#ifndef VUINT64x4_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(u, 64, 4, 2)
# define VUINT64x4_LOAD_DEFINED
#endif

#ifndef VUINT64x4_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(u, 64, 4, 2)
# define VUINT64x4_STORE_ALIGNED_DEFINED
#endif

#ifndef VUINT64x4_STORE_DEFINED
VEC_GENERIC_DBL_STORE(u, 64, 4, 2)
# define VUINT64x4_STORE_DEFINED
#endif

#ifndef VUINT64x4_ADD_DEFINED
VEC_GENERIC_DBL_ADD(u, 64, 4, 2)
# define VUINT64x4_ADD_DEFINED
#endif

#ifndef VUINT64x4_SUB_DEFINED
VEC_GENERIC_DBL_SUB(u, 64, 4, 2)
# define VUINT64x4_SUB_DEFINED
#endif

#ifndef VUINT64x4_MUL_DEFINED
VEC_GENERIC_DBL_MUL(u, 64, 4, 2)
# define VUINT64x4_MUL_DEFINED
#endif

#ifndef VUINT64x4_DIV_DEFINED
VEC_GENERIC_DBL_DIV(u, 64, 4, 2)
# define VUINT64x4_DIV_DEFINED
#endif

#ifndef VUINT64x4_AVG_DEFINED
VEC_GENERIC_DBL_AVG(u, 64, 4, 2)
# define VUINT64x4_AVG_DEFINED
#endif

#ifndef VUINT64x4_AND_DEFINED
VEC_GENERIC_DBL_AND(u, 64, 4, 2)
# define VUINT64x4_AND_DEFINED
#endif

#ifndef VUINT64x4_OR_DEFINED
VEC_GENERIC_DBL_OR(u, 64, 4, 2)
# define VUINT64x4_OR_DEFINED
#endif

#ifndef VUINT64x4_XOR_DEFINED
VEC_GENERIC_DBL_XOR(u, 64, 4, 2)
# define VUINT64x4_XOR_DEFINED
#endif

#ifndef VUINT64x4_NOT_DEFINED
VEC_GENERIC_DBL_NOT(u, 64, 4, 2)
# define VUINT64x4_NOT_DEFINED
#endif

#ifndef VUINT64x4_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(u, 64, 4, 2)
# define VUINT64x4_CMPLT_DEFINED
#endif

#ifndef VUINT64x4_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(u, 64, 4, 2)
# define VUINT64x4_CMPEQ_DEFINED
#endif

#ifndef VUINT64x4_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(u, 64, 4, 2)
# define VUINT64x4_CMPGT_DEFINED
#endif

#ifndef VUINT64x4_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(u, 64, 4, 2)
# define VUINT64x4_CMPLE_DEFINED
#endif

#ifndef VUINT64x4_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(u, 64, 4, 2)
# define VUINT64x4_CMPGE_DEFINED
#endif

#ifndef VUINT64x4_MIN_DEFINED
VEC_GENERIC_DBL_MIN(u, 64, 4, 2)
# define VUINT64x4_MIN_DEFINED
#endif

#ifndef VUINT64x4_MAX_DEFINED
VEC_GENERIC_DBL_MAX(u, 64, 4, 2)
# define VUINT64x4_MAX_DEFINED
#endif

#ifndef VUINT64x4_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(u, 64, 4, 2)
# define VUINT64x4_RSHIFT_DEFINED
#endif

#ifndef VUINT64x4_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(u, 64, 4, 2)
# define VUINT64x4_LRSHIFT_DEFINED
#endif

#ifndef VUINT64x4_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(u, 64, 4, 2)
# define VUINT64x4_LSHIFT_DEFINED
#endif



/* vint64x8 (signed; built as two vint64x4 halves via the DBL macros).
 * NOTE(review): generated label wrongly said "vuint64x8" -- labels swapped
 * with the unsigned section below; fix in utils/gengeneric.c too. */

#ifndef VINT64x8_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(/* nothing */, 64, 8, 4)
# define VINT64x8_SPLAT_DEFINED
#endif

#ifndef VINT64x8_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(/* nothing */, 64, 8, 4)
# define VINT64x8_LOAD_ALIGNED_DEFINED
#endif

#ifndef VINT64x8_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(/* nothing */, 64, 8, 4)
# define VINT64x8_LOAD_DEFINED
#endif

#ifndef VINT64x8_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(/* nothing */, 64, 8, 4)
# define VINT64x8_STORE_ALIGNED_DEFINED
#endif

#ifndef VINT64x8_STORE_DEFINED
VEC_GENERIC_DBL_STORE(/* nothing */, 64, 8, 4)
# define VINT64x8_STORE_DEFINED
#endif

#ifndef VINT64x8_ADD_DEFINED
VEC_GENERIC_DBL_ADD(/* nothing */, 64, 8, 4)
# define VINT64x8_ADD_DEFINED
#endif

#ifndef VINT64x8_SUB_DEFINED
VEC_GENERIC_DBL_SUB(/* nothing */, 64, 8, 4)
# define VINT64x8_SUB_DEFINED
#endif

#ifndef VINT64x8_MUL_DEFINED
VEC_GENERIC_DBL_MUL(/* nothing */, 64, 8, 4)
# define VINT64x8_MUL_DEFINED
#endif

#ifndef VINT64x8_DIV_DEFINED
VEC_GENERIC_DBL_DIV(/* nothing */, 64, 8, 4)
# define VINT64x8_DIV_DEFINED
#endif

#ifndef VINT64x8_AVG_DEFINED
VEC_GENERIC_DBL_AVG(/* nothing */, 64, 8, 4)
# define VINT64x8_AVG_DEFINED
#endif

#ifndef VINT64x8_AND_DEFINED
VEC_GENERIC_DBL_AND(/* nothing */, 64, 8, 4)
# define VINT64x8_AND_DEFINED
#endif

#ifndef VINT64x8_OR_DEFINED
VEC_GENERIC_DBL_OR(/* nothing */, 64, 8, 4)
# define VINT64x8_OR_DEFINED
#endif

#ifndef VINT64x8_XOR_DEFINED
VEC_GENERIC_DBL_XOR(/* nothing */, 64, 8, 4)
# define VINT64x8_XOR_DEFINED
#endif

#ifndef VINT64x8_NOT_DEFINED
VEC_GENERIC_DBL_NOT(/* nothing */, 64, 8, 4)
# define VINT64x8_NOT_DEFINED
#endif

#ifndef VINT64x8_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(/* nothing */, 64, 8, 4)
# define VINT64x8_CMPLT_DEFINED
#endif

#ifndef VINT64x8_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(/* nothing */, 64, 8, 4)
# define VINT64x8_CMPEQ_DEFINED
#endif

#ifndef VINT64x8_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(/* nothing */, 64, 8, 4)
# define VINT64x8_CMPGT_DEFINED
#endif

#ifndef VINT64x8_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(/* nothing */, 64, 8, 4)
# define VINT64x8_CMPLE_DEFINED
#endif

#ifndef VINT64x8_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(/* nothing */, 64, 8, 4)
# define VINT64x8_CMPGE_DEFINED
#endif

#ifndef VINT64x8_MIN_DEFINED
VEC_GENERIC_DBL_MIN(/* nothing */, 64, 8, 4)
# define VINT64x8_MIN_DEFINED
#endif

#ifndef VINT64x8_MAX_DEFINED
VEC_GENERIC_DBL_MAX(/* nothing */, 64, 8, 4)
# define VINT64x8_MAX_DEFINED
#endif

#ifndef VINT64x8_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(/* nothing */, 64, 8, 4)
# define VINT64x8_RSHIFT_DEFINED
#endif

#ifndef VINT64x8_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(/* nothing */, 64, 8, 4)
# define VINT64x8_LRSHIFT_DEFINED
#endif

#ifndef VINT64x8_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(/* nothing */, 64, 8, 4)
# define VINT64x8_LSHIFT_DEFINED
#endif



/* vuint64x8 (unsigned; built as two vuint64x4 halves via the DBL macros).
 * NOTE(review): generated label wrongly said "vint64x8" -- labels swapped
 * with the signed section above; fix in utils/gengeneric.c too. */

#ifndef VUINT64x8_SPLAT_DEFINED
VEC_GENERIC_DBL_SPLAT(u, 64, 8, 4)
# define VUINT64x8_SPLAT_DEFINED
#endif

#ifndef VUINT64x8_LOAD_ALIGNED_DEFINED
VEC_GENERIC_DBL_LOAD_ALIGNED(u, 64, 8, 4)
# define VUINT64x8_LOAD_ALIGNED_DEFINED
#endif

#ifndef VUINT64x8_LOAD_DEFINED
VEC_GENERIC_DBL_LOAD(u, 64, 8, 4)
# define VUINT64x8_LOAD_DEFINED
#endif

#ifndef VUINT64x8_STORE_ALIGNED_DEFINED
VEC_GENERIC_DBL_STORE_ALIGNED(u, 64, 8, 4)
# define VUINT64x8_STORE_ALIGNED_DEFINED
#endif

#ifndef VUINT64x8_STORE_DEFINED
VEC_GENERIC_DBL_STORE(u, 64, 8, 4)
# define VUINT64x8_STORE_DEFINED
#endif

#ifndef VUINT64x8_ADD_DEFINED
VEC_GENERIC_DBL_ADD(u, 64, 8, 4)
# define VUINT64x8_ADD_DEFINED
#endif

#ifndef VUINT64x8_SUB_DEFINED
VEC_GENERIC_DBL_SUB(u, 64, 8, 4)
# define VUINT64x8_SUB_DEFINED
#endif

#ifndef VUINT64x8_MUL_DEFINED
VEC_GENERIC_DBL_MUL(u, 64, 8, 4)
# define VUINT64x8_MUL_DEFINED
#endif

#ifndef VUINT64x8_DIV_DEFINED
VEC_GENERIC_DBL_DIV(u, 64, 8, 4)
# define VUINT64x8_DIV_DEFINED
#endif

#ifndef VUINT64x8_AVG_DEFINED
VEC_GENERIC_DBL_AVG(u, 64, 8, 4)
# define VUINT64x8_AVG_DEFINED
#endif

#ifndef VUINT64x8_AND_DEFINED
VEC_GENERIC_DBL_AND(u, 64, 8, 4)
# define VUINT64x8_AND_DEFINED
#endif

#ifndef VUINT64x8_OR_DEFINED
VEC_GENERIC_DBL_OR(u, 64, 8, 4)
# define VUINT64x8_OR_DEFINED
#endif

#ifndef VUINT64x8_XOR_DEFINED
VEC_GENERIC_DBL_XOR(u, 64, 8, 4)
# define VUINT64x8_XOR_DEFINED
#endif

#ifndef VUINT64x8_NOT_DEFINED
VEC_GENERIC_DBL_NOT(u, 64, 8, 4)
# define VUINT64x8_NOT_DEFINED
#endif

#ifndef VUINT64x8_CMPLT_DEFINED
VEC_GENERIC_DBL_CMPLT(u, 64, 8, 4)
# define VUINT64x8_CMPLT_DEFINED
#endif

#ifndef VUINT64x8_CMPEQ_DEFINED
VEC_GENERIC_DBL_CMPEQ(u, 64, 8, 4)
# define VUINT64x8_CMPEQ_DEFINED
#endif

#ifndef VUINT64x8_CMPGT_DEFINED
VEC_GENERIC_DBL_CMPGT(u, 64, 8, 4)
# define VUINT64x8_CMPGT_DEFINED
#endif

#ifndef VUINT64x8_CMPLE_DEFINED
VEC_GENERIC_DBL_CMPLE(u, 64, 8, 4)
# define VUINT64x8_CMPLE_DEFINED
#endif

#ifndef VUINT64x8_CMPGE_DEFINED
VEC_GENERIC_DBL_CMPGE(u, 64, 8, 4)
# define VUINT64x8_CMPGE_DEFINED
#endif

#ifndef VUINT64x8_MIN_DEFINED
VEC_GENERIC_DBL_MIN(u, 64, 8, 4)
# define VUINT64x8_MIN_DEFINED
#endif

#ifndef VUINT64x8_MAX_DEFINED
VEC_GENERIC_DBL_MAX(u, 64, 8, 4)
# define VUINT64x8_MAX_DEFINED
#endif

#ifndef VUINT64x8_RSHIFT_DEFINED
VEC_GENERIC_DBL_RSHIFT(u, 64, 8, 4)
# define VUINT64x8_RSHIFT_DEFINED
#endif

#ifndef VUINT64x8_LRSHIFT_DEFINED
VEC_GENERIC_DBL_LRSHIFT(u, 64, 8, 4)
# define VUINT64x8_LRSHIFT_DEFINED
#endif

#ifndef VUINT64x8_LSHIFT_DEFINED
VEC_GENERIC_DBL_LSHIFT(u, 64, 8, 4)
# define VUINT64x8_LSHIFT_DEFINED
#endif

#endif /* VEC_IMPL_GENERIC_H_ */