// Copyright 2019 Google LLC
// SPDX-License-Identifier: Apache-2.0
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stddef.h>
#include <stdint.h>

#include <algorithm>  // std::fill
#include <bitset>
#include <string>

#undef HWY_TARGET_INCLUDE
#define HWY_TARGET_INCLUDE "highway_test.cc"
#include "hwy/foreach_target.h"  // IWYU pragma: keep
#include "hwy/highway.h"
#include "hwy/nanobenchmark.h"  // Unpredictable1
#include "hwy/tests/test_util-inl.h"
29 HWY_BEFORE_NAMESPACE();
31 namespace HWY_NAMESPACE
{
33 template <size_t kLimit
, typename T
>
34 HWY_NOINLINE
void TestCappedLimit(T
/* tag */) {
35 CappedTag
<T
, kLimit
> d
;
36 // Ensure two ops compile
37 HWY_ASSERT_VEC_EQ(d
, Zero(d
), Set(d
, T
{0}));
39 // Ensure we do not write more than kLimit lanes
40 const size_t N
= Lanes(d
);
42 auto lanes
= AllocateAligned
<T
>(N
);
44 std::fill(lanes
.get(), lanes
.get() + N
, T
{0});
45 Store(Set(d
, T
{1}), d
, lanes
.get());
46 for (size_t i
= kLimit
; i
< N
; ++i
) {
47 HWY_ASSERT_EQ(lanes
[i
], T
{0});
52 // Adapter for ForAllTypes - we are constructing our own Simd<> and thus do not
53 // use ForPartialVectors etc.
56 void operator()(T t
) const {
57 TestCappedLimit
<1>(t
);
58 TestCappedLimit
<3>(t
);
59 TestCappedLimit
<5>(t
);
60 TestCappedLimit
<1ull << 15>(t
);
64 HWY_NOINLINE
void TestAllCapped() { ForAllTypes(TestCapped()); }
66 // For testing that ForPartialVectors reaches every possible size:
67 using NumLanesSet
= std::bitset
<HWY_MAX_BYTES
+ 1>;
69 // Monostate pattern because ForPartialVectors takes a template argument, not a
70 // functor by reference.
71 static NumLanesSet
* NumLanesForSize(size_t sizeof_t
) {
72 HWY_ASSERT(sizeof_t
<= sizeof(uint64_t));
73 static NumLanesSet num_lanes
[sizeof(uint64_t) + 1];
74 return num_lanes
+ sizeof_t
;
76 static size_t* MaxLanesForSize(size_t sizeof_t
) {
77 HWY_ASSERT(sizeof_t
<= sizeof(uint64_t));
78 static size_t num_lanes
[sizeof(uint64_t) + 1] = {0};
79 return num_lanes
+ sizeof_t
;
83 template <class T
, class D
>
84 HWY_NOINLINE
void operator()(T
/*unused*/, D d
) {
85 const size_t N
= Lanes(d
);
86 const size_t kMax
= MaxLanes(d
); // for RVV, includes LMUL
87 HWY_ASSERT(N
<= kMax
);
88 HWY_ASSERT(kMax
<= (HWY_MAX_BYTES
/ sizeof(T
)));
90 NumLanesForSize(sizeof(T
))->set(N
);
91 *MaxLanesForSize(sizeof(T
)) = HWY_MAX(*MaxLanesForSize(sizeof(T
)), N
);
95 HWY_NOINLINE
void TestAllMaxLanes() {
96 ForAllTypes(ForPartialVectors
<TestMaxLanes
>());
98 // Ensure ForPartialVectors visited all powers of two [1, N].
99 for (size_t sizeof_t
: {sizeof(uint8_t), sizeof(uint16_t), sizeof(uint32_t),
101 const size_t N
= *MaxLanesForSize(sizeof_t
);
102 for (size_t i
= 1; i
<= N
; i
+= i
) {
103 if (!NumLanesForSize(sizeof_t
)->test(i
)) {
104 fprintf(stderr
, "T=%d: did not visit for N=%d, max=%d\n",
105 static_cast<int>(sizeof_t
), static_cast<int>(i
),
106 static_cast<int>(N
));
114 template <class T
, class D
>
115 HWY_NOINLINE
void operator()(T
/*unused*/, D d
) {
117 const Vec
<D
> v0
= Zero(d
);
118 const size_t N
= Lanes(d
);
119 auto expected
= AllocateAligned
<T
>(N
);
120 HWY_ASSERT(expected
);
121 std::fill(expected
.get(), expected
.get() + N
, T
{0});
122 HWY_ASSERT_VEC_EQ(d
, expected
.get(), v0
);
125 const Vec
<D
> v2
= Set(d
, T
{2});
126 for (size_t i
= 0; i
< N
; ++i
) {
129 HWY_ASSERT_VEC_EQ(d
, expected
.get(), v2
);
132 const Vec
<D
> vi
= Iota(d
, T(5));
133 for (size_t i
= 0; i
< N
; ++i
) {
134 expected
[i
] = T(5 + i
);
136 HWY_ASSERT_VEC_EQ(d
, expected
.get(), vi
);
138 // Undefined. This may result in a 'using uninitialized memory' warning
139 // here, even though we already suppress warnings in Undefined.
140 HWY_DIAGNOSTICS(push
)
141 HWY_DIAGNOSTICS_OFF(disable
: 4700, ignored
"-Wuninitialized")
142 #if HWY_COMPILER_GCC_ACTUAL
143 HWY_DIAGNOSTICS_OFF(disable
: 4701, ignored
"-Wmaybe-uninitialized")
145 const Vec
<D
> vu
= Undefined(d
);
146 Store(vu
, d
, expected
.get());
151 HWY_NOINLINE
void TestAllSet() { ForAllTypes(ForPartialVectors
<TestSet
>()); }
153 // Ensures wraparound (mod 2^bits)
154 struct TestOverflow
{
155 template <class T
, class D
>
156 HWY_NOINLINE
void operator()(T
/*unused*/, D d
) {
157 const Vec
<D
> v1
= Set(d
, T
{1});
158 const Vec
<D
> vmax
= Set(d
, LimitsMax
<T
>());
159 const Vec
<D
> vmin
= Set(d
, LimitsMin
<T
>());
160 // Unsigned underflow / negative -> positive
161 HWY_ASSERT_VEC_EQ(d
, vmax
, Sub(vmin
, v1
));
162 // Unsigned overflow / positive -> negative
163 HWY_ASSERT_VEC_EQ(d
, vmin
, Add(vmax
, v1
));
167 HWY_NOINLINE
void TestAllOverflow() {
168 ForIntegerTypes(ForPartialVectors
<TestOverflow
>());
172 template <class T
, class D
>
173 HWY_NOINLINE
void operator()(T
/*unused*/, D d
) {
174 const Vec
<D
> v0
= Zero(d
);
175 const Vec
<D
> v1
= Set(d
, T
{1});
176 const Vec
<D
> v2
= Set(d
, T
{2});
178 HWY_ASSERT_VEC_EQ(d
, v1
, Clamp(v2
, v0
, v1
));
179 HWY_ASSERT_VEC_EQ(d
, v1
, Clamp(v0
, v1
, v2
));
183 HWY_NOINLINE
void TestAllClamp() {
184 ForAllTypes(ForPartialVectors
<TestClamp
>());
187 struct TestSignBitInteger
{
188 template <class T
, class D
>
189 HWY_NOINLINE
void operator()(T
/*unused*/, D d
) {
190 const Vec
<D
> v0
= Zero(d
);
191 const Vec
<D
> all
= VecFromMask(d
, Eq(v0
, v0
));
192 const Vec
<D
> vs
= SignBit(d
);
193 const Vec
<D
> other
= Sub(vs
, Set(d
, T
{1}));
195 // Shifting left by one => overflow, equal zero
196 HWY_ASSERT_VEC_EQ(d
, v0
, Add(vs
, vs
));
197 // Verify the lower bits are zero (only +/- and logical ops are available
199 HWY_ASSERT_VEC_EQ(d
, all
, Add(vs
, other
));
203 struct TestSignBitFloat
{
204 template <class T
, class D
>
205 HWY_NOINLINE
void operator()(T
/*unused*/, D d
) {
206 const Vec
<D
> v0
= Zero(d
);
207 const Vec
<D
> vs
= SignBit(d
);
208 const Vec
<D
> vp
= Set(d
, static_cast<T
>(2.25));
209 const Vec
<D
> vn
= Set(d
, static_cast<T
>(-2.25));
210 HWY_ASSERT_VEC_EQ(d
, Or(vp
, vs
), vn
);
211 HWY_ASSERT_VEC_EQ(d
, AndNot(vs
, vn
), vp
);
212 HWY_ASSERT_VEC_EQ(d
, v0
, vs
);
216 HWY_NOINLINE
void TestAllSignBit() {
217 ForIntegerTypes(ForPartialVectors
<TestSignBitInteger
>());
218 ForFloatTypes(ForPartialVectors
<TestSignBitFloat
>());
221 // inline to work around incorrect SVE codegen (only first 128 bits used).
222 template <class D
, class V
>
223 HWY_INLINE
void AssertNaN(D d
, VecArg
<V
> v
, const char* file
, int line
) {
225 const size_t N
= Lanes(d
);
226 if (!AllTrue(d
, IsNaN(v
))) {
227 Print(d
, "not all NaN", v
, 0, N
);
228 Print(d
, "mask", VecFromMask(d
, IsNaN(v
)), 0, N
);
229 const std::string type_name
= TypeName(T(), N
);
230 // RVV lacks PRIu64 and MSYS still has problems with %zu, so print bytes to
231 // avoid truncating doubles.
232 uint8_t bytes
[HWY_MAX(sizeof(T
), 8)] = {0};
233 const T lane
= GetLane(v
);
234 CopyBytes
<sizeof(T
)>(&lane
, bytes
);
236 "Expected %s NaN, got %E (bytes %02x %02x %02x %02x %02x %02x %02x "
238 type_name
.c_str(), lane
, bytes
[0], bytes
[1], bytes
[2], bytes
[3],
239 bytes
[4], bytes
[5], bytes
[6], bytes
[7]);
243 #define HWY_ASSERT_NAN(d, v) AssertNaN(d, v, __FILE__, __LINE__)
246 template <class T
, class D
>
247 HWY_NOINLINE
void operator()(T
/*unused*/, D d
) {
248 const Vec
<D
> v1
= Set(d
, static_cast<T
>(Unpredictable1()));
249 const Vec
<D
> nan
= IfThenElse(Eq(v1
, Set(d
, T
{1})), NaN(d
), v1
);
250 HWY_ASSERT_NAN(d
, nan
);
253 HWY_ASSERT_NAN(d
, Add(nan
, v1
));
254 HWY_ASSERT_NAN(d
, Add(v1
, nan
));
255 HWY_ASSERT_NAN(d
, Sub(nan
, v1
));
256 HWY_ASSERT_NAN(d
, Sub(v1
, nan
));
257 HWY_ASSERT_NAN(d
, Mul(nan
, v1
));
258 HWY_ASSERT_NAN(d
, Mul(v1
, nan
));
259 HWY_ASSERT_NAN(d
, Div(nan
, v1
));
260 HWY_ASSERT_NAN(d
, Div(v1
, nan
));
263 HWY_ASSERT_NAN(d
, MulAdd(nan
, v1
, v1
));
264 HWY_ASSERT_NAN(d
, MulAdd(v1
, nan
, v1
));
265 HWY_ASSERT_NAN(d
, MulAdd(v1
, v1
, nan
));
266 HWY_ASSERT_NAN(d
, MulSub(nan
, v1
, v1
));
267 HWY_ASSERT_NAN(d
, MulSub(v1
, nan
, v1
));
268 HWY_ASSERT_NAN(d
, MulSub(v1
, v1
, nan
));
269 HWY_ASSERT_NAN(d
, NegMulAdd(nan
, v1
, v1
));
270 HWY_ASSERT_NAN(d
, NegMulAdd(v1
, nan
, v1
));
271 HWY_ASSERT_NAN(d
, NegMulAdd(v1
, v1
, nan
));
272 HWY_ASSERT_NAN(d
, NegMulSub(nan
, v1
, v1
));
273 HWY_ASSERT_NAN(d
, NegMulSub(v1
, nan
, v1
));
274 HWY_ASSERT_NAN(d
, NegMulSub(v1
, v1
, nan
));
277 HWY_ASSERT_NAN(d
, Sqrt(nan
));
280 HWY_ASSERT_NAN(d
, Abs(nan
));
281 HWY_ASSERT_NAN(d
, Neg(nan
));
282 HWY_ASSERT_NAN(d
, CopySign(nan
, v1
));
283 HWY_ASSERT_NAN(d
, CopySignToAbs(nan
, v1
));
286 HWY_ASSERT_NAN(d
, Ceil(nan
));
287 HWY_ASSERT_NAN(d
, Floor(nan
));
288 HWY_ASSERT_NAN(d
, Round(nan
));
289 HWY_ASSERT_NAN(d
, Trunc(nan
));
291 // Logical (And/AndNot/Xor will clear NaN!)
292 HWY_ASSERT_NAN(d
, Or(nan
, v1
));
295 HWY_ASSERT(AllFalse(d
, Eq(nan
, v1
)));
296 HWY_ASSERT(AllFalse(d
, Gt(nan
, v1
)));
297 HWY_ASSERT(AllFalse(d
, Lt(nan
, v1
)));
298 HWY_ASSERT(AllFalse(d
, Ge(nan
, v1
)));
299 HWY_ASSERT(AllFalse(d
, Le(nan
, v1
)));
302 HWY_ASSERT_NAN(d
, SumOfLanes(d
, nan
));
303 HWY_ASSERT_NAN(d
, Set(d
, ReduceSum(d
, nan
)));
304 // TODO(janwas): re-enable after QEMU/Spike are fixed
305 #if HWY_TARGET != HWY_RVV
306 HWY_ASSERT_NAN(d
, MinOfLanes(d
, nan
));
307 HWY_ASSERT_NAN(d
, MaxOfLanes(d
, nan
));
311 #if (HWY_ARCH_X86 || HWY_ARCH_WASM) && (HWY_TARGET < HWY_EMU128)
312 // Native WASM or x86 SIMD return the second operand if any input is NaN.
313 HWY_ASSERT_VEC_EQ(d
, v1
, Min(nan
, v1
));
314 HWY_ASSERT_VEC_EQ(d
, v1
, Max(nan
, v1
));
315 HWY_ASSERT_NAN(d
, Min(v1
, nan
));
316 HWY_ASSERT_NAN(d
, Max(v1
, nan
));
317 #elif HWY_TARGET <= HWY_NEON_WITHOUT_AES && HWY_ARCH_ARM_V7
318 // Armv7 NEON returns NaN if any input is NaN.
319 HWY_ASSERT_NAN(d
, Min(v1
, nan
));
320 HWY_ASSERT_NAN(d
, Max(v1
, nan
));
321 HWY_ASSERT_NAN(d
, Min(nan
, v1
));
322 HWY_ASSERT_NAN(d
, Max(nan
, v1
));
324 // IEEE 754-2019 minimumNumber is defined as the other argument if exactly
325 // one is NaN, and qNaN if both are.
326 HWY_ASSERT_VEC_EQ(d
, v1
, Min(nan
, v1
));
327 HWY_ASSERT_VEC_EQ(d
, v1
, Max(nan
, v1
));
328 HWY_ASSERT_VEC_EQ(d
, v1
, Min(v1
, nan
));
329 HWY_ASSERT_VEC_EQ(d
, v1
, Max(v1
, nan
));
331 HWY_ASSERT_NAN(d
, Min(nan
, nan
));
332 HWY_ASSERT_NAN(d
, Max(nan
, nan
));
336 // For functions only available for float32
338 template <class T
, class D
>
339 HWY_NOINLINE
void operator()(T
/*unused*/, D d
) {
340 const Vec
<D
> v1
= Set(d
, static_cast<T
>(Unpredictable1()));
341 const Vec
<D
> nan
= IfThenElse(Eq(v1
, Set(d
, T
{1})), NaN(d
), v1
);
342 HWY_ASSERT_NAN(d
, ApproximateReciprocal(nan
));
343 HWY_ASSERT_NAN(d
, ApproximateReciprocalSqrt(nan
));
344 HWY_ASSERT_NAN(d
, AbsDiff(nan
, v1
));
345 HWY_ASSERT_NAN(d
, AbsDiff(v1
, nan
));
349 HWY_NOINLINE
void TestAllNaN() {
350 ForFloatTypes(ForPartialVectors
<TestNaN
>());
351 ForPartialVectors
<TestF32NaN
>()(float());
355 template <class T
, class D
>
356 HWY_NOINLINE
void operator()(T
/*unused*/, D d
) {
357 const Vec
<D
> v1
= Set(d
, static_cast<T
>(Unpredictable1()));
358 const Vec
<D
> inf
= IfThenElse(Eq(v1
, Set(d
, T
{1})), Inf(d
), v1
);
359 const Vec
<D
> nan
= IfThenElse(Eq(v1
, Set(d
, T
{1})), NaN(d
), v1
);
360 const Vec
<D
> neg
= Set(d
, T
{-1});
361 HWY_ASSERT_NAN(d
, nan
);
362 HWY_ASSERT_MASK_EQ(d
, MaskFalse(d
), IsNaN(inf
));
363 HWY_ASSERT_MASK_EQ(d
, MaskFalse(d
), IsNaN(CopySign(inf
, neg
)));
364 HWY_ASSERT_MASK_EQ(d
, MaskTrue(d
), IsNaN(nan
));
365 HWY_ASSERT_MASK_EQ(d
, MaskTrue(d
), IsNaN(CopySign(nan
, neg
)));
366 HWY_ASSERT_MASK_EQ(d
, MaskFalse(d
), IsNaN(v1
));
367 HWY_ASSERT_MASK_EQ(d
, MaskFalse(d
), IsNaN(Zero(d
)));
368 HWY_ASSERT_MASK_EQ(d
, MaskFalse(d
), IsNaN(Set(d
, hwy::LowestValue
<T
>())));
369 HWY_ASSERT_MASK_EQ(d
, MaskFalse(d
), IsNaN(Set(d
, hwy::HighestValue
<T
>())));
373 HWY_NOINLINE
void TestAllIsNaN() {
374 ForFloatTypes(ForPartialVectors
<TestIsNaN
>());
378 template <class T
, class D
>
379 HWY_NOINLINE
void operator()(T
/*unused*/, D d
) {
380 const Vec
<D
> v1
= Set(d
, static_cast<T
>(Unpredictable1()));
381 const Vec
<D
> inf
= IfThenElse(Eq(v1
, Set(d
, T
{1})), Inf(d
), v1
);
382 const Vec
<D
> nan
= IfThenElse(Eq(v1
, Set(d
, T
{1})), NaN(d
), v1
);
383 const Vec
<D
> neg
= Set(d
, T
{-1});
384 HWY_ASSERT_MASK_EQ(d
, MaskTrue(d
), IsInf(inf
));
385 HWY_ASSERT_MASK_EQ(d
, MaskTrue(d
), IsInf(CopySign(inf
, neg
)));
386 HWY_ASSERT_MASK_EQ(d
, MaskFalse(d
), IsInf(nan
));
387 HWY_ASSERT_MASK_EQ(d
, MaskFalse(d
), IsInf(CopySign(nan
, neg
)));
388 HWY_ASSERT_MASK_EQ(d
, MaskFalse(d
), IsInf(v1
));
389 HWY_ASSERT_MASK_EQ(d
, MaskFalse(d
), IsInf(Zero(d
)));
390 HWY_ASSERT_MASK_EQ(d
, MaskFalse(d
), IsInf(Set(d
, hwy::LowestValue
<T
>())));
391 HWY_ASSERT_MASK_EQ(d
, MaskFalse(d
), IsInf(Set(d
, hwy::HighestValue
<T
>())));
395 HWY_NOINLINE
void TestAllIsInf() {
396 ForFloatTypes(ForPartialVectors
<TestIsInf
>());
399 struct TestIsFinite
{
400 template <class T
, class D
>
401 HWY_NOINLINE
void operator()(T
/*unused*/, D d
) {
402 const Vec
<D
> v1
= Set(d
, static_cast<T
>(Unpredictable1()));
403 const Vec
<D
> inf
= IfThenElse(Eq(v1
, Set(d
, T
{1})), Inf(d
), v1
);
404 const Vec
<D
> nan
= IfThenElse(Eq(v1
, Set(d
, T
{1})), NaN(d
), v1
);
405 const Vec
<D
> neg
= Set(d
, T
{-1});
406 HWY_ASSERT_MASK_EQ(d
, MaskFalse(d
), IsFinite(inf
));
407 HWY_ASSERT_MASK_EQ(d
, MaskFalse(d
), IsFinite(CopySign(inf
, neg
)));
408 HWY_ASSERT_MASK_EQ(d
, MaskFalse(d
), IsFinite(nan
));
409 HWY_ASSERT_MASK_EQ(d
, MaskFalse(d
), IsFinite(CopySign(nan
, neg
)));
410 HWY_ASSERT_MASK_EQ(d
, MaskTrue(d
), IsFinite(v1
));
411 HWY_ASSERT_MASK_EQ(d
, MaskTrue(d
), IsFinite(Zero(d
)));
412 HWY_ASSERT_MASK_EQ(d
, MaskTrue(d
), IsFinite(Set(d
, hwy::LowestValue
<T
>())));
413 HWY_ASSERT_MASK_EQ(d
, MaskTrue(d
),
414 IsFinite(Set(d
, hwy::HighestValue
<T
>())));
418 HWY_NOINLINE
void TestAllIsFinite() {
419 ForFloatTypes(ForPartialVectors
<TestIsFinite
>());
422 struct TestCopyAndAssign
{
423 template <class T
, class D
>
424 HWY_NOINLINE
void operator()(T
/*unused*/, D d
) {
426 const Vec
<D
> v3
= Iota(d
, 3);
428 HWY_ASSERT_VEC_EQ(d
, v3
, v3b
);
431 auto v3c
= Undefined(d
);
433 HWY_ASSERT_VEC_EQ(d
, v3
, v3c
);
437 HWY_NOINLINE
void TestAllCopyAndAssign() {
438 ForAllTypes(ForPartialVectors
<TestCopyAndAssign
>());
442 template <class T
, class D
>
443 HWY_NOINLINE
void operator()(T
/*unused*/, D d
) {
444 HWY_ASSERT_EQ(T
{0}, GetLane(Zero(d
)));
445 HWY_ASSERT_EQ(T
{1}, GetLane(Set(d
, T
{1})));
449 HWY_NOINLINE
void TestAllGetLane() {
450 ForAllTypes(ForPartialVectors
<TestGetLane
>());
454 template <class T
, class D
>
455 HWY_NOINLINE
void operator()(T
/*unused*/, D d
) {
456 const Vec
<D
> v0
= Zero(d
);
457 using D0
= DFromV
<decltype(v0
)>; // not necessarily same as D
458 const Vec
<D
> v0b
= And(v0
, Set(D0(), T
{1})); // vectors can interoperate
459 HWY_ASSERT_VEC_EQ(d
, v0
, v0b
);
463 HWY_NOINLINE
void TestAllDFromV() {
464 ForAllTypes(ForPartialVectors
<TestDFromV
>());
467 // NOLINTNEXTLINE(google-readability-namespace-comments)
468 } // namespace HWY_NAMESPACE
470 HWY_AFTER_NAMESPACE();
475 HWY_BEFORE_TEST(HighwayTest
);
476 HWY_EXPORT_AND_TEST_P(HighwayTest
, TestAllCapped
);
477 HWY_EXPORT_AND_TEST_P(HighwayTest
, TestAllMaxLanes
);
478 HWY_EXPORT_AND_TEST_P(HighwayTest
, TestAllSet
);
479 HWY_EXPORT_AND_TEST_P(HighwayTest
, TestAllOverflow
);
480 HWY_EXPORT_AND_TEST_P(HighwayTest
, TestAllClamp
);
481 HWY_EXPORT_AND_TEST_P(HighwayTest
, TestAllSignBit
);
482 HWY_EXPORT_AND_TEST_P(HighwayTest
, TestAllNaN
);
483 HWY_EXPORT_AND_TEST_P(HighwayTest
, TestAllIsNaN
);
484 HWY_EXPORT_AND_TEST_P(HighwayTest
, TestAllIsInf
);
485 HWY_EXPORT_AND_TEST_P(HighwayTest
, TestAllIsFinite
);
486 HWY_EXPORT_AND_TEST_P(HighwayTest
, TestAllCopyAndAssign
);
487 HWY_EXPORT_AND_TEST_P(HighwayTest
, TestAllGetLane
);
488 HWY_EXPORT_AND_TEST_P(HighwayTest
, TestAllDFromV
);