// mirror of https://github.com/ggml-org/llama.cpp.git
// synced 2025-07-28 13:20:27 -04:00
// This file has been autogenerated by generate-variants.py, do not edit manually.

#include "../fattn-wmma-f16.cuh"

// Explicit instantiations of the WMMA (tensor core) FlashAttention kernel.
// Arguments appear to be (head size, cols per block, KQ accumulator type) —
// NOTE(review): confirm against the DECL_FATTN_WMMA_F16_CASE definition in
// fattn-wmma-f16.cuh. One instantiation per supported head size, all with
// float accumulation and 16 columns per block.
// Presumably requires an SM with WMMA support (SM70+) — verify in the header.
DECL_FATTN_WMMA_F16_CASE(64, 16, float);
DECL_FATTN_WMMA_F16_CASE(80, 16, float);
DECL_FATTN_WMMA_F16_CASE(96, 16, float);
DECL_FATTN_WMMA_F16_CASE(112, 16, float);
DECL_FATTN_WMMA_F16_CASE(128, 16, float);
DECL_FATTN_WMMA_F16_CASE(256, 16, float);