llama : fix hparams shadow (#1367)

fixes #1363
Author: Pavol Rusnak
Date: 2023-05-08 16:48:21 +02:00
Committed by: GitHub
Parent: f9a6364912
Commit: 003ba2fb43


@@ -970,8 +970,6 @@ static void llama_model_load_internal(
 
     // prepare memory for the weights
     {
-        const auto & hparams = model.hparams;
-
         const uint32_t n_embd  = hparams.n_embd;
         const uint32_t n_layer = hparams.n_layer;
         const uint32_t n_vocab = hparams.n_vocab;
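
For context, the change removes an inner re-declaration of hparams that shadows the reference already bound in the enclosing scope of llama_model_load_internal, which is why the following uses of hparams.n_embd, hparams.n_layer, and hparams.n_vocab still compile unchanged. Below is a minimal standalone sketch of the same pattern; struct and function names are illustrative and not taken from llama.cpp.

    // shadow_sketch.cpp - illustrative only, not llama.cpp code
    #include <cstdint>
    #include <cstdio>

    struct hparams_t { uint32_t n_embd = 4096; };
    struct model_t   { hparams_t hparams; };

    static void load_internal(const model_t & model) {
        const auto & hparams = model.hparams;        // outer binding

        {
            // const auto & hparams = model.hparams; // shadowed the outer reference;
                                                     // the commit deletes this kind of line
            const uint32_t n_embd = hparams.n_embd;  // now refers to the outer binding
            std::printf("n_embd = %u\n", (unsigned) n_embd);
        }
    }

    int main() {
        load_internal(model_t{});
        return 0;
    }

Compiling such code with a shadow warning enabled (e.g. g++ -Wshadow) flags the commented-out inner declaration if it is restored, which is the class of warning a change like this typically silences.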