mirror of https://github.com/ggml-org/llama.cpp.git (synced 2025-08-17 05:25:09 -04:00)
Revert "weights_only" arg - this causing more trouble than help
@@ -86,8 +86,7 @@ for p in range(n_parts):
     if (p > 0):
         fname_out = sys.argv[1] + "/ggml-model-" + ftype_str[ftype] + ".bin" + "." + str(p)
 
-    # weights_only requires torch 1.13.1, remove this param or update if you get an "invalid keyword argument" error
-    model = torch.load(fname_model, map_location="cpu", weights_only=True)
+    model = torch.load(fname_model, map_location="cpu")
 
     fout = open(fname_out, "wb")
 
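
The reverted change had passed weights_only=True to torch.load in the model conversion script (presumably convert-pth-to-ggml.py); the deleted comment notes that the keyword needs torch 1.13.1, and on older releases it fails with an "invalid keyword argument" error, which is why the commit drops it. A minimal sketch of one way to keep the safer restricted load on newer PyTorch while falling back to the plain load this commit restores; load_checkpoint is a hypothetical helper, not part of the repository:

import inspect

import torch

def load_checkpoint(fname_model: str):
    """Load a checkpoint on CPU, passing weights_only=True only when torch supports it."""
    # Newer PyTorch exposes weights_only as a named parameter of torch.load;
    # on older releases the stray keyword ends up at the pickle layer and fails
    # with the "invalid keyword argument" error the reverted change tripped over.
    if "weights_only" in inspect.signature(torch.load).parameters:
        return torch.load(fname_model, map_location="cpu", weights_only=True)
    # Fallback: the plain load that this commit reverts to.
    return torch.load(fname_model, map_location="cpu")

Detecting the parameter via inspect.signature avoids pinning a torch version while still using the restricted unpickler where it is available; the trade-off is that the explicit revert in the commit keeps the script simpler for all users.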