convert.py : add consolidated.safetensors for mixtral 8x22b (#6587)
convert.py

@@ -1350,7 +1350,7 @@ def load_some_model(path: Path) -> ModelPlus:
     # Be extra-friendly and accept either a file or a directory:
     if path.is_dir():
         # Check if it's a set of safetensors files first
-        globs = ["model-00001-of-*.safetensors", "model.safetensors"]
+        globs = ["model-00001-of-*.safetensors", "model.safetensors", "consolidated.safetensors"]
         files = [file for glob in globs for file in path.glob(glob)]
         if not files:
             # Try the PyTorch patterns too, with lower priority
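For context, the hunk above only widens the glob list that load_some_model tries when it is handed a directory. Below is a minimal, self-contained sketch of that search order; the helper name pick_model_file and the exact PyTorch fallback pattern list are illustrative assumptions, not the verbatim upstream code.

from pathlib import Path

def pick_model_file(path: Path) -> Path:
    """Sketch: resolve a directory to a single model file, trying
    safetensors patterns first and PyTorch-style patterns second."""
    if path.is_dir():
        # Safetensors patterns; "consolidated.safetensors" is the entry
        # added by this commit so Mixtral 8x22B checkpoints are found.
        globs = ["model-00001-of-*.safetensors", "model.safetensors", "consolidated.safetensors"]
        files = [file for glob in globs for file in path.glob(glob)]
        if not files:
            # PyTorch fallback patterns, lower priority (illustrative list)
            globs = ["consolidated.00.pth", "pytorch_model-00001-of-*.bin", "*.pt", "pytorch_model.bin"]
            files = [file for glob in globs for file in path.glob(glob)]
        if not files:
            raise FileNotFoundError(f"Can't find model in directory {path}")
        if len(files) > 1:
            raise ValueError(f"Found multiple models in {path}, not sure which to pick: {files}")
        return files[0]
    return path

With the added pattern, a directory that ships a single consolidated.safetensors file (as the Mixtral 8x22B release does) is picked up on the first, higher-priority safetensors pass instead of going unrecognized.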