We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 22c3522 · commit ad45bb3 — Copy full SHA for ad45bb3
convert-falcon-hf-to-gguf.py
@@ -148,7 +148,7 @@ def count_model_parts(dir_model: str) -> int:
148
149
print("gguf: get gpt2 tokenizer vocab")
150
151
- vocab_size = len(tokenizer_json["model"]["vocab"])
+ vocab_size = hparams["vocab_size"]
152
153
# ref: https://github.com/cmp-nct/ggllm.cpp/blob/master/falcon_convert.py
154
tokenizer = AutoTokenizer.from_pretrained(dir_model)
0 commit comments