Fix loading Phi-3 (#48)
Committed by GitHub
parent e55a0738aa
commit aabef6b341
@@ -859,7 +859,7 @@
 "model.config.pretraining_tp = 1\n",
 "\n",
 "# Load LLaMA tokenizer\n",
-"tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)\n",
+"tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=False)\n",
 "tokenizer.pad_token = \"<PAD>\"\n",
 "tokenizer.padding_side = \"left\""
 ]
@@ -1598,7 +1598,7 @@
 "\n",
 "# Load LLaMA tokenizer\n",
 "model_name = \"TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T\"\n",
-"tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)\n",
+"tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=False)\n",
 "tokenizer.pad_token = \"<PAD>\"\n",
 "tokenizer.padding_side = \"left\""
 ]
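For reference, a minimal sketch of what the patched notebook cells do after this commit, assuming the transformers library is installed and using the TinyLlama checkpoint named in the second hunk (the first hunk's model_name is defined elsewhere in the notebook and is not shown in this diff):

from transformers import AutoTokenizer

# Checkpoint name taken from the second hunk of this diff.
model_name = "TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T"

# trust_remote_code=False: load the tokenizer with the library's own code
# instead of executing custom code shipped with the checkpoint.
tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=False)
tokenizer.pad_token = "<PAD>"
tokenizer.padding_side = "left"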