diff --git a/src/llama-7b/model/model.py b/src/llama-7b/model/model.py
index 6dadc57..a2d5388 100644
--- a/src/llama-7b/model/model.py
+++ b/src/llama-7b/model/model.py
@@ -13,7 +13,7 @@ def __init__(self, **kwargs) -> None:
         self._tokenizer = None

     def load(self):
-        self._tokenizer = LlamaTokenizer.from_pretrained("decapoda-research/llama-7b-hf")
+        self._tokenizer = LlamaTokenizer.from_pretrained("huggyllama/llama-7b")
         self._model = LlamaForCausalLM.from_pretrained(
             str(self._data_dir),
             torch_dtype=torch.float16,