transformers/temp_llama.py

from transformers import LlamaForCausalLM, CodeLlamaTokenizer
# Load the Code Llama 7B base checkpoint and its matching tokenizer.
tokenizer = CodeLlamaTokenizer.from_pretrained("codellama/CodeLlama-7b-hf")
model = LlamaForCausalLM.from_pretrained("codellama/CodeLlama-7b-hf")
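
# A minimal usage sketch, not part of the original file: it assumes the model
# and tokenizer loaded above; the prompt and generation settings are illustrative.
prompt = "def fibonacci(n):"
inputs = tokenizer(prompt, return_tensors="pt")
output_ids = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))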