thelip committed on
Commit
b4682e6
·
verified ·
1 Parent(s): b3f4159

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -2,12 +2,12 @@ import gradio as gr
2
  from transformers import AutoTokenizer, AutoModelForCausalLM
3
  import torch
4
  import os
5
-
6
  # Load model and tokenizer with the token from environment variables
7
  model_name = "meta-llama/Llama-2-7b-hf"
8
  token = os.getenv("HUGGINGFACE_TOKEN") # Get token from environment
9
- tokenizer = AutoTokenizer.from_pretrained(model_name, use_auth_token=token)
10
- model = AutoModelForCausalLM.from_pretrained(model_name, use_auth_token=token, torch_dtype=torch.float16)
11
  model = model.to("cuda" if torch.cuda.is_available() else "cpu")
12
 
13
  # Function to generate responses
 
2
  from transformers import AutoTokenizer, AutoModelForCausalLM
3
  import torch
4
  import os
5
+
6
  # Load model and tokenizer with the token from environment variables
7
  model_name = "meta-llama/Llama-2-7b-hf"
8
  token = os.getenv("HUGGINGFACE_TOKEN") # Get token from environment
9
+ tokenizer = AutoTokenizer.from_pretrained(model_name, token=token)
10
+ model = AutoModelForCausalLM.from_pretrained(model_name, token=token, torch_dtype=torch.float16)
11
  model = model.to("cuda" if torch.cuda.is_available() else "cpu")
12
 
13
  # Function to generate responses