Update app.py
Browse files
app.py
CHANGED
@@ -2,8 +2,8 @@ import torch
 2 | from transformers import AutoModelForCausalLM, AutoTokenizer
 3 | import gradio as gr
 4 |
 5 | - # Load
 6 | - model_name = "
 7 | tokenizer = AutoTokenizer.from_pretrained(model_name)
 8 | model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map="auto")
 9 |

(Note: the two removed lines 5-6 were truncated by the page extraction; the original removed string literals are not recoverable from this view.)

@@ -27,7 +27,7 @@ demo = gr.Interface(
27 | fn=chat,
28 | inputs=["dropdown", "text"],
29 | outputs="text",
30 | - title="
31 | description="Select a character and chat with their AI version.",
32 | examples=[["Albert Einstein", "What is relativity?"], ["Cristiano Ronaldo", "How do you stay motivated?"]]
33 | )

(Note: removed line 30 was likewise truncated by the extraction.)
|
|
After the change (added lines marked with +):

 2 | from transformers import AutoModelForCausalLM, AutoTokenizer
 3 | import gradio as gr
 4 |
 5 | + # Load Llama-2 model
 6 | + model_name = "meta-llama/Llama-2-7b-chat-hf"
 7 | tokenizer = AutoTokenizer.from_pretrained(model_name)
 8 | model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map="auto")
 9 |

27 | fn=chat,
28 | inputs=["dropdown", "text"],
29 | outputs="text",
30 | + title="Chat with AI Celebrities",
31 | description="Select a character and chat with their AI version.",
32 | examples=[["Albert Einstein", "What is relativity?"], ["Cristiano Ronaldo", "How do you stay motivated?"]]
33 | )