import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
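# Dependencies (assumed requirements.txt for the Space): gradio, transformers,
# and a backend such as torch, which AutoModelForCausalLM needs to load weights.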

# ✅ Relatively small causal LM (~2.7B parameters) suitable for free Hugging Face Spaces
model_name = "microsoft/phi-2"

# Load tokenizer and model
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)

# Chatbot function: sample up to 200 new tokens continuing the user's prompt
def chat_with_llm(prompt):
    response = pipe(prompt, max_new_tokens=200, do_sample=True)
    return response[0]["generated_text"]

# Gradio interface for the web chat UI
demo = gr.Interface(
    fn=chat_with_llm,
    inputs=gr.Textbox(lines=2, placeholder="Type your question..."),
    outputs="text",
    title="🗨️ My Hugging Face Chatbot",
    description="Powered by the Phi-2 model. Free cloud chatbot.",
)
demo.launch()
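
# Optional smoke test, assuming a local run with `python app.py` rather than a
# deployed Space: demo.launch() blocks while the server is up, so uncomment the
# line below (and comment out the launch above) to check raw generation first.
# print(chat_with_llm("Explain what a language model is in one sentence."))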