import gradio as gr
import openai
import json
import os
from typing import List, Tuple, Optional
from datetime import datetime
class ChatbotManager:
def __init__(self):
self.conversation_history = []
self.current_api_key = None
self.current_model = "gpt-3.5-turbo"
self.system_prompt = "You are a helpful AI assistant. Respond in a friendly and informative manner." #default
self.max_tokens = 150
self.temperature = 0.7
def set_api_key(self, api_key: str) -> str:
if not api_key.strip():
return "❌ Please enter a valid API key"
self.current_api_key = api_key.strip()
openai.api_key = self.current_api_key
try:
openai.Model.list()
return "βœ… API key validated successfully!"
except Exception as e:
return f"❌ Invalid API key: {str(e)}"
def update_settings(self, model: str, system_prompt: str, max_tokens: int, temperature: float) -> str:
self.current_model = model
self.system_prompt = system_prompt
self.max_tokens = max_tokens
self.temperature = temperature
return f"βœ… Settings updated: Model={model}, Max Tokens={max_tokens}, Temperature={temperature}"
def preprocess_data(self, data_text: str) -> str:  # fold custom data into the system prompt so it acts as extra context for the model
if not data_text.strip():
return "No custom data provided"
base_prompt = "You are a helpful AI assistant. Respond in a friendly and informative manner."
self.system_prompt = base_prompt + f"\n\nAdditional Context:\n{data_text}"
return f"βœ… Custom data integrated ({len(data_text)} characters)"
def generate_response(self, user_input: str, history: List[Tuple[str, str]]) -> Tuple[str, List[Tuple[str, str]]]:
if not self.current_api_key:
return "❌ Please set your API key first!", history
if not user_input.strip():
return "Please enter a message.", history
try:
messages=[{"role": "system", "content": self.system_prompt}]
for user_msg, assistant_msg in history:
messages.append({"role": "user", "content": user_msg})
messages.append({"role": "assistant", "content": assistant_msg})
messages.append({"role": "user", "content": user_input})
response = openai.ChatCompletion.create(
model=self.current_model,
messages=messages,
max_tokens=self.max_tokens,
temperature=self.temperature,
n=1,
stop=None,
)
assistant_response = response.choices[0].message.content.strip()
history.append((user_input, assistant_response))
return assistant_response, history
except Exception as e:
error_msg = f"❌ Error generating response: {str(e)}"
return error_msg, history
def clear_conversation(self) -> Tuple[str, List[Tuple[str, str]]]:
self.conversation_history = []
return "", []
chatbot = ChatbotManager()
AVAILABLE_MODELS = [  # model choices for the dropdown (legacy openai==0.28 ChatCompletion API)
"gpt-3.5-turbo",
"gpt-3.5-turbo-16k",
"gpt-4",
"gpt-4-32k",
"gpt-4-0613",
"gpt-4-32k-0613"
]
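# --- Hedged illustration of the request payload (illustrative data only) -----
# generate_response() above flattens the (user, assistant) history tuples into
# the role-tagged message list expected by the openai==0.28 ChatCompletion API;
# the literal below only sketches that shape and is never sent anywhere.
_EXAMPLE_MESSAGES_PAYLOAD = [
    {"role": "system", "content": "You are a helpful AI assistant. ..."},
    {"role": "user", "content": "Hi there!"},
    {"role": "assistant", "content": "Hello! How can I help you today?"},
    {"role": "user", "content": "Summarise our chat so far."},  # newest turn goes last
]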
def create_interface():
with gr.Blocks(title="LLM-Based Chatbot", theme=gr.themes.Ocean()) as demo:
gr.Markdown("""
# 🤖 LLM-Based Conversational AI Chatbot
This chatbot leverages powerful Language Models to provide intelligent conversations.
Enter your OpenAI API key to get started!
""")
with gr.Tab("Chat Interface"):
with gr.Row():
with gr.Column(scale=3):
chatbot_interface = gr.Chatbot(
label="Conversation",
height=400,
show_label=True,
avatar_images=("user.png", "assistant.png"),
show_copy_button=True,
bubble_full_width=False,
)
with gr.Row():
user_input = gr.Textbox(
placeholder="Type your message here...",
scale=4,
show_label=False,
container=False
)
send_btn = gr.Button("📤 Send", variant="primary", scale=1)
with gr.Row():
clear_btn = gr.Button("🗑️ Clear Chat")
regenerate_btn = gr.Button("🔄 Regenerate")
with gr.Column(scale=1):
gr.Markdown("### πŸ”§ Quick Settings")
api_key_input = gr.Textbox(
label="πŸ”‘ OpenAI API Key",
placeholder="sk-...",
type="password"
)
api_status = gr.Textbox(
label="API Status",
interactive=False,
value="❌ No API key provided"
)
model_dropdown = gr.Dropdown(
choices=AVAILABLE_MODELS,
value="gpt-3.5-turbo",
label="πŸ€– Model"
)
max_tokens_slider = gr.Slider(
minimum=50,
maximum=4096,
value=150,
step=10,
label="πŸ“ Max Tokens"
)
temperature_slider = gr.Slider(
minimum=0.0,
maximum=1.0,
value=0.7,
step=0.1,
label="🌑️ Temperature"
)
gr.Markdown("### πŸ“Š Current Settings")
current_settings = gr.Textbox(
value="Model: gpt-3.5-turbo\nTokens: 150\nTemp: 0.7",
label="Active Configuration",
interactive=False,
lines=3
)
with gr.Tab("βš™οΈ Advanced Settings"):
gr.Markdown("### 🎯 System Prompt Configuration")
system_prompt_input = gr.Textbox(
label="System Prompt",
value="You are a helpful AI assistant. Respond in a friendly and informative manner.",
lines=5,
placeholder="Enter custom system prompt..."
)
gr.Markdown("### πŸ“š Custom Data Integration")
custom_data_input = gr.Textbox(
label="Custom Training Data",
lines=10,
placeholder="Enter custom data, FAQs, or domain-specific information..."
)
with gr.Row():
update_settings_btn = gr.Button("✅ Update Settings")
integrate_data_btn = gr.Button("📊 Integrate Custom Data")
reset_prompt_btn = gr.Button("🔄 Reset to Default")
settings_status = gr.Textbox(
label="Settings Status",
interactive=False
)
gr.Markdown("### 🎭 Preset System Prompts")
with gr.Row():
preset_customer_support = gr.Button("👥 Customer Support")
preset_tutor = gr.Button("🎓 Educational Tutor")
preset_creative = gr.Button("✨ Creative Assistant")
preset_technical = gr.Button("🔧 Technical Writer")
#### Event Handling
def handle_api_key(api_key):
status = chatbot.set_api_key(api_key)
return status
# Connect events
api_key_input.change(
handle_api_key,
inputs=[api_key_input],
outputs=[api_status]
)
def handle_chat(user_input, history):  # user_input: latest user query; history: list of (user, assistant) tuples
if not user_input.strip():
return history or [], ""
response, updated_history = chatbot.generate_response(user_input, history or [])
return updated_history, ""
send_btn.click(
handle_chat,
inputs=[user_input, chatbot_interface],
outputs=[chatbot_interface, user_input]
)
user_input.submit(
handle_chat,
inputs=[user_input, chatbot_interface],
outputs=[chatbot_interface, user_input]
)
def handle_settings_update(model, system_prompt, max_tokens, temperature):
status = chatbot.update_settings(model, system_prompt, max_tokens, temperature)
settings_display = f"Model: {model}\nTokens: {max_tokens}\nTemp: {temperature}"
return status, settings_display
update_settings_btn.click(
handle_settings_update,
inputs=[model_dropdown, system_prompt_input, max_tokens_slider, temperature_slider],
outputs=[settings_status, current_settings]
)
def handle_data_integration(custom_data):
status = chatbot.preprocess_data(custom_data)
return status
integrate_data_btn.click(
handle_data_integration,
inputs=[custom_data_input],
outputs=[settings_status]
)
def handle_clear():
return chatbot.clear_conversation()
clear_btn.click(
handle_clear,
outputs=[user_input, chatbot_interface]
)
def handle_regenerate(history):
if not history:
return history or []
last_user_msg = history[-1][0]  # re-ask the most recent user message; the regenerated answer may differ from the previous one
history_without_last = history[:-1]
response, updated_history = chatbot.generate_response(last_user_msg, history_without_last)
return updated_history
regenerate_btn.click(
handle_regenerate,
inputs=[chatbot_interface],
outputs=[chatbot_interface]
)
def update_settings_display(model, max_tokens, temperature):
return f"Model: {model}\nTokens: {max_tokens}\nTemp: {temperature}"
for component in [model_dropdown, max_tokens_slider, temperature_slider]:
component.change(
update_settings_display,
inputs=[model_dropdown, max_tokens_slider, temperature_slider],
outputs=[current_settings]
)
def reset_prompt():
default_prompt = "You are a helpful AI assistant. Respond in a friendly and informative manner."
return default_prompt, "βœ… System prompt reset to default"
reset_prompt_btn.click(
reset_prompt,
outputs=[system_prompt_input, settings_status]
)
def load_preset_prompt(preset_type):
presets = {
"customer_support": "You are a helpful customer support representative. You are friendly, professional, and knowledgeable. Always try to resolve customer issues and provide clear solutions. If you cannot solve a problem, escalate it politely. Always give complete responses.",
"tutor": "You are an experienced tutor. Explain concepts clearly, use examples, and encourage students when they struggle. Break down complex problems into smaller, manageable steps. Always check for understanding. Always give complete responses.",
"creative": "You are a creative writing assistant who helps with stories, poems, and creative content. Provide constructive feedback, suggest improvements, and inspire creativity while maintaining quality standards. Always give complete responses.",
"technical": "You are a technical writer who creates clear, concise documentation. Use precise language, provide examples when relevant, and structure information logically for developers and technical users. Always give complete responses."
}
return presets.get(preset_type, ""), f"✅ Loaded {preset_type.replace('_', ' ').title()} preset"
preset_customer_support.click(
lambda: load_preset_prompt("customer_support"),
outputs=[system_prompt_input, settings_status]
)
preset_tutor.click(
lambda: load_preset_prompt("tutor"),
outputs=[system_prompt_input, settings_status]
)
preset_creative.click(
lambda: load_preset_prompt("creative"),
outputs=[system_prompt_input, settings_status]
)
preset_technical.click(
lambda: load_preset_prompt("technical"),
outputs=[system_prompt_input, settings_status]
)
return demo
if __name__ == "__main__":
demo = create_interface()
demo.launch(
share=True
)
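# --- Hedged launch variants (standard Gradio options, not used above) --------
# share=True asks Gradio for a temporary public link; on a managed host such as
# Hugging Face Spaces the platform already serves the app, so a plain launch is
# a common alternative:
#
#     demo.launch()                                            # local only
#     demo.launch(server_name="0.0.0.0", server_port=7860)     # explicit bind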