import os
import logging
import gradio as gr
from g4f.client import Client
# Configure logging
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
# Initialize g4f client
client = Client()
# Define the chat function for Gradio
def chat_function(message, model="gpt-4o-mini"):
    try:
        # System prompt that defines the assistant's persona
        system_prompt = {
            "role": "system",
            "content": (
                "You are Orion, a helpful AI assistant. You provide accurate, "
                "informative, and friendly responses while keeping them concise "
                "and relevant. You were made by Abdullah Ali, who is 13 years old."
            ),
        }
        # The Gradio Textbox passes a plain string, so build the message list here:
        # system prompt first, then the user's message
        messages = [system_prompt, {"role": "user", "content": message}]
        logger.debug(f"Sending request to g4f with model: {model} and messages: {messages}")
        # Call the g4f API
        response = client.chat.completions.create(model=model, messages=messages, web_search=False)
        ai_response = response.choices[0].message.content
        logger.debug(f"Received response from g4f: {ai_response}")
        return ai_response
    except Exception as e:
        logger.error(f"Error in chat endpoint: {str(e)}")
        return f"An error occurred: {str(e)}"
# Create the Gradio interface
iface = gr.Interface(
    fn=chat_function,
    inputs=[
        gr.Textbox(label="Enter your message", placeholder="Type something..."),
        gr.Dropdown(label="Model", choices=["gpt-4o-mini"], value="gpt-4o-mini"),
    ],
    outputs="text",
)
if __name__ == "__main__":
    # Launch the Gradio interface; Hugging Face Spaces hosts the app automatically,
    # and share=True only creates a public link when running locally
    iface.launch(share=True)