Spaces:
Sleeping
Sleeping
import json

import gradio as gr

from helper import *
from prompts_and_chema import *
def full_chat_pipeline(user_query, openai_key):
    """Classify a user query into intents, then answer it with loaded context.

    Two-step GPT pipeline:
      1. Ask the model to classify ``user_query`` into intents, constrained
         by ``intent_output_schema`` so the reply is structured JSON.
      2. Load the context text files and descriptions for those intents and
         ask the model again for the final, context-grounded answer.

    Parameters
    ----------
    user_query : str
        The question typed by the user in the UI.
    openai_key : str
        OpenAI API key entered in the UI; set process-wide on the ``openai``
        module for the calls made by ``chat_with_gpt``.

    Returns
    -------
    tuple[str, str]
        ``(classified_intents, final_answer)`` as display strings, or a pair
        of error messages if any step fails.
    """
    try:
        # Make the key available to the OpenAI client used by chat_with_gpt.
        openai.api_key = openai_key

        # Step 1: classify the query into intents (structured JSON output).
        raw_response = chat_with_gpt(
            intent_system_prompt,
            user_query,
            use_schema=True,
            schema=intent_output_schema,
        )
        # chat_with_gpt may return either a JSON string or an already-parsed
        # object depending on the schema path — normalize to a dict here.
        intents = json.loads(raw_response) if isinstance(raw_response, str) else raw_response

        # Step 2: gather the context texts and human-readable descriptions
        # for the detected intents, then assemble the answer prompt.
        loaded_texts = load_intent_texts(intents, get_txt_files)
        selected_intent_description = load_intent_description(intents, intent_description_map)
        context_str = '\n\n'.join(f"{k}:\n{v}" for k, v in loaded_texts.items())
        formatted_prompt = f'''
User query: {user_query}
Selected Intents: {selected_intent_description}
Context: {context_str}
'''

        # Step 3: get the final answer from the second GPT call.
        final_response = chat_with_gpt(get_answer_system_prompt, formatted_prompt)
        return str(intents), str(final_response)
    except Exception as e:
        # Boundary handler for the Gradio callback: surface the error text in
        # both output boxes instead of crashing the app.
        return f"Error while classifying intents: {str(e)}", f"Error while generating response: {str(e)}"
# ---------------------------------------------------------------------------
# Gradio UI: API-key box on top, query and classified-intents side by side,
# a submit button, and the full model answer underneath.
# ---------------------------------------------------------------------------
with gr.Blocks(title="Degirum LLM") as demo:
    gr.Markdown("## Degirum LLM")
    gr.Markdown("Ask questions...")

    # OpenAI key on top (masked input so the key is not shown on screen).
    openai_key = gr.Textbox(
        label="🔑 OpenAI API Key",
        placeholder="Enter your OpenAI API key...",
        type="password",
    )

    # Query input and detected-intents display side by side.
    with gr.Row():
        user_query = gr.Textbox(
            label="💬 Query",
            placeholder="Type your question here...",
            lines=3,
        )
        intent_output = gr.Textbox(
            label="🧠 Classified Intents",
            placeholder="will get answer by AI",
            lines=3,
            interactive=False,
        )

    submit_btn = gr.Button("🚀 Submit", variant="primary")

    # Full AI answer at the bottom.
    response_output = gr.Textbox(label="🤖 Full AI Response", lines=10, interactive=False)

    # Wire the button: pipeline returns (intents, answer) for the two outputs.
    submit_btn.click(
        fn=full_chat_pipeline,
        inputs=[user_query, openai_key],
        outputs=[intent_output, response_output],
    )

if __name__ == "__main__":
    demo.launch()