import traceback

import gradio as gr
from gradio_client import Client

# Connect to the hosted model Space
client = Client("Futuresony/Mr.Events")
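
# Note (assumption): the Space is expected to expose a "/chat" endpoint that takes
# (message, history) positionally, as used in chat_with_model below. If the endpoint
# name or signature is unclear, the hosted app's API can be inspected with:
#     client.view_api()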

# Function to interact with the hosted model
def chat_with_model(user_input, chat_history):
    """
    Sends user input and chat history to the hosted model and returns the response
    and updated history.
    """
    # The hosted Gradio app expects the chat history as a list of [user, assistant] pairs.
    # Initial call will have chat_history as an empty list.
    # Subsequent calls will have chat_history including previous turns.
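    # Example history after one exchange (assumed pair format):
    #     [["Hi", "Hello! How can I help?"]]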

    print(f"Client sending query: {user_input}")
    print(f"Client sending history: {chat_history}")

    try:
        # Call the hosted model's chat endpoint
        # Pass user_input and chat_history as positional arguments
        result = client.predict(
            user_input,
            chat_history, # Pass the history from the client's Chatbot
            api_name="/chat"
        )
        print(f"Client received raw result: {result}")

        # The hosted app's `chat` function returns the final response string.
        # We need to append the user input and the model's response to the history.
        updated_history = chat_history + [[user_input, result]]
        return "", updated_history # Return empty string for textbox and updated history

    except Exception as e:
        print(f"Error during client prediction: {e}")
        print(traceback.format_exc())
        # Append user input and an error message to history
        error_message = f"An error occurred while communicating with the model: {e}"
        updated_history = chat_history + [[user_input, error_message]]
        return "", updated_history


# Build the Gradio chat interface
with gr.Blocks() as demo:
    gr.Markdown("## 💬 Test the ABSA Model Chat")
    chatbot = gr.Chatbot(height=400) # Chatbot to display conversation
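    # The [user, assistant] pair format returned by chat_with_model matches the
    # pairs-style ("tuples") history that gr.Chatbot accepts by default; newer Gradio
    # releases also support type="messages" (role/content dicts), which would require
    # adjusting chat_with_model to match.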
    msg = gr.Textbox(label="Type your message") # Textbox for user input
    clear = gr.ClearButton([msg, chatbot]) # Button to clear the textbox and chat history

    # Link the input, button, and chatbot
    # The fn will receive the textbox value and the chatbot history.
    # It will return an empty string for the textbox and the updated history for the chatbot.
    msg.submit(chat_with_model, [msg, chatbot], [msg, chatbot])

# Launch the Gradio interface
demo.launch(debug=True, show_error=True) # show_error surfaces exceptions in the UI
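
# Optional smoke test (a minimal sketch, assuming the same "/chat" endpoint as above):
# calling the hosted Space once from a plain Python session, without the UI, confirms
# the endpoint is reachable and returns a string response.
#
#     from gradio_client import Client
#     client = Client("Futuresony/Mr.Events")
#     print(client.predict("Hello!", [], api_name="/chat"))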