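"""Gradio chat app for Xylaria 1.4 Senoa.

Streams responses from the Qwen/QwQ-32B-Preview model via the Hugging Face
Inference API and renders them in a simple Blocks-based chat UI.
"""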
import gradio as gr
import os
from huggingface_hub import InferenceClient
import time

# Read the Hugging Face API token from the environment (e.g. a Space secret)
hf_token = os.getenv("hf_token")

client = InferenceClient(api_key=hf_token)

def get_response(user_input):
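    """Stream cumulative partial responses from the model for a single user message."""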
    messages = [
        { "role": "system", "content": "you are xylaria 1.4 senoa, developed by sk md saad amin" },
        { "role": "user", "content": user_input }
    ]
    
    stream = client.chat.completions.create(
        model="Qwen/QwQ-32B-Preview", 
        messages=messages, 
        temperature=0.5,
        max_tokens=10240,
        top_p=0.7,
        stream=True
    )
    
    response = ""
    for chunk in stream:
        delta = chunk.choices[0].delta.content
        if delta:  # Some chunks (e.g. the final one) may carry no content
            response += delta
            yield response  # Yield progressively as the model generates output
        time.sleep(0.05)  # Optional: slow the stream slightly for smoother UI updates

def chat_interface():
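    """Build the Gradio Blocks chat UI: chat window, input textbox, and send button."""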
    with gr.Blocks() as demo:
        with gr.Row():
            with gr.Column():
                chat_output = gr.Chatbot(
                    elem_id="chat-box",
                    label="Xylaria 1.4 Senoa Chatbot",
                    show_label=False,
                    type="messages"  # Specify type for correct message format
                )

        with gr.Row(elem_id="input-row"):
            with gr.Column():
                input_textbox = gr.Textbox(
                    label="Type your message", 
                    placeholder="Ask me anything...",
                    lines=1,
                    max_lines=3,
                    interactive=True,
                    elem_id="user-input",
                    show_label=False
                )
            with gr.Column():
                send_button = gr.Button("Send", elem_id="send-btn")

        def handle_response(user_input, chat_history):
            # Initialize the chat history if it's empty
            chat_history = chat_history or []

            # Append the user's message and an empty assistant placeholder
            chat_history.append({"role": "user", "content": user_input})
            chat_history.append({"role": "assistant", "content": ""})

            # Stream the model output, updating the assistant placeholder in place
            for partial_response in get_response(user_input):
                chat_history[-1]["content"] = partial_response
                yield "", chat_history  # Clear the input box and refresh the chat progressively

        input_textbox.submit(handle_response, [input_textbox, chat_output], [input_textbox, chat_output])
        send_button.click(handle_response, [input_textbox, chat_output], [input_textbox, chat_output])

    demo.css = """
    #input-row {
        position: absolute;
        bottom: 10px;
        width: 100%;
        padding: 10px;
        background-color: #333;  /* Dark background */
        border-top: 1px solid #ddd;
    }
    #chat-box {
        height: calc(100vh - 100px); /* Adjust the height of chat history */
        overflow-y: scroll;
    }
    #user-input, #send-btn {
        background-color: #333;  /* Keep dark gray background */
    }
    """

    return demo

demo = chat_interface()
demo.launch()