import gradio as gr
from huggingface_hub import InferenceClient
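# Point the InferenceClient at a self-hosted, OpenAI-compatible chat completions
# endpoint exposed through an ngrok tunnel (swap in your own server URL).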
client = InferenceClient("https://7896-24-125-188-125.ngrok-free.app/v1/chat/completions")

# Generation settings are fixed; the UI does not expose controls for them.
FIXED_MAX_TOKENS = 1024
FIXED_TEMPERATURE = 1.0
FIXED_TOP_P = 0.95

def respond(message, history):
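    """Stream the assistant's reply to `message`, replaying prior turns from `history`."""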
    # Start from an empty conversation; to pin a system prompt, prepend it here, e.g.:
    # messages = [{"role": "system", "content": "You are a helpful assistant."}]
    messages = []

    # Gradio's default ChatInterface history is a list of (user, assistant) pairs.
    for user_message, ai_message in history:
        if user_message:
            messages.append({"role": "user", "content": user_message})
        if ai_message:
            messages.append({"role": "assistant", "content": ai_message})

    messages.append({"role": "user", "content": message})

    response = ""

    try:
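        # Stream tokens from the endpoint, yielding the accumulated reply so the
        # chat window updates as the model generates.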
        for chunk in client.chat.completions.create(
            messages=messages,
            max_tokens=FIXED_MAX_TOKENS,
            stream=True,
            temperature=FIXED_TEMPERATURE,
            top_p=FIXED_TOP_P,
        ):
            if chunk.choices[0].delta.content is not None:
                token = chunk.choices[0].delta.content
                response += token
                yield response
    except Exception as e:
        yield f"An error occurred: {e}"

header_image_path = "https://cdn-uploads.huggingface.co/production/uploads/6540a02d1389943fef4d2640/j61iZTDaK9g0UW3aWGwWi.gif"


# Build the UI: a header image above a full-height chat interface.
with gr.Blocks(theme=gr.themes.Soft()) as demo:

    gr.Image(
        value=header_image_path,
        label="Chatbot Header",
        show_label=False,
        interactive=False,
        height=100,
        elem_id="chatbot-logo"
    )

    # ChatInterface wires the streaming `respond` generator to a tall chat window.
    gr.ChatInterface(
        respond,
        chatbot=gr.Chatbot(height=700)
    )

if __name__ == "__main__":
    demo.launch(show_api=False, share=True)