File size: 3,072 Bytes
4f7f4f3
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
import gradio as gr
import requests
from huggingface_hub import InferenceClient

# Hugging Face inference client for the Mistral-Nemo instruct model.
client = InferenceClient("mistralai/Mistral-Nemo-Instruct-2407")

# Fixed GitHub raw URL pointing at the chatbot's system prompt text.
GITHUB_RAW_URL = "https://raw.githubusercontent.com/ALPERALL/AlpDroid/main/prompt.txt"


def fetch_system_message():
    """Fetch the chatbot system prompt from the GitHub raw URL.

    Returns:
        str: The prompt text with surrounding whitespace stripped, or a
        string starting with "Error" when the request fails (callers
        check for that prefix).
    """
    try:
        # timeout keeps a stalled network request from hanging the app
        # forever; without it requests.get can block indefinitely.
        response = requests.get(GITHUB_RAW_URL, timeout=10)
        response.raise_for_status()
        return response.text.strip()
    except requests.exceptions.RequestException as e:
        return f"Error fetching system message: {str(e)}"


def respond(message, history):
    """Stream an assistant reply for *message* given earlier *history*.

    Args:
        message: The latest user message.
        history: Sequence of (user, assistant) pairs from previous turns;
            either side may be empty/None and is then skipped.

    Yields:
        str: The accumulated assistant reply, growing as tokens stream in.
    """
    # Fixed generation parameters
    max_tokens = 512
    temperature = 0.7
    top_p = 0.95

    # Fetch the system message from GitHub; bail out early on failure
    # (fetch_system_message signals errors with an "Error" prefix).
    system_message = fetch_system_message()
    if system_message.startswith("Error"):
        yield system_message
        return

    messages = [{"role": "system", "content": system_message}]

    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})

    messages.append({"role": "user", "content": message})

    response = ""

    # NOTE: the loop variable was previously also named ``message``,
    # shadowing the parameter — renamed to ``chunk`` to avoid the trap.
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        # delta.content can be None on some stream chunks (e.g. the final
        # one); guard so ``str + None`` does not raise TypeError.
        if token:
            response += token
        yield response


# Dark theme: emerald accents on a near-black (GitHub-dark-like) palette.
_font_stack = [
    gr.themes.GoogleFont("Exo"),
    "ui-sans-serif",
    "system-ui",
    "sans-serif",
]

# Dark-mode color overrides applied on top of the Soft base theme.
_dark_overrides = {
    "body_background_fill_dark": "#010409",
    "block_background_fill_dark": "#010409",
    "block_border_width": "1px",
    "block_title_background_fill_dark": "#1e1c26",
    "input_background_fill_dark": "#161b22",
    "button_secondary_background_fill_dark": "#21262d",
    "border_color_accent_dark": "#2f353c",
    "border_color_primary_dark": "#2f353c",
    "background_fill_secondary_dark": "#010409",
    "color_accent_soft_dark": "transparent",
    "code_background_fill_dark": "#0d1117",
}

theme = gr.themes.Soft(
    primary_hue="emerald",
    secondary_hue="emerald",
    neutral_hue="gray",
    font=_font_stack,
).set(**_dark_overrides)

# Build and launch the demo UI.
with gr.Blocks(theme=theme) as demo:
    with gr.Row(elem_id="chuanhu-header"):
        gr.HTML("<h1>Chatbot Arayüzü</h1>", elem_id="app-title")
        status_display = gr.Markdown("Status: Ready", elem_id="status-display")

    with gr.Row(elem_id="chuanhu-body"):
        with gr.Column(elem_id="chatbot-area"):
            chatbot = gr.Chatbot(label="Chatbot", elem_id="chuanhu-chatbot")
            user_input = gr.Textbox(show_label=False, placeholder="Buraya yazın...", elem_id="user-input-tb")
            submit_btn = gr.Button("Gönder", variant="primary", elem_id="submit-btn")

    with gr.Row(elem_id="chuanhu-footer"):
        gr.Markdown("Chatbot Arayüzü", elem_id="footer")

    def _on_submit(user_message, chat_history):
        """Adapt respond()'s streamed strings to gr.Chatbot's pair format.

        gr.Chatbot expects a list of [user, assistant] pairs, but respond()
        yields only the growing assistant string; wiring respond() straight
        to the Chatbot output fed it invalid data. This wrapper appends a
        new pair and streams the partial reply into its assistant slot.
        """
        chat_history = (chat_history or []) + [[user_message, ""]]
        for partial in respond(user_message, chat_history[:-1]):
            chat_history[-1][1] = partial
            yield chat_history

    submit_btn.click(_on_submit, inputs=[user_input, chatbot], outputs=[chatbot])

if __name__ == "__main__":
    demo.launch(share=True)