import gradio as gr
from huggingface_hub import InferenceClient

# Custom background CSS with a semi-transparent chat panel
css = """
body {
    background-image: url('https://cdn-uploads.huggingface.co/production/uploads/67351c643fe51cb1aa28f2e5/wuyd5UYTh9jPrMJGmV9yC.jpeg');
    background-size: cover;
    background-position: center;
    background-repeat: no-repeat;
}
#custom-title {
    color: #d63384;
    font-family: 'Playfair Display', serif;
    font-size: 2.5rem;
    font-weight: bold;
    text-align: center;
    margin-bottom: 20px;
}
#chat-panel {
    background-color: rgba(255, 255, 255, 0.85);
    padding: 2rem;
    border-radius: 12px;
    justify-content: center;
    width: 100%;
    max-width: 700px;
    height: 70vh;
    box-shadow: 0 0 12px rgba(0, 0, 0, 0.3);
    overflow-y: auto;
}
"""
# Inference API client for the Zephyr 7B chat model
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")


def respond(message, history, system_message, max_tokens, temperature, top_p):
    """Stream a reply, using the values supplied by the Advanced Settings controls."""
    messages = [{"role": "system", "content": system_message}]

    # Rebuild the conversation from the (user, assistant) history tuples
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})

    messages.append({"role": "user", "content": message})

    # Stream tokens and yield the growing response so the UI updates live
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content or ""
        response += token
        yield response
with gr.Blocks(css=css) as demo:
    # Title Markdown block
    gr.Markdown("French Tutor", elem_id="custom-title")
    with gr.Column(elem_id="chat-panel"):
        with gr.Accordion("⚙️ Advanced Settings", open=False):
            system_message = gr.Textbox(
                value="You are a helpful French tutor.",
                label="System Message"
            )
            max_tokens = gr.Slider(
                minimum=1,
                maximum=2048,
                value=512,
                step=1,
                label="Response Length"
            )
            temperature = gr.Slider(
                minimum=0.1,
                maximum=4.0,
                value=0.7,
                step=0.1,
                label="Creativity"
            )
            top_p = gr.Slider(
                minimum=0.1,
                maximum=1.0,
                value=0.95,
                step=0.05,
                label="Dynamic Text"
            )

        # The additional inputs are passed to respond() after (message, history)
        gr.ChatInterface(
            respond,
            additional_inputs=[
                system_message,
                max_tokens,
                temperature,
                top_p
            ]
        )

if __name__ == "__main__":
    demo.launch()