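# Minimal Gradio Space that forwards prompts to openchat/openchat_3.5 via the
# Hugging Face Serverless Inference API. Expects an HF_READ_TOKEN secret (or
# environment variable) to authenticate the requests.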
import gradio as gr
import os
import requests
API_TOKEN = os.getenv("HF_READ_TOKEN")
headers = {"Authorization": f"Bearer {API_TOKEN}"}
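
# gen() posts the prompt to the Inference API and returns the raw JSON response.
# For text-generation models the API usually answers with a list such as
# [{"generated_text": "..."}]; the raw result is shown in the output textbox as-is.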
def gen(prompt, model='gpt'):
    # Empty prompt: return nothing so the output textbox stays blank.
    if not prompt:
        return None
    if model == 'gpt':
        API_URL = "https://api-inference.huggingface.co/models/openchat/openchat_3.5"
    payload = {
        "inputs": prompt
    }
    response = requests.post(API_URL, headers=headers, json=payload)
    if response.status_code != 200:
        print(f"Error: failed to get a response. Status code: {response.status_code}")
        print(f"Response body: {response.text}")
        return f"An error occurred ({response.text})"
    return response.json()
css = """
footer {visibility: hidden !important;}
"""
with gr.Blocks(css=css) as ui:
    with gr.Tab("Generation"):
        with gr.Row():
            prompt = gr.Textbox(label="Prompt", lines=3)
    with gr.Tab("Parameters"):
        with gr.Row():
            gr.Markdown("## Coming soon...")
    text_button = gr.Button("Generate", variant='primary', elem_id="generate")
    text_output = gr.Textbox(show_label=False, placeholder="Hi! How can I help you?")
    text_button.click(gen, inputs=[prompt], outputs=text_output)
# end
ui.queue(api_open=False).launch()