Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -9,6 +9,7 @@ def call_api(prompt, history_json, system_prompt, temperature, max_new_tokens, t
|
|
9 |
history = json.loads(history_json)
|
10 |
except json.JSONDecodeError:
|
11 |
return "History input must be valid JSON. E.g., [[\"User\", \"Bot response\"], [\"User2\", \"Response2\"]]"
|
|
|
12 |
data = {
|
13 |
"prompt": prompt,
|
14 |
"history": history,
|
@@ -18,6 +19,7 @@ def call_api(prompt, history_json, system_prompt, temperature, max_new_tokens, t
|
|
18 |
"top_p": top_p,
|
19 |
"repetition_penalty": repetition_penalty
|
20 |
}
|
|
|
21 |
try:
|
22 |
response = requests.post(url, json=data)
|
23 |
response.raise_for_status() # Raises HTTPError for bad requests
|
@@ -29,7 +31,7 @@ def main_interface():
|
|
29 |
with gr.Blocks() as demo:
|
30 |
with gr.Row():
|
31 |
prompt = gr.Textbox(label="Prompt", placeholder="Enter your prompt here...")
|
32 |
-
history = gr.TextArea(label="History", placeholder="Enter history as JSON array...",
|
33 |
system_prompt = gr.Textbox(label="System Prompt", placeholder="반드시 한글로 답변하라")
|
34 |
temperature = gr.Number(label="Temperature", value=0.3)
|
35 |
max_new_tokens = gr.Number(label="Max New Tokens", value=1048)
|
|
|
9 |
history = json.loads(history_json)
|
10 |
except json.JSONDecodeError:
|
11 |
return "History input must be valid JSON. E.g., [[\"User\", \"Bot response\"], [\"User2\", \"Response2\"]]"
|
12 |
+
|
13 |
data = {
|
14 |
"prompt": prompt,
|
15 |
"history": history,
|
|
|
19 |
"top_p": top_p,
|
20 |
"repetition_penalty": repetition_penalty
|
21 |
}
|
22 |
+
|
23 |
try:
|
24 |
response = requests.post(url, json=data)
|
25 |
response.raise_for_status() # Raises HTTPError for bad requests
|
|
|
31 |
with gr.Blocks() as demo:
|
32 |
with gr.Row():
|
33 |
prompt = gr.Textbox(label="Prompt", placeholder="Enter your prompt here...")
|
34 |
+
history = gr.TextArea(label="History", placeholder="Enter history as JSON array...", lines=3)
|
35 |
system_prompt = gr.Textbox(label="System Prompt", placeholder="반드시 한글로 답변하라")
|
36 |
temperature = gr.Number(label="Temperature", value=0.3)
|
37 |
max_new_tokens = gr.Number(label="Max New Tokens", value=1048)
|