TejAndrewsACC committed on
Commit
e7caca9
·
verified ·
1 Parent(s): 6172817

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +34 -15
app.py CHANGED
@@ -1,20 +1,39 @@
1
- import gradio as gr
2
- from openai import OpenAI
 
3
 
 
 
4
 
5
- client = OpenAI(
6
- base_url="http://soggy-sage-goat-8000.1.cricket.hyperbolic.xyz:30000/v1/",
7
- api_key="hyperbolic"
8
- )
 
9
 
10
- def predict(message, history):
11
- history.append({"role": "user", "content": message})
12
- stream = client.chat.completions.create(messages=history, model="meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8", stream=True)
13
- chunks = []
14
- for chunk in stream:
15
- chunks.append(chunk.choices[0].delta.content or "")
16
- yield "".join(chunks)
17
 
18
- demo = gr.ChatInterface(predict, type="messages", title="ACC Emulect+", theme="TejAndrewsACC/Emulect")
19
 
20
- demo.launch()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
import gradio as gr
from openai import OpenAI

# System prompt is configured via the environment so it can be changed
# without editing code. Default to "" (not None): os.getenv returns None
# when the variable is unset, and sending {"content": None} to the chat
# API is invalid — an empty string also lets callers test truthiness.
SYSTEM_PROMPT = os.getenv("SYSTEM_PROMPT", "")

# Chat-completions client pointed at a self-hosted Hyperbolic endpoint.
# NOTE(review): base_url and api_key are hard-coded; acceptable for a demo
# Space, but consider sourcing them from environment variables.
client = OpenAI(
    base_url="http://soggy-sage-goat-8000.1.cricket.hyperbolic.xyz:30000/v1/",
    api_key="hyperbolic",
)
13
 
14
def predict(message, history):
    """Stream an assistant reply for *message* given the chat *history*.

    Gradio's ChatInterface (type="messages") supplies *history* as a list
    of {"role": ..., "content": ...} dicts and expects a generator that
    yields the progressively growing assistant text.
    """
    # Work on a copy: mutating the list Gradio owns (in-place insert or
    # append) can duplicate the user message in the visible chat and leak
    # the system prompt into the UI history.
    messages = list(history)

    # Prepend the system prompt once per conversation — and only when one
    # is actually configured. If the SYSTEM_PROMPT env var is unset, the
    # original code inserted a message with content=None, which the chat
    # API rejects; the truthiness guard prevents that.
    if SYSTEM_PROMPT and not any(m.get("role") == "system" for m in messages):
        messages.insert(0, {"role": "system", "content": SYSTEM_PROMPT})

    messages.append({"role": "user", "content": message})

    stream = client.chat.completions.create(
        messages=messages,
        model="meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
        stream=True,
    )

    # Accumulate deltas and yield the running concatenation so the UI
    # renders the reply growing token by token.
    chunks = []
    for chunk in stream:
        chunks.append(chunk.choices[0].delta.content or "")
        yield "".join(chunks)
31
+
32
# Chat UI wiring. type="messages" makes Gradio pass history to predict()
# as OpenAI-style {"role", "content"} dicts — the format predict() consumes
# and forwards to the chat-completions API.
demo = gr.ChatInterface(
    predict,
    type="messages",
    title="ACC Emulect+",
    theme="TejAndrewsACC/Emulect"  # custom theme published on the HF Hub
)

# Launch at module level (standard for Hugging Face Spaces entry points).
demo.launch()