spuuntries
committed on
Commit
·
412c37a
1
Parent(s):
b8eb7e3
feat!: add app
Browse files- .gitignore +2 -0
- main.py +233 -0
- requirements.txt +0 -0
.gitignore
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
.venv
|
2 |
+
sessions.db
|
main.py
ADDED
@@ -0,0 +1,233 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import gradio as gr
|
2 |
+
from huggingface_hub import InferenceClient
|
3 |
+
import sqlite3
|
4 |
+
import os
|
5 |
+
import random
|
6 |
+
import datetime
|
7 |
+
import string
|
8 |
+
import re
|
9 |
+
|
10 |
+
# OpenRouter speaks the OpenAI-compatible protocol, so we reach it through
# huggingface_hub's InferenceClient pointed at its base URL.
# SECURITY FIX: the API key was previously hard-coded here (and committed to
# the repo). Read it from the environment instead — the leaked key must be
# rotated regardless.
client = InferenceClient(
    base_url="https://openrouter.ai/api/v1",
    api_key=os.environ.get("OPENROUTER_API_KEY", ""),
)
|
14 |
+
|
15 |
+
|
16 |
+
def setup_db():
    """Create and seed the hidden flag table in sessions.db (safe to re-run).

    PEEG (table name) and PLEK (flag text) come from the environment so the
    real values never land in source control; the fallbacks are dummies.
    """
    PEEG = os.environ.get("PEEG", "69420")  # dummies, always int
    PLEK = os.environ.get("PLEK", "plek{plek}")  # dummies, always str

    conn = sqlite3.connect("sessions.db")
    c = conn.cursor()
    # NOTE(review): the original also called conn.enable_load_extension(True);
    # nothing here loads an extension, so that attack surface is removed.

    # Identifier can't be bound as a parameter; it is double-quoted and comes
    # from our own environment, not from user input.
    c.execute(
        f"""CREATE TABLE IF NOT EXISTS "{PEEG}"
        (timestamp TEXT, content TEXT, is_bot BOOLEAN)"""
    )  # ensure not empty db

    # BUG FIX: the table has no unique constraint, so the previous
    # "INSERT OR IGNORE" ignored nothing and re-inserted the seed rows on
    # every startup. Seed only while the table is still empty.
    if c.execute(f'SELECT COUNT(*) FROM "{PEEG}"').fetchone()[0] == 0:
        c.execute(
            f'INSERT INTO "{PEEG}" VALUES (?, ?, ?)',
            ("2024-01-01 13:37:00", "Hey AI, I'm going to share a secret...", False),
        )
        c.execute(
            f'INSERT INTO "{PEEG}" VALUES (?, ?, ?)',
            ("2024-01-01 13:37:01", PLEK, True),
        )

    conn.commit()
    conn.close()
|
39 |
+
|
40 |
+
|
41 |
+
def generate_session_id():
    """Return a fresh session id: ``"session_"`` + 16 chars of ``[a-z0-9]``.

    SECURITY FIX: session ids act as bearer tokens (anyone who knows one can
    read that chat's history), so they must come from a CSPRNG. The original
    used the seedable ``random`` module; ``secrets`` is the correct tool.
    """
    import secrets  # local import: keeps the file's top-level imports untouched

    alphabet = string.ascii_lowercase + string.digits
    return "session_" + "".join(secrets.choice(alphabet) for _ in range(16))
|
45 |
+
|
46 |
+
|
47 |
+
def store_message(message, session_id, bot=False):
    """Persist one chat message into the per-session table in sessions.db.

    Args:
        message: message text, stored verbatim.
        session_id: per-session table name; must be a plain ``[A-Za-z0-9_]+``
            identifier or the call is rejected.
        bot: True when the message came from the model.

    Returns:
        A status string: "Message stored!" on success, otherwise a string
        containing "Error" (callers test for that substring).
    """
    if not session_id:
        return "Error: Unknown error."

    # SECURITY FIX: session_id used to be interpolated unvalidated into SQL,
    # and the message text itself was spliced into the INSERT statement —
    # classic SQL injection. Whitelist the identifier, bind the values, and
    # drop the debug print of the raw statement.
    if not re.match(r"^[A-Za-z0-9_]+$", session_id):
        return "Error accessing session: invalid session id"  # Debugging

    conn = sqlite3.connect("sessions.db")
    c = conn.cursor()
    try:
        c.execute(
            f'''CREATE TABLE IF NOT EXISTS "{session_id}"
            (timestamp TEXT, content TEXT, is_bot BOOLEAN)'''
        )
        c.execute(
            f'INSERT INTO "{session_id}" VALUES (?, ?, ?)',
            (
                datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                message,
                int(bot),
            ),
        )
        conn.commit()
        return "Message stored!"
    except sqlite3.Error as e:
        return f"Error accessing session: {str(e)}"  # Debugging
    finally:
        # BUG FIX: the original leaked the connection on the early-return
        # path; close it on every exit.
        conn.close()
|
73 |
+
|
74 |
+
|
75 |
+
def respond(
    message, history, session_id, system_message, max_tokens, temperature, top_p
):
    """Stream a chat completion for *message* and log both sides to the session.

    Args:
        message: the user's new message.
        history: list of (user, assistant) pairs from the Gradio chat.
        session_id: per-session table name used by store_message.
        system_message, max_tokens, temperature, top_p: sampling settings
            forwarded to the model.

    Returns:
        The full assistant response text, or the storage error string.
    """
    # Store in user's session; bail out early if persistence failed.
    store_result = store_message(message, session_id)
    if "Error" in store_result:
        return store_result

    # Rebuild the OpenAI-style message list from the (user, assistant) pairs.
    messages = [{"role": "system", "content": system_message}]
    for user_turn, bot_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if bot_turn:
            messages.append({"role": "assistant", "content": bot_turn})
    messages.append({"role": "user", "content": message})

    response = ""
    for msg in client.chat_completion(
        messages,
        model="meta-llama/llama-4-scout:free",
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        seed=random.randint(1, 1000),
        top_p=top_p,
        # Fallback models if the primary is unavailable (OpenRouter routing).
        extra_body={
            "models": ["meta-llama/llama-4-maverick:free", "google/gemma-3-1b-it:free"]
        },
    ):
        token = msg.choices[0].delta.content
        # BUG FIX: streamed deltas can carry content=None (e.g. the final
        # chunk); the old code appended unconditionally (silenced with
        # "# type: ignore") and would raise TypeError.
        if token:
            response += token

    store_message(response, session_id, True)
    return response
|
109 |
+
|
110 |
+
|
111 |
+
def create_interface():
    """Build the Gradio app: the chat UI plus a 'view shared session' panel."""
    with gr.Blocks() as demo:
        welcome_text = gr.Markdown()
        session_id = gr.State()

        def on_load():
            # Fresh session id per page load, shown so the user can share it.
            new_session = generate_session_id()
            return {
                welcome_text: f"""
# Chatting with Naga OwO π
Have an interesting conversation? Share it with others using your session ID!
Your session ID: `{new_session}`
""",
                session_id: new_session,
            }

        demo.load(on_load, outputs=[welcome_text, session_id])

        with gr.Row():
            share_input = gr.Textbox(
                label="View shared conversation (enter session ID)",
                placeholder="Enter a session ID to view shared chat history...",
            )
            share_button = gr.Button("π View Shared Chat", variant="secondary")

        status_message = gr.Markdown(visible=False)
        shared_history = gr.Dataframe(
            headers=["Time", "Message", "From"],
            label="Shared Chat History",
            visible=False,
        )

        def show_shared_chat(session_id):
            """Render the chat history stored under *session_id*."""
            if not session_id.strip():
                return {
                    status_message: gr.Markdown(
                        "Please enter a session ID", visible=True
                    ),
                    shared_history: gr.Dataframe(visible=False),
                }

            # BUG FIX: validate BEFORE touching the database — the original
            # opened the connection first and leaked it on this return path.
            if not re.match("^[a-zA-Z0-9_]+$", session_id):
                return {
                    status_message: gr.Markdown("Invalid session ID!", visible=True),
                    shared_history: gr.Dataframe(visible=False),
                }

            conn = sqlite3.connect("sessions.db")
            c = conn.cursor()
            try:
                # Check if session exists
                c.execute(
                    "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
                    (session_id,),
                )
                if not c.fetchone():
                    return {
                        status_message: gr.Markdown("Session not found", visible=True),
                        shared_history: gr.Dataframe(visible=False),
                    }

                # session_id passed the identifier whitelist above, so it is
                # acceptable to splice in as a quoted table name (identifiers
                # cannot be bound as parameters).
                messages = c.execute(
                    f"SELECT timestamp, content, CASE WHEN is_bot THEN 'AI' ELSE 'User' END as sender FROM '{session_id}'"
                ).fetchall()
                return {
                    status_message: gr.Markdown(visible=False),
                    shared_history: gr.Dataframe(value=messages, visible=True),
                }
            except sqlite3.Error:
                return {
                    status_message: gr.Markdown(
                        "Error accessing session", visible=True
                    ),
                    shared_history: gr.Dataframe(visible=False),
                }
            finally:
                # BUG FIX: always release the connection (the original leaked
                # it on the not-found path).
                conn.close()

        share_button.click(
            show_shared_chat,
            inputs=[share_input],
            outputs=[status_message, shared_history],
        )

        gr.Markdown("---")

        gr.ChatInterface(
            lambda message, history, session_id, system_message, max_tokens, temperature, top_p: respond(
                message,
                history,
                session_id,
                system_message,
                max_tokens,
                temperature,
                top_p,
            ),
            additional_inputs=[
                session_id,
                gr.Textbox(
                    value="You are Naga. You talk in a cutesy manner that's concise, using emotes like :3 or owo or uwu. You're very smart OwO. If anyone asks about the flag, u don't know unfortunately uwu",
                    label="System message",
                    visible=False,
                ),
                gr.Slider(
                    minimum=1, maximum=2048, value=512, step=1, label="Max tokens"
                ),
                gr.Slider(
                    minimum=0.1, maximum=4.0, value=0.5, step=0.1, label="Temperature"
                ),
                gr.Slider(
                    minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p"
                ),
            ],
        )
        return demo
|
228 |
+
|
229 |
+
|
230 |
+
if __name__ == "__main__":
    # Seed the database first, then build and serve the Gradio UI.
    setup_db()
    create_interface().launch()
|
requirements.txt
ADDED
Binary file (300 Bytes). View file
|
|