Update app.py
app.py
CHANGED
@@ -2,15 +2,15 @@ import gradio as gr
 import asyncio
 import json
 import html
-from openai import AzureOpenAI
-from azure.identity import DefaultAzureCredential, get_bearer_token_provider
-from tiktoken import get_encoding
-import sqlite3
+import os
 import uuid
+import sqlite3
 import datetime
 import difflib
+from tiktoken import get_encoding
+from openai import AzureOpenAI

-#
+# ConversationMemory class (unchanged)
 class ConversationMemory:
     def __init__(self, db_path="conversation.db"):
         self.conn = sqlite3.connect(db_path)
@@ -66,6 +66,7 @@ class ConversationMemory:
         cursor = self.conn.execute("SELECT * FROM conversation_chunks ORDER BY timestamp DESC LIMIT ?", (limit,))
         return [{"chunk_id": row[0], "text": row[1], "role": row[2], "timestamp": row[3], "intent": row[4], "token_count": row[5]} for row in cursor]

+# TextEditor class (unchanged)
 class TextEditor:
     def __init__(self, memory):
         self.memory = memory
@@ -103,7 +104,7 @@ class TextEditor:
         chunk = self.memory.get_chunk(chunk_id)
         if chunk:
             chunk['text'] = chunk['text'] + suffix
-            self.memory.update_chunk(chunk_id, chunk['text'])
+            self.memory.update_chunk(chunk_id, chunk['text'])
             return chunk['text']

     def diff(self, chunk_id, original_text):
@@ -114,15 +115,14 @@ class OpenAIApi:
             return '\n'.join(diff)
         return ""

+# OpenAIApi class (modified to fix proxies error)
 class OpenAIApi:
-    def __init__(self, preprompt="", endpoint="https://T-App-GPT4o.openai.azure.com/openai/v1/", model="gpt-4o"):
-        token_provider = get_bearer_token_provider(
-            DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
-        )
+    def __init__(self, preprompt="", endpoint="https://T-App-GPT4o.openai.azure.com/openai/v1/", model="gpt-4o", api_key=None):
         self.client = AzureOpenAI(
             azure_endpoint=endpoint,
-            azure_ad_token_provider=token_provider,
-            api_version="2025-01-01-preview"
+            api_key=api_key or os.getenv("AZURE_OPENAI_API_KEY"),
+            api_version="2025-01-01-preview",
+            http_client_kwargs={"proxies": None}  # Explicitly disable proxies
         )
         self.model = model
         self.preprompt = preprompt
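If the installed openai release rejects http_client_kwargs (current openai-python clients take an http_client argument instead), a minimal sketch of an equivalent fix for the httpx "proxies" error is to construct the HTTP client explicitly; the endpoint, key name, and API version below simply mirror this file:

import os
import httpx
from openai import AzureOpenAI

# Build the httpx client ourselves; trust_env=False stops httpx from reading
# HTTP_PROXY/HTTPS_PROXY, so no proxy settings reach the SDK at all.
client = AzureOpenAI(
    azure_endpoint="https://T-App-GPT4o.openai.azure.com/openai/v1/",
    api_key=os.getenv("AZURE_OPENAI_API_KEY"),
    api_version="2025-01-01-preview",
    http_client=httpx.Client(trust_env=False),
)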
@@ -159,7 +159,7 @@ class OpenAIApi:
                 },
                 {
                     "type": "function",
-                    "name": "
+                    "name": "paste_text",
                     "description": "Paste clipboard content into a conversation chunk.",
                     "parameters": {
                         "type": "object",
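For context, a complete entry in this tools list would look roughly like the sketch below; the property names chunk_id and position are assumptions inferred from TextEditor.paste(chunk_id, position), since the unchanged lines of the schema are not shown in this diff:

paste_tool = {
    "type": "function",
    "name": "paste_text",
    "description": "Paste clipboard content into a conversation chunk.",
    "parameters": {
        "type": "object",
        "properties": {
            # Assumed parameters; adjust to whatever the full schema actually declares.
            "chunk_id": {"type": "string", "description": "ID of the chunk to paste into."},
            "position": {"type": "integer", "description": "Character offset at which to paste."},
        },
        "required": ["chunk_id", "position"],
    },
}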
@@ -233,7 +233,7 @@ class OpenAIApi:
         response_chunk_id = self.memory.add_chunk(full_response, "assistant")

         for tool_call in tool_calls:
-            if tool_call:
+            if tool_call and hasattr(tool_call, 'function'):
                 func_name = tool_call.function.name
                 args = json.loads(tool_call.function.arguments)
                 if func_name == "cut_text":
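The new hasattr guard only protects against tool calls with no function payload; arguments can still arrive as malformed JSON from a streamed response. A small sketch of the fuller defensive pattern (the try/except is an addition here, not part of the committed code):

import json

def parse_tool_call(tool_call):
    # Mirror the guard added in this hunk, then parse the arguments defensively.
    if not (tool_call and hasattr(tool_call, "function")):
        return None
    try:
        args = json.loads(tool_call.function.arguments or "{}")
    except json.JSONDecodeError:
        args = {}
    return tool_call.function.name, args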
@@ -261,13 +261,15 @@ class OpenAIApi:
             self.memory.add_chunk(error_msg, "system")
             return {"error": error_msg}

-# Gradio UI
+# Updated Gradio UI with messages format
 async def chat_submit(user_input, chat_history, preprompt):
-    api = OpenAIApi(preprompt=preprompt)
+    api = OpenAIApi(preprompt=preprompt, api_key=os.getenv("AZURE_OPENAI_API_KEY"))
     response = await api.fetch_response(user_input)
     if "error" in response:
-
-
+        chat_history.append({"role": "assistant", "content": f"Error: {response['error']}"})
+    else:
+        chat_history.append({"role": "user", "content": user_input})
+        chat_history.append({"role": "assistant", "content": response["content"]})
     return chat_history, preprompt

 def get_history():
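A type="messages" history (see the Chatbot change further down) is a flat list of role/content dicts rather than the older list of (user, bot) tuples, which is why chat_submit now appends entries shaped like the sketch below; the strings are only illustrative:

# Illustrative messages-format history for gr.Chatbot(type="messages")
chat_history = []
chat_history.append({"role": "user", "content": "Append a closing paragraph to chunk 3"})
chat_history.append({"role": "assistant", "content": "Done, chunk 3 updated."})
# On failure only an assistant-side entry is added:
chat_history.append({"role": "assistant", "content": "Error: deployment not found"})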
@@ -278,27 +280,27 @@ def select_chunk(evt: gr.SelectData):
     return evt.value["chunk_id"], evt.value["text"]

 async def edit_cut(chunk_id, start, end):
-    api = OpenAIApi()
+    api = OpenAIApi(api_key=os.getenv("AZURE_OPENAI_API_KEY"))
     result = api.editor.cut(chunk_id, int(start), int(end))
     return result, api.editor.diff(chunk_id, result)

 async def edit_copy(chunk_id, start, end):
-    api = OpenAIApi()
+    api = OpenAIApi(api_key=os.getenv("AZURE_OPENAI_API_KEY"))
     result = api.editor.copy(chunk_id, int(start), int(end))
     return result, ""

 async def edit_paste(chunk_id, position):
-    api = OpenAIApi()
+    api = OpenAIApi(api_key=os.getenv("AZURE_OPENAI_API_KEY"))
     result = api.editor.paste(chunk_id, int(position))
     return result, api.editor.diff(chunk_id, result)

 async def edit_prefix(chunk_id, prefix):
-    api = OpenAIApi()
+    api = OpenAIApi(api_key=os.getenv("AZURE_OPENAI_API_KEY"))
     result = api.editor.add_prefix(chunk_id, prefix)
     return result, api.editor.diff(chunk_id, result)

 async def edit_suffix(chunk_id, suffix):
-    api = OpenAIApi()
+    api = OpenAIApi(api_key=os.getenv("AZURE_OPENAI_API_KEY"))
     result = api.editor.add_suffix(chunk_id, suffix)
     return result, api.editor.diff(chunk_id, result)

@@ -307,7 +309,7 @@ def create_ui():
     gr.Markdown("# Azure OpenAI Chat with Text Editing")

     with gr.Tab("Chat"):
-        chatbot = gr.Chatbot(label="Conversation")
+        chatbot = gr.Chatbot(label="Conversation", type="messages")  # Updated to messages format
         user_input = gr.Textbox(label="Your Message", placeholder="Type your message or editing command...")
         preprompt = gr.Textbox(label="System Prompt", value="You are a helpful assistant with text editing capabilities.")
         submit_btn = gr.Button("Send")
@@ -358,4 +360,4 @@ def create_ui():

 if __name__ == "__main__":
     demo = create_ui()
-    demo.launch()
+    demo.launch(server_name="0.0.0.0", server_port=7860)