Deadmon committed (verified)
Commit 4286e7c · 1 Parent(s): a0f90fe

Update app.py

Files changed (1)
  1. app.py +28 -13
app.py CHANGED
@@ -11,6 +11,7 @@ import logging
 from tiktoken import get_encoding
 from openai import AzureOpenAI
 import httpx
+import re
 
 # Configure logging
 logging.basicConfig(
@@ -161,7 +162,14 @@ class TextEditor:
 
 # OpenAIApi class
 class OpenAIApi:
-    def __init__(self, preprompt="", endpoint="https://<your-resource-name>.openai.azure.com/", model="gpt-4o", api_key=None):
+    def __init__(self, preprompt="", endpoint="https://huggingface.co/spaces/Deadmon/copypasta.openai.azure.com/", model="gpt-4o", api_key=None):
+        # Validate endpoint format
+        if "<your-resource-name>" in endpoint:
+            logger.error("Invalid endpoint: Replace '<your-resource-name>' with your Azure OpenAI resource name")
+            raise ValueError("Invalid endpoint: Replace '<your-resource-name>' with your Azure OpenAI resource name (e.g., https://my-resource.openai.azure.com/)")
+        if not re.match(r"^https://[a-zA-Z0-9-]+\.openai\.azure\.com/?$", endpoint):
+            logger.warning(f"Endpoint format may be incorrect: {endpoint}. Expected format: https://<resource-name>.openai.azure.com/")
+
         # Use a minimal httpx.Client to avoid proxies parameter
         http_client = httpx.Client()
         self.client = AzureOpenAI(
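For reference, the endpoint checks added above can be exercised in isolation; a minimal sketch (the check_endpoint helper and the sample URLs are illustrative, not part of app.py):

import re

def check_endpoint(endpoint):
    # Mirrors the validation added to OpenAIApi.__init__ (illustrative helper, not in app.py).
    if "<your-resource-name>" in endpoint:
        raise ValueError("Invalid endpoint: Replace '<your-resource-name>' with your Azure OpenAI resource name")
    if not re.match(r"^https://[a-zA-Z0-9-]+\.openai\.azure\.com/?$", endpoint):
        return "warning: format may be incorrect"
    return "ok"

print(check_endpoint("https://my-resource.openai.azure.com/"))   # -> ok
print(check_endpoint("https://huggingface.co/spaces/Deadmon/copypasta.openai.azure.com/"))  # -> warning: format may be incorrect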
@@ -256,7 +264,7 @@ class OpenAIApi:
         messages.extend({"role": c["role"], "content": c["text"]} for c in context)
         messages.append({"role": "user", "content": sanitized_prompt})
 
-        logger.info(f"Sending request to model: {self.model}, messages: {json.dumps(messages, ensure_ascii=False)}")
+        logger.info(f"Sending request to model: {self.model}, endpoint: {self.client._base_url}, messages: {json.dumps(messages, ensure_ascii=False)}")
 
         try:
             response = await self.client.chat.completions.create(
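The `self.client = AzureOpenAI(` call itself is truncated by the hunks shown here; a minimal, self-contained sketch of building the client with an explicit httpx.Client and reading the endpoint back for logging (the api_version value and the AZURE_OPENAI_ENDPOINT variable are assumptions, not taken from app.py):

import os
import httpx
from openai import AzureOpenAI

# Explicit httpx.Client, as in the diff, to avoid the proxies-parameter issue.
http_client = httpx.Client()

client = AzureOpenAI(
    azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT", "https://my-resource.openai.azure.com/"),  # assumed env var
    api_key=os.getenv("AZURE_OPENAI_API_KEY"),
    api_version="2024-02-01",  # assumed; the real value is not visible in these hunks
    http_client=http_client,
)

# The diff logs self.client._base_url; client.base_url is the public equivalent.
print(client.base_url)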
@@ -309,22 +317,28 @@ class OpenAIApi:
 
         except Exception as e:
             error_msg = f"API Error: {str(e)}"
-            logger.error(f"API request failed: {error_msg}, endpoint: {self.client.azure_endpoint}, model: {self.model}")
+            logger.error(f"API request failed: {error_msg}, endpoint: {self.client._base_url}, model: {self.model}")
             self.memory.add_chunk(error_msg, "system")
             return {"error": error_msg}
 
 # Gradio UI
 async def chat_submit(user_input, chat_history, preprompt):
-    api = OpenAIApi(preprompt=preprompt, api_key=os.getenv("AZURE_OPENAI_API_KEY"))
-    response = await api.fetch_response(user_input)
-    if "error" in response:
-        chat_history.append({"role": "assistant", "content": f"Error: {response['error']}"})
-        logger.warning(f"Chat error: {response['error']}")
-    else:
-        chat_history.append({"role": "user", "content": user_input})
-        chat_history.append({"role": "assistant", "content": response["content"]})
-        logger.info("Chat response added to history")
-    return chat_history, preprompt
+    try:
+        api = OpenAIApi(preprompt=preprompt, api_key=os.getenv("AZURE_OPENAI_API_KEY"))
+        response = await api.fetch_response(user_input)
+        if "error" in response:
+            chat_history.append({"role": "assistant", "content": f"Error: {response['error']}"})
+            logger.warning(f"Chat error: {response['error']}")
+        else:
+            chat_history.append({"role": "user", "content": user_input})
+            chat_history.append({"role": "assistant", "content": response["content"]})
+            logger.info("Chat response added to history")
+        return chat_history, preprompt
+    except ValueError as e:
+        error_msg = f"Configuration Error: {str(e)}"
+        logger.error(error_msg)
+        chat_history.append({"role": "assistant", "content": error_msg})
+        return chat_history, preprompt
 
 def get_history():
     memory = ConversationMemory()
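The new try/except turns a configuration ValueError raised by OpenAIApi.__init__ into an assistant message instead of an unhandled exception in the Gradio callback; a simplified standalone sketch of that pattern (make_api and the sample call are stand-ins, not the app's code):

def make_api(endpoint):
    # Stand-in for OpenAIApi.__init__, which raises ValueError on a placeholder endpoint.
    if "<your-resource-name>" in endpoint:
        raise ValueError("Invalid endpoint: placeholder was never replaced")
    return object()

def submit(user_input, chat_history, endpoint):
    try:
        make_api(endpoint)
        chat_history.append({"role": "user", "content": user_input})
        chat_history.append({"role": "assistant", "content": "(model reply would go here)"})
    except ValueError as e:
        chat_history.append({"role": "assistant", "content": f"Configuration Error: {e}"})
    return chat_history

print(submit("hello", [], "https://<your-resource-name>.openai.azure.com/"))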
@@ -372,6 +386,7 @@ async def edit_suffix(chunk_id, suffix):
 def create_ui():
     with gr.Blocks(title="Azure OpenAI Chat & Text Editor") as demo:
         gr.Markdown("# Azure OpenAI Chat with Text Editing")
+        gr.Markdown("**Note**: Ensure the Azure OpenAI endpoint is set correctly in `app.py` (replace `<your-resource-name>` with your resource name).")
 
         with gr.Tab("Chat"):
             chatbot = gr.Chatbot(label="Conversation", type="messages")
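For context, the gr.Markdown note and the messages-style Chatbot from the last hunk fit into a Blocks layout like the following minimal sketch (the Textbox and echo handler are placeholders; app.py wires its own chat_submit and additional tabs):

import gradio as gr

def echo(user_input, chat_history):
    # Placeholder handler; app.py uses its async chat_submit instead.
    chat_history = chat_history + [
        {"role": "user", "content": user_input},
        {"role": "assistant", "content": f"echo: {user_input}"},
    ]
    return chat_history, ""

with gr.Blocks(title="Azure OpenAI Chat & Text Editor") as demo:
    gr.Markdown("# Azure OpenAI Chat with Text Editing")
    gr.Markdown("**Note**: Ensure the Azure OpenAI endpoint is set correctly in `app.py`.")
    with gr.Tab("Chat"):
        chatbot = gr.Chatbot(label="Conversation", type="messages")
        msg = gr.Textbox(label="Message")
        msg.submit(echo, [msg, chatbot], [chatbot, msg])

if __name__ == "__main__":
    demo.launch()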