Update azure_openai.py
azure_openai.py  +4 -3
@@ -1,7 +1,8 @@
 import httpx
 import logging
 import os
-from pipecat.frames import TextFrame # Updated import
+from pipecat.frames import TextFrame, LLMResponseFrame # Updated import
+from pipecat.services.openai.llm import OpenAILLMService # Updated import
 from pipecat.processors.frame_processor import FrameProcessor, FrameDirection
 
 logger = logging.getLogger(__name__)
@@ -11,7 +12,7 @@ class AzureOpenAILLMService(FrameProcessor):
         super().__init__()
         self.api_key = os.environ.get("azure_openai")
         if not self.api_key:
-            logger.error("Missing
+            logger.error("Missing Azure OpenAI API key: azure_openai")
             raise ValueError("Azure OpenAI API key not found in environment variable 'azure_openai'")
         self.preprompt = preprompt
         self.endpoint = endpoint
@@ -45,7 +46,7 @@ class AzureOpenAILLMService(FrameProcessor):
         if "choices" in result and len(result["choices"]) > 0:
             content = result["choices"][0]["message"]["content"]
             continue_flag = len(content) >= 4000
-            await self.push_frame(
+            await self.push_frame(LLMResponseFrame(content=content, continue_flag=continue_flag))
         else:
             logger.error("No valid content in API response")
             await self.push_frame(TextFrame("Error: No valid response from LLM"))
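
For context, a minimal sketch of how the touched pieces might fit together after this change, assembled only from the hunks above. The __init__ signature beyond preprompt and endpoint, the _handle_response method name, and the shape of result are assumptions for illustration; the httpx request code itself is not shown in this diff.

import logging
import os

from pipecat.frames import TextFrame, LLMResponseFrame
from pipecat.processors.frame_processor import FrameProcessor

logger = logging.getLogger(__name__)


class AzureOpenAILLMService(FrameProcessor):
    def __init__(self, preprompt: str, endpoint: str):
        super().__init__()
        # Fail fast if the key is missing: the logger.error call surfaces the
        # problem in the service logs before the ValueError propagates.
        self.api_key = os.environ.get("azure_openai")
        if not self.api_key:
            logger.error("Missing Azure OpenAI API key: azure_openai")
            raise ValueError("Azure OpenAI API key not found in environment variable 'azure_openai'")
        self.preprompt = preprompt
        self.endpoint = endpoint

    async def _handle_response(self, result: dict) -> None:
        # Hypothetical helper name: push the model's reply downstream as an
        # LLMResponseFrame, or an error TextFrame when no usable choice exists.
        if "choices" in result and len(result["choices"]) > 0:
            content = result["choices"][0]["message"]["content"]
            # Flag very long replies (>= 4000 characters) so a downstream
            # processor can ask for a continuation, mirroring the hunk above.
            continue_flag = len(content) >= 4000
            await self.push_frame(LLMResponseFrame(content=content, continue_flag=continue_flag))
        else:
            logger.error("No valid content in API response")
            await self.push_frame(TextFrame("Error: No valid response from LLM"))

With this shape, a missing azure_openai environment variable is reported both in the logs and via the exception, and a successful response is forwarded downstream as a single LLMResponseFrame carrying the content together with its continue_flag.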