Deadmon committed
Commit 6107cf6 (verified) · Parent: f6491de

Update azure_openai.py

Files changed (1): azure_openai.py (+3 −3)
azure_openai.py CHANGED

@@ -1,7 +1,7 @@
 import httpx
 import logging
 import os
-from pipecat.frames.frames import TextFrame, LLMResponseFrame
+from pipecat.frames import TextFrame # Updated import
 from pipecat.processors.frame_processor import FrameProcessor, FrameDirection
 
 logger = logging.getLogger(__name__)
@@ -17,7 +17,7 @@ class AzureOpenAILLMService(FrameProcessor):
         self.endpoint = endpoint
         self.client = httpx.AsyncClient()
 
-    async def process_frame(self, frame: Frame, direction: FrameDirection):
+    async def process_frame(self, frame, direction: FrameDirection):
         if isinstance(frame, TextFrame) and direction == FrameDirection.UPSTREAM:
             try:
                 messages = []
@@ -45,7 +45,7 @@ class AzureOpenAILLMService(FrameProcessor):
             if "choices" in result and len(result["choices"]) > 0:
                 content = result["choices"][0]["message"]["content"]
                 continue_flag = len(content) >= 4000
-                await self.push_frame(LLMResponseFrame(content=content, continue_flag=continue_flag))
+                await self.push_frame(TextFrame(content=content)) # Corrected this line
             else:
                 logger.error("No valid content in API response")
                 await self.push_frame(TextFrame("Error: No valid response from LLM"))
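
For context, here is a minimal, runnable sketch of the patched flow. The FrameDirection, TextFrame, and FrameProcessor classes below are hypothetical stand-ins for pipecat's real ones, included only so the example runs without pipecat installed; their fields and signatures may differ from the library's (recent pipecat releases, for instance, define TextFrame with a text field, so the committed TextFrame(content=...) call is worth verifying against the installed version).

import asyncio
from dataclasses import dataclass
from enum import Enum

class FrameDirection(Enum):  # stand-in for pipecat's FrameDirection
    UPSTREAM = 1
    DOWNSTREAM = 2

@dataclass
class TextFrame:  # stand-in; field name matches the committed call, not necessarily pipecat's
    content: str

class FrameProcessor:  # stand-in; pipecat's push_frame forwards frames along the pipeline
    async def push_frame(self, frame):
        print(f"pushed: {frame.content!r}")

class AzureOpenAILLMService(FrameProcessor):
    async def process_frame(self, frame, direction: FrameDirection):
        # Only text frames travelling upstream are handled, as in the commit.
        if isinstance(frame, TextFrame) and direction == FrameDirection.UPSTREAM:
            # Stand-in for the Azure OpenAI request; the real code reads
            # result["choices"][0]["message"]["content"] from the HTTP response.
            content = f"echo: {frame.content}"
            continue_flag = len(content) >= 4000  # still computed, no longer pushed
            await self.push_frame(TextFrame(content=content))

asyncio.run(AzureOpenAILLMService().process_frame(TextFrame("hello"), FrameDirection.UPSTREAM))

Two side effects of the change are visible in the diff itself: the old signature annotated frame with Frame, a name the module never imported, so the annotation would raise a NameError when the class body executed (Python evaluates annotations at definition time by default), and continue_flag is still computed but no longer consumed now that LLMResponseFrame(content=..., continue_flag=...) is gone.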