Kevin Hu
committed on
Commit
·
0b69c87
1
Parent(s):
32ef5e5
refine TTS (#2500)
Browse files
### What problem does this PR solve?
### Type of change
- [x] Bug Fix (non-breaking change which fixes an issue)
- api/apps/conversation_app.py +3 -2
- api/apps/llm_app.py +10 -7
- rag/llm/tts_model.py +2 -1
api/apps/conversation_app.py
CHANGED
|
@@ -228,8 +228,9 @@ def tts():
|
|
| 228 |
|
| 229 |
def stream_audio():
|
| 230 |
try:
|
| 231 |
-
for
|
| 232 |
-
|
|
|
|
| 233 |
except Exception as e:
|
| 234 |
yield ("data:" + json.dumps({"retcode": 500, "retmsg": str(e),
|
| 235 |
"data": {"answer": "**ERROR**: " + str(e)}},
|
|
|
|
| 228 |
|
| 229 |
def stream_audio():
|
| 230 |
try:
|
| 231 |
+
for txt in re.split(r"[,。/《》?;:!\n\r:;]+", text):
|
| 232 |
+
for chunk in tts_mdl.tts(txt):
|
| 233 |
+
yield chunk
|
| 234 |
except Exception as e:
|
| 235 |
yield ("data:" + json.dumps({"retcode": 500, "retmsg": str(e),
|
| 236 |
"data": {"answer": "**ERROR**: " + str(e)}},
|
api/apps/llm_app.py
CHANGED
|
@@ -93,24 +93,27 @@ def set_api_key():
|
|
| 93 |
if msg:
|
| 94 |
return get_data_error_result(retmsg=msg)
|
| 95 |
|
| 96 |
-
|
| 97 |
"api_key": req["api_key"],
|
| 98 |
"api_base": req.get("base_url", "")
|
| 99 |
}
|
| 100 |
for n in ["model_type", "llm_name"]:
|
| 101 |
if n in req:
|
| 102 |
-
|
| 103 |
|
| 104 |
-
|
| 105 |
-
|
| 106 |
-
|
|
|
|
|
|
|
|
|
|
| 107 |
TenantLLMService.save(
|
| 108 |
tenant_id=current_user.id,
|
| 109 |
llm_factory=factory,
|
| 110 |
llm_name=llm.llm_name,
|
| 111 |
model_type=llm.model_type,
|
| 112 |
-
api_key=
|
| 113 |
-
api_base=
|
| 114 |
)
|
| 115 |
|
| 116 |
return get_json_result(data=True)
|
|
|
|
| 93 |
if msg:
|
| 94 |
return get_data_error_result(retmsg=msg)
|
| 95 |
|
| 96 |
+
llm_config = {
|
| 97 |
"api_key": req["api_key"],
|
| 98 |
"api_base": req.get("base_url", "")
|
| 99 |
}
|
| 100 |
for n in ["model_type", "llm_name"]:
|
| 101 |
if n in req:
|
| 102 |
+
llm_config[n] = req[n]
|
| 103 |
|
| 104 |
+
for llm in LLMService.query(fid=factory):
|
| 105 |
+
if not TenantLLMService.filter_update(
|
| 106 |
+
[TenantLLM.tenant_id == current_user.id,
|
| 107 |
+
TenantLLM.llm_factory == factory,
|
| 108 |
+
TenantLLM.llm_name == llm.llm_name],
|
| 109 |
+
llm_config):
|
| 110 |
TenantLLMService.save(
|
| 111 |
tenant_id=current_user.id,
|
| 112 |
llm_factory=factory,
|
| 113 |
llm_name=llm.llm_name,
|
| 114 |
model_type=llm.model_type,
|
| 115 |
+
api_key=llm_config["api_key"],
|
| 116 |
+
api_base=llm_config["api_base"]
|
| 117 |
)
|
| 118 |
|
| 119 |
return get_json_result(data=True)
|
rag/llm/tts_model.py
CHANGED
|
@@ -161,6 +161,7 @@ class QwenTTS(Base):
|
|
| 161 |
|
| 162 |
class OpenAITTS(Base):
|
| 163 |
def __init__(self, key, model_name="tts-1", base_url="https://api.openai.com/v1"):
|
|
|
|
| 164 |
self.api_key = key
|
| 165 |
self.model_name = model_name
|
| 166 |
self.base_url = base_url
|
|
@@ -181,6 +182,6 @@ class OpenAITTS(Base):
|
|
| 181 |
|
| 182 |
if response.status_code != 200:
|
| 183 |
raise Exception(f"**Error**: {response.status_code}, {response.text}")
|
| 184 |
-
for chunk in response.iter_content(
|
| 185 |
if chunk:
|
| 186 |
yield chunk
|
|
|
|
| 161 |
|
| 162 |
class OpenAITTS(Base):
|
| 163 |
def __init__(self, key, model_name="tts-1", base_url="https://api.openai.com/v1"):
|
| 164 |
+
if not base_url: base_url="https://api.openai.com/v1"
|
| 165 |
self.api_key = key
|
| 166 |
self.model_name = model_name
|
| 167 |
self.base_url = base_url
|
|
|
|
| 182 |
|
| 183 |
if response.status_code != 200:
|
| 184 |
raise Exception(f"**Error**: {response.status_code}, {response.text}")
|
| 185 |
+
for chunk in response.iter_content():
|
| 186 |
if chunk:
|
| 187 |
yield chunk
|