readme: seek alternative API
- README.md +2 -1
- README_zh.md +1 -0
- app.py +19 -10
README.md

```diff
@@ -58,7 +58,8 @@ CoT-Lab is an experimental interface exploring new paradigms in human-AI collabo
 Explore hybrid human-AI problem-solving paradiam
 
 ## 📥 Installation & Deployment
-Local deployment is (currently) required if you want to work with locally hosted LLMs.
+Local deployment is (currently) required if you want to work with locally hosted LLMs.
+Due to degraded performance of official DeepSeek API - We recommend seeking alternative API providers, or use locally hosted distilled-R1 for experiment.
 
 **Prerequisites**: Python 3.11+ | Valid [Deepseek API Key](https://platform.deepseek.com/) or OpenAI SDK compatible API.
 
```
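Because the app speaks to any OpenAI SDK compatible endpoint, switching to an alternative provider or to a locally hosted distilled R1 model is mostly a configuration change. Below is a minimal sketch of that configuration: `API_URL` and `API_MODEL` mirror the environment variables read in app.py, while the `API_KEY` variable name, the local endpoint URL, and the model name are assumptions used only for illustration.

```python
# Sketch: point an OpenAI-compatible client at an alternative provider or a
# locally hosted distilled R1 model. API_URL and API_MODEL mirror the variables
# read in app.py; API_KEY and the example endpoint/model names are assumptions.
import os
from openai import OpenAI

os.environ.setdefault("API_URL", "http://localhost:11434/v1")  # e.g. a local OpenAI-compatible server
os.environ.setdefault("API_MODEL", "deepseek-r1:14b")          # e.g. a distilled R1 variant

client = OpenAI(
    base_url=os.getenv("API_URL"),
    api_key=os.getenv("API_KEY", "EMPTY"),  # many local servers accept any key
)

# Same sampling settings that app.py passes (top_p=0.95, temperature=0.6), streamed.
stream = client.chat.completions.create(
    model=os.getenv("API_MODEL"),
    messages=[{"role": "user", "content": "Hello"}],
    stream=True,
    top_p=0.95,
    temperature=0.6,
)
for chunk in stream:
    delta = chunk.choices[0].delta.content
    if delta:
        print(delta, end="", flush=True)
```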
README_zh.md

```diff
@@ -58,6 +58,7 @@ CoT-Lab是一个探索人机协作新范式的实验性界面,基于**认知
 
 ## 📥 安装部署
 如希望使用本地部署的大语言模型,您(暂时)需要克隆本项目并在本地运行。
+因近期DeepSeek官方API不稳定,我们建议暂时使用第三方API供应商作为替代方案,或者使用本地部署的R1-Distilled模型进行实验。
 
 **环境要求**:Python 3.11+ | 有效的[Deepseek API密钥](https://platform.deepseek.com/) 或其他OpenAI SDK兼容的API接口。
 
```

The added line is the Chinese README's version of the same note: "Because the official DeepSeek API has recently been unstable, we recommend temporarily using a third-party API provider instead, or experimenting with a locally deployed R1-Distilled model."
app.py

Several of the hunks below are formatting-only changes (blank-line and trailing-comma normalization), so their `-`/`+` pairs show no visible difference; the substantive changes are the initial waiting-status yield and the chatbot API info message.

```diff
@@ -38,12 +38,12 @@ class DynamicState:
     def control_button_handler(self):
         original_state = self.should_stream
         self.should_stream = not self.should_stream
-
+
         # 当从暂停->生成时激活等待状态
         if not original_state and self.should_stream:
            self.waiting_api = True
            self.stream_completed = False
-
+
         return self.ui_state_controller()
 
     def ui_state_controller(self):
```
```diff
@@ -59,12 +59,12 @@ class DynamicState:
             status_suffix = lang_data["waiting_api"]
         else:
             status_suffix = (
-                lang_data["completed"]
-                if self.stream_completed
+                lang_data["completed"]
+                if self.stream_completed
                 else lang_data["interrupted"]
             )
         editor_label = f"{lang_data['editor_label']} - {status_suffix}"
-
+
         return (
             gr.update(value=control_value, variant=control_variant),
             gr.update(label=editor_label),
```
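These two `DynamicState` hunks only touch whitespace around the pause/resume toggle. `control_button_handler` flips `should_stream` (the Chinese comment 当从暂停->生成时激活等待状态 means roughly "activate the waiting state when switching from paused to generating"), and `ui_state_controller` returns `gr.update` objects that relabel the control button and the editor. A minimal, self-contained sketch of the same pattern follows; the component names, labels, and variants are illustrative, not the app's own.

```python
# Minimal sketch of the toggle-and-relabel pattern (illustrative names only).
import gradio as gr

class ToggleState:
    def __init__(self):
        self.should_stream = False
        self.waiting_api = False

    def control_button_handler(self):
        original_state = self.should_stream
        self.should_stream = not self.should_stream
        # Entering "generate" from "pause": mark that we are waiting on the API.
        if not original_state and self.should_stream:
            self.waiting_api = True
        return self.ui_state_controller()

    def ui_state_controller(self):
        if self.should_stream:
            value, variant, label = "Pause", "stop", "Editor - waiting for API"
        else:
            value, variant, label = "Generate", "primary", "Editor - paused"
        # One gr.update per output component of the click event.
        return gr.update(value=value, variant=variant), gr.update(label=label)

with gr.Blocks() as demo:
    state = gr.State(ToggleState())
    editor = gr.Textbox(label="Editor")
    control = gr.Button("Generate", variant="primary")

    control.click(
        lambda s: s.control_button_handler(),
        [state],
        [control, editor],
        show_progress=False,
        concurrency_limit=None,  # same uncapped setting used later in this commit
    )

demo.launch()
```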
```diff
@@ -151,11 +151,13 @@ class ConvoState:
 
         try:
 
-
+            # 初始等待状态更新
             if dynamic_state.waiting_api:
                 status = lang_data["waiting_api"]
                 editor_label = f"{lang_data['editor_label']} - {status}"
-                yield full_response, gr.update(label=editor_label), self.flatten_output()
+                yield full_response, gr.update(
+                    label=editor_label
+                ), self.flatten_output()
 
             coordinator = CoordinationManager(self.sync_threshold, current_content)
             messages = [
```
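The substantive change here (the comment 初始等待状态更新 means "initial waiting-status update") is that the generator now yields once before the API call starts, so the editor label immediately shows the waiting status instead of staying stale until the first token arrives. A small sketch of this yield-interim-status pattern in a Gradio streaming handler follows; the component names and the fake token source are illustrative, not from app.py.

```python
# Sketch: a streaming handler that yields an early "waiting" status update,
# then streams content. slow_token_source and the labels are placeholders.
import time
import gradio as gr

def slow_token_source():
    time.sleep(2)  # stand-in for connection / first-token latency
    for token in ["Thinking ", "step ", "by ", "step..."]:
        time.sleep(0.3)
        yield token

def respond(prompt):
    full_response = ""
    # Yield once up front so the UI reflects the waiting state immediately,
    # before the (potentially slow) first token arrives.
    yield gr.update(value=full_response, label="Editor - waiting for API")
    for token in slow_token_source():
        full_response += token
        yield gr.update(value=full_response, label="Editor - generating")
    yield gr.update(value=full_response, label="Editor - completed")

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    editor = gr.Textbox(label="Editor")
    prompt.submit(respond, [prompt], [editor])

demo.launch()
```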
```diff
@@ -173,7 +175,7 @@ class ConvoState:
                 stream=True,
                 timeout=AppConfig.API_TIMEOUT,
                 top_p=0.95,
-                temperature=0.6
+                temperature=0.6,
             )
             for chunk in response_stream:
                 chunk_content = chunk.choices[0].delta.content
```
```diff
@@ -330,7 +332,14 @@ with gr.Blocks(theme=theme, css_paths="styles.css") as demo:
         chatbot = gr.Chatbot(
             type="messages",
             height=300,
-            value=LANGUAGE_CONFIG["en"]["bot_default"],
+            value=LANGUAGE_CONFIG["en"]["bot_default"]
+            + [
+                {
+                    "role": "assistant",
+                    "content": f"Running `{os.getenv('API_MODEL')}` @ {os.getenv('API_URL')} \n Proformance subjects to API provider situation",
+                    "metadata": {"title": f"API INFO"},
+                }
+            ],
             group_consecutive_messages=False,
             show_copy_all_button=True,
             show_share_button=True,
```
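This hunk appends a collapsible "API INFO" entry to the chatbot's initial messages, built from the `API_MODEL` and `API_URL` environment variables, so users can see which backend is actually serving them. A standalone sketch of the same messages-format Chatbot with a metadata-titled entry follows; the greeting text, environment-variable fallbacks, and the cleaned-up wording of the info line are placeholders rather than the app's own `LANGUAGE_CONFIG` strings.

```python
# Sketch: a messages-format Chatbot whose initial value appends an "API INFO"
# entry built from environment variables. Greeting and fallbacks are placeholders.
import os
import gradio as gr

bot_default = [
    {"role": "assistant", "content": "Hi! Describe a problem to start."}
]

api_info = {
    "role": "assistant",
    "content": (
        f"Running `{os.getenv('API_MODEL', 'unknown-model')}` "
        f"@ {os.getenv('API_URL', 'unknown-endpoint')}\n"
        "Performance is subject to the API provider."
    ),
    # A "title" under metadata renders the message as a collapsible, titled block.
    "metadata": {"title": "API INFO"},
}

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(
        type="messages",
        height=300,
        value=bot_default + [api_info],
        group_consecutive_messages=False,
        show_copy_all_button=True,
    )

demo.launch()
```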
```diff
@@ -396,7 +405,7 @@ with gr.Blocks(theme=theme, css_paths="styles.css") as demo:
         [dynamic_state],
         stateful_ui,
         show_progress=False,
-        concurrency_limit=None
+        concurrency_limit=None,
     )
 
     next_turn_btn.click(
```