Change OpenAI model
- chat_manager.py +1 -1
- config.py +1 -1
- main.py +2 -2
- ui_components.py +1 -1
chat_manager.py
CHANGED
@@ -55,7 +55,7 @@ class ChatManager:
             print(f"Error formatting datetime: {str(e)}")
             return dt_str
 
-    def _create_new_session(self, system_prompt: str = "", model: str = "
+    def _create_new_session(self, system_prompt: str = "", model: str = "openrouter/auto") -> ChatSession:
         return ChatSession(
             id=datetime.now().strftime("%Y%m%d_%H%M%S"),
             messages=[],
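For orientation, here is a minimal sketch of how the changed method and the ChatSession it builds could fit together. Only the _create_new_session signature, the id and messages arguments, and the "openrouter/auto" default come from the diff above; the remaining ChatSession fields and the dataclass shape are assumptions.

# Hypothetical sketch; fields beyond id and messages are assumed, not taken from the diff.
from dataclasses import dataclass, field
from datetime import datetime
from typing import Dict, List

@dataclass
class ChatSession:
    id: str
    messages: List[Dict[str, str]] = field(default_factory=list)
    system_prompt: str = ""          # assumed field
    model: str = "openrouter/auto"   # assumed field, matching the new default

class ChatManager:
    def _create_new_session(self, system_prompt: str = "", model: str = "openrouter/auto") -> ChatSession:
        # Timestamp-based id, empty history, caller-supplied prompt and model.
        return ChatSession(
            id=datetime.now().strftime("%Y%m%d_%H%M%S"),
            messages=[],
            system_prompt=system_prompt,
            model=model,
        )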
config.py
CHANGED
@@ -6,7 +6,7 @@ import pytz
 @dataclass
 class Config:
     # the newest OpenAI model is "gpt-4o" which was released May 13, 2024.
-    OPENAI_MODEL = "gpt-4o"
+    OPENAI_MODEL = "o1-mini-2024-09-12"  # Changed from "gpt-4o" to "o1-mini-2024-09-12"
     GEMINI_MODEL = "google/gemini-2.0-flash-exp:free"
     AUTO_MODEL = "openrouter/auto"
     # CLAUDE_MODEL = "anthropic/claude-3-sonnet:free"
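A hedged sketch of how the updated constant might be consumed when calling OpenAI. The client wiring below is an assumption for illustration (the project's llm_client module is not part of this commit); only Config.OPENAI_MODEL comes from the diff. Note that the o1 family only supports the default temperature, so none is passed.

# Hypothetical usage sketch; only Config.OPENAI_MODEL is taken from the diff above.
from openai import OpenAI
from config import Config

client = OpenAI()  # reads OPENAI_API_KEY from the environment

def chat_openai(messages: list) -> str:
    # No temperature argument: o1-mini accepts only the default value.
    response = client.chat.completions.create(
        model=Config.OPENAI_MODEL,  # "o1-mini-2024-09-12" after this change
        messages=messages,
    )
    return response.choices[0].message.content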
main.py
CHANGED
@@ -206,7 +206,7 @@ def main():
     # Model selection logic
     model_map = {
         "OpenRouter-Auto": llm_client.chat_auto,
-        "
+        "OpenAI": llm_client.chat_openai,
         "Claude-3.5": llm_client.chat_claude,
         "Gemini-2.0": llm_client.chat_gemini
     }
@@ -222,7 +222,7 @@ def main():
             else:
                 show_notification(i18n.get_text("error_rate_limit"), "warning")
         elif "API key" in error_msg:
-            if model == "
+            if model == "OpenAI":
                 show_notification(i18n.get_text("error_model_switch_openai"), "error")
             else:
                 show_notification(i18n.get_text("error_model_switch_openrouter"), "error")
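The hunks above add an "OpenAI" entry to a label-to-callable dispatch table and to the API-key error branch. A stand-alone sketch of that pattern follows; the chat_* call signatures and the show_notification/i18n helpers are assumptions inferred from the diff, not confirmed by it.

# Minimal dispatch sketch; llm_client, i18n, and show_notification come from
# modules outside this commit, so their exact shapes are assumed here.
def respond(model, messages, llm_client, i18n, show_notification):
    model_map = {
        "OpenRouter-Auto": llm_client.chat_auto,
        "OpenAI": llm_client.chat_openai,
        "Claude-3.5": llm_client.chat_claude,
        "Gemini-2.0": llm_client.chat_gemini,
    }
    try:
        return model_map[model](messages)
    except Exception as e:
        error_msg = str(e)
        if "API key" in error_msg:
            # Same branching as the second hunk: point at the provider whose key failed.
            if model == "OpenAI":
                show_notification(i18n.get_text("error_model_switch_openai"), "error")
            else:
                show_notification(i18n.get_text("error_model_switch_openrouter"), "error")
        return None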
ui_components.py
CHANGED
@@ -52,7 +52,7 @@ def render_sidebar(i18n, chat_manager):
     # Model selection - default set to OpenRouter-Auto
     model = st.selectbox(
         i18n.get_text("model_selection"),
-        ["OpenRouter-Auto", "
+        ["OpenRouter-Auto", "OpenAI", "Claude-3.5", "Gemini-2.0"],
         index=0,
         key="model_selection"
     )
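As a usage note, st.selectbox returns the selected label string, which is exactly what main.py looks up in model_map, so the new "OpenAI" entry must match in both files. A minimal self-contained sketch, with a stub i18n object standing in for the project's translator:

# Stand-alone Streamlit snippet; _StubI18n is a placeholder, not the project's i18n class.
import streamlit as st

class _StubI18n:
    def get_text(self, key: str) -> str:
        return {"model_selection": "Model"}.get(key, key)

i18n = _StubI18n()

model = st.selectbox(
    i18n.get_text("model_selection"),
    ["OpenRouter-Auto", "OpenAI", "Claude-3.5", "Gemini-2.0"],
    index=0,  # default to OpenRouter-Auto
    key="model_selection",
)
st.caption(f"Selected backend: {model}")  # this label is the key used by model_map in main.py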