""" | |
LLM ์ธํฐํ์ด์ค ๋ชจ๋ - ๋ค์ํ LLM์ ํตํฉ ๊ด๋ฆฌ | |
""" | |
import os | |
import logging | |
from typing import List, Dict, Any, Optional, Union | |
from dotenv import load_dotenv | |
# LLM ํด๋ผ์ด์ธํธ ์ํฌํธ | |
from utils.openai_client import OpenAILLM | |
from utils.deepseek_client import DeepSeekLLM | |
# ํ๊ฒฝ ๋ณ์ ๋ก๋ | |
load_dotenv() | |
# ๋ก๊ฑฐ ์ค์ | |
logger = logging.getLogger("LLMInterface") | |
if not logger.hasHandlers(): | |
handler = logging.StreamHandler() | |
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') | |
handler.setFormatter(formatter) | |
logger.addHandler(handler) | |
logger.setLevel(logging.INFO) | |
class LLMInterface:
    """Interface class that manages multiple LLM APIs behind a single entry point."""

    # Supported LLMs (display name shown in the UI -> internal identifier)
    SUPPORTED_LLMS = {
        "OpenAI": "openai",
        "DeepSeek": "deepseek"
    }
    def __init__(self, default_llm: str = "openai"):
        """Initialize the LLM interface.

        Args:
            default_llm: Default LLM identifier ('openai' or 'deepseek').
        """
        # Initialize one client per supported backend
        self.llm_clients = {
            "openai": OpenAILLM(),
            "deepseek": DeepSeekLLM()
        }

        # Set the default LLM (fall back to 'openai' if the id is invalid)
        if default_llm not in self.llm_clients:
            logger.warning(f"Default LLM '{default_llm}' is not valid; falling back to 'openai'.")
            default_llm = "openai"
        self.default_llm = default_llm
        self.current_llm = default_llm
        logger.info(f"LLM interface initialized, default LLM: {default_llm}")
    def set_llm(self, llm_id: str) -> bool:
        """Set the current LLM.

        Args:
            llm_id: LLM identifier.

        Returns:
            True on success, False if the identifier is not supported.
        """
        if llm_id not in self.llm_clients:
            logger.error(f"Unsupported LLM identifier: {llm_id}")
            return False
        self.current_llm = llm_id
        logger.info(f"Current LLM set to '{llm_id}'.")
        return True
    def get_current_llm_name(self) -> str:
        """Return the display name of the current LLM."""
        for name, llm_id in self.SUPPORTED_LLMS.items():
            if llm_id == self.current_llm:
                return name
        return "Unknown"
    def get_current_llm_details(self) -> Dict[str, str]:
        """Return details (display name, id, model) of the current LLM."""
        name = self.get_current_llm_name()
        # Both client classes expose a `model` attribute; fall back to ""
        # in case a future client does not.
        model = getattr(self.llm_clients[self.current_llm], "model", "")
        return {
            "name": name,
            "id": self.current_llm,
            "model": model
        }
    def generate(
        self,
        prompt: str,
        system_prompt: Optional[str] = None,
        llm_id: Optional[str] = None,
        **kwargs
    ) -> str:
        """Generate text.

        Args:
            prompt: User prompt.
            system_prompt: System prompt (optional).
            llm_id: LLM identifier to use (defaults to the current LLM).
            **kwargs: Extra arguments (temperature, max_tokens, etc.).

        Returns:
            The generated text.
        """
        # Decide which LLM to use; unknown ids fall back to the current LLM
        llm_to_use = llm_id if llm_id and llm_id in self.llm_clients else self.current_llm
        llm_client = self.llm_clients[llm_to_use]

        logger.info(f"Text generation request, LLM: {llm_to_use}")

        # Delegate to the selected client
        return llm_client.generate(
            prompt=prompt,
            system_prompt=system_prompt,
            **kwargs
        )
    def rag_generate(
        self,
        query: str,
        context: List[str],
        llm_id: Optional[str] = None,
        **kwargs
    ) -> str:
        """Generate text with retrieval-augmented generation (RAG).

        Args:
            query: User query.
            context: List of retrieved context passages.
            llm_id: LLM identifier to use (defaults to the current LLM).
            **kwargs: Extra arguments (temperature, max_tokens, etc.).

        Returns:
            The generated text.
        """
        # Decide which LLM to use; unknown ids fall back to the current LLM
        llm_to_use = llm_id if llm_id and llm_id in self.llm_clients else self.current_llm
        llm_client = self.llm_clients[llm_to_use]

        logger.info(f"RAG generation request, LLM: {llm_to_use}")

        # Delegate to the selected client
        return llm_client.rag_generate(
            query=query,
            context=context,
            **kwargs
        )