"""
LLM ์ธํฐํ์ด์ค ๋ชจ๋ - ๋ค์ํ LLM์ ํตํฉ ๊ด๋ฆฌ
"""
import os
import logging
from typing import List, Dict, Any, Optional, Union
from dotenv import load_dotenv
# LLM ํด๋ผ์ด์ธํธ ์ํฌํธ
from utils.openai_client import OpenAILLM
from utils.deepseek_client import DeepSeekLLM
# ํ๊ฒฝ ๋ณ์ ๋ก๋
load_dotenv()
# ๋ก๊ฑฐ ์ค์
logger = logging.getLogger("LLMInterface")
if not logger.hasHandlers():
handler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.INFO)

class LLMInterface:
    """Interface class that manages multiple LLM APIs behind a single entry point."""

    # Supported LLMs (display name shown in the UI -> internal identifier)
    SUPPORTED_LLMS = {
        "OpenAI": "openai",
        "DeepSeek": "deepseek"
    }

    def __init__(self, default_llm: str = "openai"):
        """Initialize the LLM interface.

        Args:
            default_llm: Default LLM identifier ('openai' or 'deepseek')
        """
        # Initialize the LLM clients
        self.llm_clients = {
            "openai": OpenAILLM(),
            "deepseek": DeepSeekLLM()
        }

        # Set the default LLM (fall back to 'openai' if the given id is invalid)
        if default_llm not in self.llm_clients:
            logger.warning(f"Default LLM '{default_llm}' is not valid. Falling back to 'openai'.")
            default_llm = "openai"

        self.default_llm = default_llm
        self.current_llm = default_llm
        logger.info(f"LLM interface initialized, default LLM: {default_llm}")

    def set_llm(self, llm_id: str) -> bool:
        """Set the currently active LLM.

        Args:
            llm_id: LLM identifier

        Returns:
            True on success, False otherwise
        """
        if llm_id not in self.llm_clients:
            logger.error(f"Unsupported LLM identifier: {llm_id}")
            return False

        self.current_llm = llm_id
        logger.info(f"Current LLM set to '{llm_id}'.")
        return True

    def get_current_llm_name(self) -> str:
        """Return the display name of the current LLM."""
        # Note: the loop variable is named llm_id to avoid shadowing the id() builtin
        for name, llm_id in self.SUPPORTED_LLMS.items():
            if llm_id == self.current_llm:
                return name
        return "Unknown"

    def get_current_llm_details(self) -> Dict[str, str]:
        """Return details about the current LLM."""
        name = self.get_current_llm_name()
        model = ""
        if self.current_llm == "openai":
            model = self.llm_clients["openai"].model
        elif self.current_llm == "deepseek":
            model = self.llm_clients["deepseek"].model
        return {
            "name": name,
            "id": self.current_llm,
            "model": model
        }

    def generate(
        self,
        prompt: str,
        system_prompt: Optional[str] = None,
        llm_id: Optional[str] = None,
        **kwargs
    ) -> str:
        """Generate text.

        Args:
            prompt: User prompt
            system_prompt: System prompt (optional)
            llm_id: LLM identifier to use (defaults to the current LLM if omitted)
            **kwargs: Extra arguments (temperature, max_tokens, etc.)

        Returns:
            The generated text
        """
        # Decide which LLM to use
        llm_to_use = llm_id if llm_id and llm_id in self.llm_clients else self.current_llm
        llm_client = self.llm_clients[llm_to_use]

        # Log which LLM handles the request
        logger.info(f"Text generation request, LLM: {llm_to_use}")

        # Delegate the generation request to the selected client
        return llm_client.generate(
            prompt=prompt,
            system_prompt=system_prompt,
            **kwargs
        )

    def rag_generate(
        self,
        query: str,
        context: List[str],
        llm_id: Optional[str] = None,
        **kwargs
    ) -> str:
        """Generate text with RAG (retrieval-augmented generation).

        Args:
            query: User query
            context: List of retrieved context passages
            llm_id: LLM identifier to use (defaults to the current LLM if omitted)
            **kwargs: Extra arguments (temperature, max_tokens, etc.)

        Returns:
            The generated text
        """
        # Decide which LLM to use
        llm_to_use = llm_id if llm_id and llm_id in self.llm_clients else self.current_llm
        llm_client = self.llm_clients[llm_to_use]

        # Log which LLM handles the request
        logger.info(f"RAG text generation request, LLM: {llm_to_use}")

        # Delegate the RAG generation request to the selected client
        return llm_client.rag_generate(
            query=query,
            context=context,
            **kwargs
        )
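
# A minimal usage sketch (assumptions: the API keys expected by OpenAILLM /
# DeepSeekLLM are supplied via the .env file loaded above, and those client
# classes expose the generate()/rag_generate() signatures called by this
# interface). Illustrative only, not part of the module's public API.
if __name__ == "__main__":
    interface = LLMInterface(default_llm="openai")
    print(interface.get_current_llm_details())

    # Plain generation with the current LLM
    answer = interface.generate(
        prompt="Summarize what retrieval-augmented generation is in one sentence.",
        system_prompt="You are a concise technical assistant.",
        temperature=0.2,
    )
    print(answer)

    # RAG generation, overriding the backend for this call only
    rag_answer = interface.rag_generate(
        query="Which LLMs does this interface support?",
        context=["This interface wraps the OpenAI and DeepSeek chat APIs."],
        llm_id="deepseek",
    )
    print(rag_answer)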