Vela committed · Commit dab58f3 · 1 Parent(s): f7d4608

modified llm_service module
application/services/llm_service.py  CHANGED

@@ -8,7 +8,7 @@ from application.utils import logger
 from application.schemas.response_schema import RESPONSE_FORMAT,GEMINI_RESPONSE_FORMAT
 
 logger = logger.get_logger()
-client = OpenAI()
+client = OpenAI(api_key=os.getenv("openai_api_key"))
 
 # --- Constants ---
 
@@ -100,7 +100,6 @@ def extract_emissions_data_as_json(
     """
     try:
         if api.lower() == "openai":
-            client = OpenAI()
             file = get_or_create_file(file_input, client)
 
             logger.info("[OpenAI] Sending content for generation...")
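Taken together, the two hunks change how the OpenAI client is constructed: the module-level client now receives its key explicitly via os.getenv, and the redundant client = OpenAI() inside the "openai" branch of extract_emissions_data_as_json() is dropped, so the whole module shares a single client. A minimal sketch of the top of llm_service.py after this commit follows; the import lines are assumptions (only the lines shown in the diff come from the commit), and the environment variable name openai_api_key is copied verbatim from the added line.

import os                   # assumed: required for the os.getenv call below
from openai import OpenAI   # assumed: already present, since OpenAI() was used before

from application.utils import logger
from application.schemas.response_schema import RESPONSE_FORMAT, GEMINI_RESPONSE_FORMAT

logger = logger.get_logger()

# One shared client, created at import time with an explicit key,
# instead of being re-created inside extract_emissions_data_as_json().
client = OpenAI(api_key=os.getenv("openai_api_key"))

With this layout, the "openai" branch of extract_emissions_data_as_json() simply reuses the module-level client (as in the second hunk), so the key is read from the environment once rather than on every call.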