pentarosarium committed on
Commit
99193c1
·
1 Parent(s): 2b18614

progress more (back to 3.22)

Browse files
Files changed (1) hide show
  1. app.py +25 -35
app.py CHANGED
@@ -137,33 +137,19 @@ def fuzzy_deduplicate(df, column, threshold=65):
137
 
138
  def translate_text(llm, text):
139
  try:
140
- if isinstance(llm, ChatOpenAI):
141
- # Handle OpenAI-compatible API calls (Groq, OpenAI)
142
- messages = [
143
- {"role": "system", "content": "You are a translator. Translate the given Russian text to English accurately and concisely."},
144
- {"role": "user", "content": f"Translate this Russian text to English: {text}"}
145
- ]
146
- response = llm.invoke(messages)
147
-
148
- if hasattr(response, 'content'):
149
- return response.content.strip()
150
- elif isinstance(response, str):
151
- return response.strip()
152
- else:
153
- return str(response).strip()
154
  else:
155
- # For Qwen pipeline
156
- messages = [
157
- {"role": "system", "content": "You are a translator. Translate the given Russian text to English accurately and concisely."},
158
- {"role": "user", "content": f"Translate this Russian text to English: {text}"}
159
- ]
160
-
161
- # Generate response using pipeline
162
- response = llm(messages, max_length=512, num_return_sequences=1)[0]['generated_text']
163
-
164
- # Extract the relevant part of the response (after the prompt)
165
- response_text = response.split("English:")[-1].strip()
166
- return response_text
167
 
168
  except Exception as e:
169
  st.error(f"Translation error: {str(e)}")
@@ -196,14 +182,18 @@ def init_langchain_llm(model_choice):
196
  temperature=0.0
197
  )
198
 
199
- else: # Qwen model
200
- # Initialize Qwen pipeline
201
- pipe = pipeline(
202
- "text-generation",
203
- model="Qwen/Qwen2.5-7B-Instruct-GPTQ-Int8",
204
- device_map="auto"
 
 
 
 
 
205
  )
206
- return pipe
207
 
208
  except Exception as e:
209
  st.error(f"Error initializing the LLM: {str(e)}")
@@ -474,12 +464,12 @@ def create_output_file(df, uploaded_file, llm):
474
 
475
  def main():
476
  with st.sidebar:
477
- st.title("::: AI-анализ мониторинга новостей (v.3.21):::")
478
  st.subheader("по материалам СКАН-ИНТЕРФАКС ")
479
 
480
  model_choice = st.radio(
481
  "Выберите модель для анализа:",
482
- ["Groq (llama-3.1-70b)", "ChatGPT-4-mini", "Qwen 2.5-7B (GPTQ-Int8)"],
483
  key="model_selector"
484
  )
485
 
 
137
 
138
  def translate_text(llm, text):
139
  try:
140
+ # All models now use OpenAI-compatible API format
141
+ messages = [
142
+ {"role": "system", "content": "You are a translator. Translate the given Russian text to English accurately and concisely."},
143
+ {"role": "user", "content": f"Translate this Russian text to English: {text}"}
144
+ ]
145
+ response = llm.invoke(messages)
146
+
147
+ if hasattr(response, 'content'):
148
+ return response.content.strip()
149
+ elif isinstance(response, str):
150
+ return response.strip()
 
 
 
151
  else:
152
+ return str(response).strip()
 
 
 
 
 
 
 
 
 
 
 
153
 
154
  except Exception as e:
155
  st.error(f"Translation error: {str(e)}")
 
182
  temperature=0.0
183
  )
184
 
185
+ else: # Qwen API
186
+ if 'dashscope_api_key' not in st.secrets:
187
+ st.error("DashScope API key not found in secrets. Please add it with the key 'dashscope_api_key'.")
188
+ st.stop()
189
+
190
+ # Using Qwen's API through DashScope
191
+ return ChatOpenAI(
192
+ base_url="https://dashscope.aliyuncs.com/api/v1",
193
+ model="qwen-max",
194
+ openai_api_key=st.secrets['ali_key'],
195
+ temperature=0.0
196
  )
 
197
 
198
  except Exception as e:
199
  st.error(f"Error initializing the LLM: {str(e)}")
 
464
 
465
  def main():
466
  with st.sidebar:
467
+ st.title("::: AI-анализ мониторинга новостей (v.3.22):::")
468
  st.subheader("по материалам СКАН-ИНТЕРФАКС ")
469
 
470
  model_choice = st.radio(
471
  "Выберите модель для анализа:",
472
+ ["Groq (llama-3.1-70b)", "ChatGPT-4-mini", "Qwen-Max"],
473
  key="model_selector"
474
  )
475