schoolkithub committed on
Commit
29032bf
·
verified ·
1 Parent(s): 043cb3a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +26 -21
app.py CHANGED
@@ -12,7 +12,7 @@ import pdfplumber
12
  # ==== CONFIG ====
13
  DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
14
  HF_TOKEN = os.getenv("HF_TOKEN")
15
- GROK_API_KEY = os.getenv("GROK_API_KEY") or "xai-AyJXz3OAAMuQiOrPzPptUWTmsEyI9vywPpbV19S1nCpXXKWoKLqOoGc61RazPPui2fx4Ekb1durXccqz"
16
 
17
  CONVERSATIONAL_MODELS = [
18
  "deepseek-ai/DeepSeek-LLM",
@@ -25,7 +25,7 @@ wiki_api = wikipediaapi.Wikipedia(language="en", user_agent="SmartAgent/1.0 (cho
25
  # ==== UTILITY: Link/file detection ====
26
  def extract_links(text):
27
  url_pattern = re.compile(r'(https?://[^\s\)\],]+)')
28
- return url_pattern.findall(text)
29
 
30
  def download_file(url, out_dir="tmp_files"):
31
  os.makedirs(out_dir, exist_ok=True)
@@ -70,7 +70,7 @@ def analyze_file(file_path):
70
  return f"TXT error: {e}"
71
  else:
72
  return f"Unsupported file type: {file_path}"
73
-
74
  def analyze_webpage(url):
75
  try:
76
  r = requests.get(url, timeout=15)
@@ -108,36 +108,40 @@ def llm_conversational(query):
108
  hf_client = InferenceClient(model_id, token=HF_TOKEN)
109
  # Try conversational if available, else fallback to text_generation
110
  if hasattr(hf_client, "conversational"):
111
- result = hf_client.conversational(
112
- messages=[{"role": "user", "content": query}],
113
- max_new_tokens=384,
114
- )
 
 
 
 
 
 
 
 
 
 
 
 
115
  if isinstance(result, dict) and "generated_text" in result:
116
  return result["generated_text"]
117
- elif hasattr(result, "generated_text"):
118
- return result.generated_text
119
  elif isinstance(result, str):
120
  return result
121
- else:
122
- continue
123
- result = hf_client.text_generation(query, max_new_tokens=384)
124
- if isinstance(result, dict) and "generated_text" in result:
125
- return result["generated_text"]
126
- elif isinstance(result, str):
127
- return result
128
  except Exception as e:
129
  last_error = f"{model_id}: {e}"
130
  return None
131
 
132
  def is_coding_question(text):
133
- # Basic heuristic: mentions code, function, "python", code blocks, etc.
134
  code_terms = [
135
  "python", "java", "c++", "code", "function", "write a", "script", "algorithm",
136
  "bug", "traceback", "error", "output", "compile", "debug"
137
  ]
138
- if any(term in text.lower() for term in code_terms):
139
  return True
140
- if re.search(r"```.+```", text, re.DOTALL):
141
  return True
142
  return False
143
 
@@ -160,9 +164,8 @@ def grok_completion(question, system_prompt=None):
160
  r = requests.post(url, headers=headers, json=payload, timeout=45)
161
  r.raise_for_status()
162
  data = r.json()
163
- # Extract assistant's reply
164
  return data['choices'][0]['message']['content']
165
- except Exception as e:
166
  return None
167
 
168
  # ==== SMART AGENT ====
@@ -181,6 +184,8 @@ class SmartAgent:
181
  if local:
182
  file_analysis = analyze_file(local)
183
  results.append(f"File ({url}):\n{file_analysis}")
 
 
184
  else:
185
  results.append(analyze_webpage(url))
186
  if results:
 
12
  # ==== CONFIG ====
13
  DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
14
  HF_TOKEN = os.getenv("HF_TOKEN")
15
+ GROK_API_KEY = os.getenv("GROK_API_KEY")
16
 
17
  CONVERSATIONAL_MODELS = [
18
  "deepseek-ai/DeepSeek-LLM",
 
25
  # ==== UTILITY: Link/file detection ====
26
def extract_links(text):
    """Return every http(s) URL found in *text*.

    None or empty input yields an empty list. A URL match stops at
    whitespace, ')', ']' or ',' so links embedded in prose punctuation
    are captured cleanly.
    """
    # re.findall with an inline pattern is equivalent to compiling first;
    # the re module caches the compiled pattern internally.
    return re.findall(r'(https?://[^\s\)\],]+)', text or "")
29
 
30
  def download_file(url, out_dir="tmp_files"):
31
  os.makedirs(out_dir, exist_ok=True)
 
70
  return f"TXT error: {e}"
71
  else:
72
  return f"Unsupported file type: {file_path}"
73
+
74
  def analyze_webpage(url):
75
  try:
76
  r = requests.get(url, timeout=15)
 
108
  hf_client = InferenceClient(model_id, token=HF_TOKEN)
109
  # Try conversational if available, else fallback to text_generation
110
  if hasattr(hf_client, "conversational"):
111
+ try:
112
+ result = hf_client.conversational(
113
+ messages=[{"role": "user", "content": query}],
114
+ max_new_tokens=384,
115
+ )
116
+ if isinstance(result, dict) and "generated_text" in result:
117
+ return result["generated_text"]
118
+ elif hasattr(result, "generated_text"):
119
+ return result.generated_text
120
+ elif isinstance(result, str):
121
+ return result
122
+ except Exception:
123
+ pass
124
+ # Fallback to text_generation
125
+ try:
126
+ result = hf_client.text_generation(query, max_new_tokens=384)
127
  if isinstance(result, dict) and "generated_text" in result:
128
  return result["generated_text"]
 
 
129
  elif isinstance(result, str):
130
  return result
131
+ except Exception:
132
+ pass
 
 
 
 
 
133
  except Exception as e:
134
  last_error = f"{model_id}: {e}"
135
  return None
136
 
137
def is_coding_question(text):
    """Heuristically decide whether *text* is a programming question.

    Returns True when the text mentions common coding vocabulary or
    contains a fenced ``` code block; None/empty input is non-coding.
    """
    lowered = (text or "").lower()
    keywords = (
        "python", "java", "c++", "code", "function", "write a", "script", "algorithm",
        "bug", "traceback", "error", "output", "compile", "debug",
    )
    for keyword in keywords:
        if keyword in lowered:
            return True
    # Fenced code block anywhere in the text (DOTALL lets it span lines).
    if re.search(r"```.+```", text or "", re.DOTALL):
        return True
    return False
147
 
 
164
  r = requests.post(url, headers=headers, json=payload, timeout=45)
165
  r.raise_for_status()
166
  data = r.json()
 
167
  return data['choices'][0]['message']['content']
168
+ except Exception:
169
  return None
170
 
171
  # ==== SMART AGENT ====
 
184
  if local:
185
  file_analysis = analyze_file(local)
186
  results.append(f"File ({url}):\n{file_analysis}")
187
+ else:
188
+ results.append(f"Could not download file: {url}")
189
  else:
190
  results.append(analyze_webpage(url))
191
  if results: