Facelook committed on
Commit
e5bcdd9
·
1 Parent(s): 32df572

Trial and error.

Browse files
Files changed (1) hide show
  1. app.py +21 -16
app.py CHANGED
@@ -21,21 +21,26 @@ class BasicAgent:
21
  self.model_name = "Qwen/Qwen2.5-7B-Instruct"
22
 
23
  # Load model and tokenizer
24
- try:
25
- self.tokenizer = AutoTokenizer.from_pretrained(self.model_name)
26
- self.model = AutoModelForCausalLM.from_pretrained(
27
- self.model_name,
28
- torch_dtype="auto",
29
- device_map="auto"
30
- )
31
- print(f"Successfully loaded {self.model_name}")
32
- except Exception as e:
33
- print(f"Error loading model: {e}")
34
- # Fallback to HuggingFace Inference API if local loading fails
35
- print("Falling back to InferenceClient")
36
- self.client = InferenceClient(model=self.model_name)
37
- self.tokenizer = None
38
- self.model = None
 
 
 
 
 
39
 
40
  def __call__(self, question: str) -> str:
41
  print(f"Agent received question (first 50 chars): {question[:50]}...")
@@ -204,7 +209,7 @@ def run_and_submit_all(profile: gr.OAuthProfile | None):
204
 
205
  # --- Build Gradio Interface using Blocks ---
206
  with gr.Blocks() as demo:
207
- gr.Markdown("# Basic Agent Evaluation Runner")
208
  gr.Markdown(
209
  """
210
  **Instructions:**
 
21
  self.model_name = "Qwen/Qwen2.5-7B-Instruct"
22
 
23
  # Load model and tokenizer
24
+ #try:
25
+ # self.tokenizer = AutoTokenizer.from_pretrained(self.model_name)
26
+ # self.model = AutoModelForCausalLM.from_pretrained(
27
+ # self.model_name,
28
+ # torch_dtype="auto",
29
+ # device_map="auto"
30
+ # )
31
+ # print(f"Successfully loaded {self.model_name}")
32
+ #except Exception as e:
33
+ # print(f"Error loading model: {e}")
34
+ # # Fallback to HuggingFace Inference API if local loading fails
35
+ # print("Falling back to InferenceClient")
36
+ # self.client = InferenceClient(model=self.model_name)
37
+ # self.tokenizer = None
38
+ # self.model = None
39
+ print("Falling back to InferenceClient")
40
+ self.client = InferenceClient(model=self.model_name)
41
+ self.tokenizer = None
42
+ self.model = None
43
+
44
 
45
  def __call__(self, question: str) -> str:
46
  print(f"Agent received question (first 50 chars): {question[:50]}...")
 
209
 
210
  # --- Build Gradio Interface using Blocks ---
211
  with gr.Blocks() as demo:
212
+ gr.Markdown("# Basic Agent Evaluation Runner #")
213
  gr.Markdown(
214
  """
215
  **Instructions:**