SecureLLMSys committed on
Commit
ee19553
·
1 Parent(s): 2fae289
src/attribution/attntrace.py CHANGED
@@ -6,6 +6,7 @@ import torch.nn.functional as F
6
  import gc
7
  from src.prompts import wrap_prompt_attention
8
  from .attention_utils import *
 
9
 
10
  class AttnTraceAttribution(Attribution):
11
  def __init__(self, llm,explanation_level = "segment",K=5, avg_k=5, q=0.4, B=30, verbose =1):
 
6
  import gc
7
  from src.prompts import wrap_prompt_attention
8
  from .attention_utils import *
9
+ import spaces
10
 
11
  class AttnTraceAttribution(Attribution):
12
  def __init__(self, llm,explanation_level = "segment",K=5, avg_k=5, q=0.4, B=30, verbose =1):
src/models/Llama.py CHANGED
@@ -4,6 +4,7 @@ from .Model import Model
4
  import os
5
  import signal
6
  from functools import lru_cache
 
7
 
8
  def handle_timeout(sig, frame):
9
  raise TimeoutError('took too long')
 
4
  import os
5
  import signal
6
  from functools import lru_cache
7
+ import spaces
8
 
9
  def handle_timeout(sig, frame):
10
  raise TimeoutError('took too long')