bluuebunny committed on
Commit
62923ee
·
1 Parent(s): 3e261a2

Changed the model and specified the device in model.encode to make proper use of the GPU

Browse files
Files changed (1) hide show
  1. app.py +5 -4
app.py CHANGED
# Make the app device agnostic: prefer CUDA when a GPU is present.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Load a pretrained Sentence Transformer model and place it on that device.
model = SentenceTransformer("all-MiniLM-L6-v2").to(device)


def predict(input_text):
    """Embed *input_text* with the loaded sentence-transformer model.

    Returns whatever ``model.encode`` produces for the given input.
    """
    return model.encode(input_text)
 
 
# Make the app device agnostic: use the GPU when one is available.
device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")

# Load a pretrained Sentence Transformer model and move it to the device once
# at startup, so each predict() call does not pay the transfer cost.
# SECURITY NOTE(review): trust_remote_code=True executes Python shipped inside
# the model repository — keep it only for model sources you trust.
model = SentenceTransformer('Alibaba-NLP/gte-large-en-v1.5', trust_remote_code=True)
model = model.to(device)


def predict(input_text):
    """Return the embedding(s) of *input_text*.

    Parameters
    ----------
    input_text : str or list of str
        Text to embed.

    Returns
    -------
    The embedding(s) produced by ``model.encode`` (a NumPy array by
    default — confirm against the sentence-transformers version in use).
    """
    # Pass the device explicitly so encoding runs on the GPU when available.
    embeddings = model.encode(input_text, device=device)
    return embeddings