gabor1 committed
Commit 9f53bc9 · verified
1 parent: 73dbadf

Update app.py

Files changed (1)
  1. app.py (+13, -14)
app.py CHANGED
@@ -2,32 +2,31 @@ import gradio as gr
 from transformers import AutoTokenizer, AutoModelForSequenceClassification
 import torch
 
-# Load tokenizer and model
+# Load model and tokenizer
 tokenizer = AutoTokenizer.from_pretrained("BAAI/bge-reranker-v2-m3")
 model = AutoModelForSequenceClassification.from_pretrained("BAAI/bge-reranker-v2-m3")
 
-# Define reranking function
-def rerank(query, documents_text):
-    documents = documents_text.strip().split('\n')
-    pairs = [(query, doc) for doc in documents]
+# Reranking logic
+def rerank(query, docs_text):
+    docs = docs_text.strip().split('\n')
+    pairs = [(query, doc) for doc in docs]
     inputs = tokenizer(pairs, padding=True, truncation=True, return_tensors="pt")
     with torch.no_grad():
         scores = model(**inputs).logits.squeeze(-1)
-    results = sorted(zip(documents, scores.tolist()), key=lambda x: x[1], reverse=True)
-    output = "\n\n".join([f"Score: {score:.4f}\n{doc}" for doc, score in results])
-    return output
+    results = sorted(zip(docs, scores.tolist()), key=lambda x: x[1], reverse=True)
+    return "\n\n".join([f"Score: {score:.4f}\n{doc}" for doc, score in results])
 
-# Gradio Interface
+# Create API-ready Interface
 iface = gr.Interface(
     fn=rerank,
     inputs=[
-        gr.Textbox(label="Query", placeholder="Enter your search query", lines=1),
-        gr.Textbox(label="Documents (one per line)", placeholder="Enter one document per line", lines=10)
+        gr.Textbox(label="Query", lines=1),
+        gr.Textbox(label="Documents (one per line)", lines=10)
     ],
-    outputs=gr.Textbox(label="Reranked Output"),
+    outputs="text",
     title="BGE Reranker v2 M3",
-    description="Input a query and multiple documents. Returns reranked results with scores."
+    description="Rerank a list of documents based on a search query using BGE v2 M3."
 )
 
-# Launch the interface (no share=True needed)
+# Do NOT use `share=True`, do NOT set `ssr_mode`
 iface.launch()
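
The updated interface (plain "text" output, no share link) is aimed at programmatic use. As a rough sketch of how a client could call the running Space through gradio_client -- the Space ID below is a placeholder, not taken from this commit; gr.Interface exposes its single endpoint as "/predict" by default:

from gradio_client import Client

# Placeholder Space ID -- replace with the actual owner/space name for this app.
client = Client("gabor1/bge-reranker-v2-m3")

# Arguments follow the input order of the Interface: the query textbox,
# then the documents textbox (one document per line).
result = client.predict(
    "what is a reranker?",
    "A reranker scores query-document pairs.\nBananas are yellow.\nBM25 is a lexical retriever.",
    api_name="/predict",
)
print(result)  # reranked documents, each preceded by its relevance score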