srush commited on
Commit
2afdd6b
·
1 Parent(s): 8947111

Upload with huggingface_hub

Browse files
Files changed (3) hide show
  1. app.py +35 -25
  2. gatsby.pmpt.tpl +1 -1
  3. requirements.txt +2 -1
app.py CHANGED
@@ -1,11 +1,18 @@
1
- # Questions answering with Hugging Face embeddings. Adapted from the
2
- # [LlamaIndex
3
- # example](https://github.com/jerryjliu/gpt_index/blob/main/examples/gatsby/TestGatsby.ipynb).
 
 
 
 
 
 
 
 
4
 
5
  import datasets
6
  import numpy as np
7
-
8
- from minichain import EmbeddingPrompt, TemplatePrompt, show_log, start_chain
9
 
10
  # Load data with embeddings (computed beforehand)
11
 
@@ -14,32 +21,35 @@ gatsby.add_faiss_index("embeddings")
14
 
15
  # Fast KNN retrieval prompt
16
 
17
- class KNNPrompt(EmbeddingPrompt):
18
- def prompt(self, inp):
19
- return inp["query"]
20
-
21
- def find(self, out, inp):
22
- res = gatsby.get_nearest_examples("embeddings", np.array(out), 1)
23
- return {"question": inp["query"], "docs": res.examples["passages"]}
24
-
25
- # QA prompt to ask question with examples
26
 
 
 
 
 
27
 
28
- class QAPrompt(TemplatePrompt):
29
- template_file = "gatsby.pmpt.tpl"
 
30
 
31
 
32
- with start_chain("gatsby") as backend:
33
- # question = "What did Gatsby do before he met Daisy?"
34
- prompt = KNNPrompt(
35
- backend.HuggingFaceEmbed("sentence-transformers/all-mpnet-base-v2")
36
- ).chain(QAPrompt(backend.OpenAI()))
37
- # result = prompt(question)
38
- # print(result)
39
 
40
 
41
- prompt.to_gradio(fields=["query"],
42
- examples=["What did Gatsby do before he met Daisy?"]).launch()
 
 
 
 
 
 
 
 
43
 
44
 
45
 
 
1
# Markdown description shown in the app UI. The `# +`/`# -` pair are
# jupytext cell markers (tagged "hide_inp" so this cell is hidden when the
# script is rendered as a notebook); the `# $` marker below delimits the
# code excerpt that is read back and displayed by the app (see the
# `code=` argument passed to `show(...)` later in this file).
# + tags=["hide_inp"]
desc = """
### Book QA

Chain that does question answering with Hugging Face embeddings. [[Code](https://github.com/srush/MiniChain/blob/main/examples/gatsby.py)]

(Adapted from the [LlamaIndex example](https://github.com/jerryjliu/gpt_index/blob/main/examples/gatsby/TestGatsby.ipynb).)
"""
# -

# $
12
 
13
  import datasets
14
  import numpy as np
15
+ from minichain import prompt, show, HuggingFaceEmbed, OpenAI
 
16
 
17
  # Load data with embeddings (computed beforehand)
18
 
 
21
 
22
  # Fast KNN retrieval prompt
23
 
24
@prompt(HuggingFaceEmbed("sentence-transformers/all-mpnet-base-v2"))
def get_neighbors(model, inp, k=1):
    """Embed the query and return its k nearest passages from the book index.

    Args:
        model: embedding callable supplied by the @prompt decorator.
        inp: the query string to embed.
        k: number of nearest passages to retrieve (default 1).

    Returns:
        The retrieved passage texts from the FAISS-indexed dataset.
    """
    embedding = model(inp)
    # FIX: the dataset loaded in this file is `gatsby`, not `olympics` —
    # `olympics` is an undefined name (copy-paste from a sibling example)
    # and would raise NameError at query time. The previous version of this
    # file also queried `gatsby.get_nearest_examples(...)`.
    res = gatsby.get_nearest_examples("embeddings", np.array(embedding), k)
    return res.examples["passages"]
 
 
 
 
29
 
30
@prompt(OpenAI(),
        template_file="gatsby.pmpt.tpl")
def ask(model, query, neighbors):
    """Fill the QA template with the question and retrieved passages, then query the LLM.

    Args:
        model: LLM callable supplied by the @prompt decorator.
        query: the user's question.
        neighbors: retrieved context passages to ground the answer.
    """
    template_vars = {"question": query, "docs": neighbors}
    return model(template_vars)
34
 
35
def gatsby(query):
    """Answer a question about the book: retrieve nearest passages, then ask the LLM."""
    passages = get_neighbors(query)
    answer = ask(query, passages)
    return answer
38
 
39
 
40
+ # $
 
 
 
 
 
 
41
 
42
 
43
# Read back this script's own source between the `# $` markers so the app
# can display the relevant code excerpt.
# FIX: the original used a bare open(...).read() inline, which leaks the
# file handle (never closed; relies on GC) — use a `with` block instead.
with open("gatsby.py", "r") as _source_file:
    _code_excerpt = _source_file.read().split("$")[1].strip().strip("#").strip()

# Build the Gradio demo: `gatsby` is the top-level chain, and its two
# sub-prompts are listed so their intermediate inputs/outputs are shown.
gradio = show(gatsby,
              subprompts=[get_neighbors, ask],
              examples=["What did Gatsby do before he met Daisy?",
                        "What did the narrator do after getting back to Chicago?"],
              keys={"HF_KEY"},
              description=desc,
              code=_code_excerpt,
              )
if __name__ == "__main__":
    gradio.launch()
53
 
54
 
55
 
gatsby.pmpt.tpl CHANGED
@@ -1,4 +1,4 @@
1
- Context information is below.
2
 
3
 
4
  ---------------------
 
1
+ Context information is below.
2
 
3
 
4
  ---------------------
requirements.txt CHANGED
@@ -1,3 +1,4 @@
1
- gradio
2
  git+https://github.com/srush/minichain@gradio
3
  manifest-ml
 
 
1
+ gradio==3.21.0
2
  git+https://github.com/srush/minichain@gradio
3
  manifest-ml
4
+ faiss-cpu