DB2323 committed on
Commit
42e540e
·
1 Parent(s): 4acc448

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +35 -0
app.py ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from transformers import AutoModelForCausalLM, AutoTokenizer
2
+ import cradio as cr
3
+
4
+ # Load the first model and tokenizer
5
+ model_1 = AutoModelForCausalLM.from_pretrained("mosaicml/mpt-7b-chat")
6
+ tokenizer_1 = AutoTokenizer.from_pretrained("mosaicml/mpt-7b-chat")
7
+
8
+ # Load the second model and tokenizer
9
+ model_2 = AutoModelForCausalLM.from_pretrained("nomic-ai/gpt4all-j")
10
+ tokenizer_2 = AutoTokenizer.from_pretrained("nomic-ai/gpt4all-j")
11
+
def generate_responses(prompt, max_length=50):
    """Generate one response to *prompt* from each of the two loaded models.

    Args:
        prompt: The user's input text.
        max_length: Maximum total token length (prompt plus completion)
            passed to ``generate`` for both models. Defaults to 50, the
            original hard-coded value, so existing callers are unaffected.

    Returns:
        A tuple ``(response_1, response_2)`` of decoded response strings,
        one per model.
    """
    # Encode the prompt with each model's own tokenizer — the two models
    # use different vocabularies, so the ids are not interchangeable.
    input_ids_1 = tokenizer_1.encode(prompt, return_tensors="pt")
    input_ids_2 = tokenizer_2.encode(prompt, return_tensors="pt")

    # Generate one sequence from each model.
    output_1 = model_1.generate(input_ids_1, max_length=max_length, num_return_sequences=1)
    output_2 = model_2.generate(input_ids_2, max_length=max_length, num_return_sequences=1)

    # Decode each output with the tokenizer that produced its input ids.
    response_1 = tokenizer_1.decode(output_1[0], skip_special_tokens=True)
    response_2 = tokenizer_2.decode(output_2[0], skip_special_tokens=True)

    return response_1, response_2
# Build the Gradio UI.
# FIX: gradio has no "TextInput" component — the text-input widget is
# Textbox, and its label is passed as the `label=` keyword argument
# (the first positional argument of Textbox is the initial value).
iface = cr.Interface(
    fn=generate_responses,
    inputs=cr.Textbox(label="Enter your question:"),
    outputs=[
        cr.Textbox(label="Response from model 1"),
        cr.Textbox(label="Response from model 2"),
    ],
    title="AI Chatbot",
    description="Get responses from two different AI models.",
)

iface.launch()