DB2323 committed
Commit db9c34a · 1 Parent(s): 410c470

Update app.py

Files changed (1)
  1. app.py +0 -18
app.py CHANGED
@@ -4,22 +4,4 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
 model_1 = AutoModelForCausalLM.from_pretrained("mosaicml/mpt-7b-chat", trust_remote_code=True)
 tokenizer_1 = AutoTokenizer.from_pretrained("mosaicml/mpt-7b-chat")
 
-# Load the second model and tokenizer
-model_2 = AutoModelForCausalLM.from_pretrained("nomic-ai/gpt4all-j")
-tokenizer_2 = AutoTokenizer.from_pretrained("nomic-ai/gpt4all-j")
-
-def generate_responses(prompt):
-    # Encode the prompt using both tokenizers
-    input_ids_1 = tokenizer_1.encode(prompt, return_tensors="pt")
-    input_ids_2 = tokenizer_2.encode(prompt, return_tensors="pt")
-
-    # Generate responses using both models
-    output_1 = model_1.generate(input_ids_1, max_length=50, num_return_sequences=1)
-    output_2 = model_2.generate(input_ids_2, max_length=50, num_return_sequences=1)
-
-    # Decode the responses using the corresponding tokenizers
-    response_1 = tokenizer_1.decode(output_1[0], skip_special_tokens=True)
-    response_2 = tokenizer_2.decode(output_2[0], skip_special_tokens=True)
-
-    return response_1, response_2
 
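After this commit, app.py keeps only the MPT-7B-Chat model and tokenizer. For reference, a minimal single-model sketch of the deleted two-model helper, reusing the same generation settings from the removed code (the name generate_response and the example prompt are hypothetical, not part of the repo):

from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the model and tokenizer that remain in app.py after this commit
model_1 = AutoModelForCausalLM.from_pretrained("mosaicml/mpt-7b-chat", trust_remote_code=True)
tokenizer_1 = AutoTokenizer.from_pretrained("mosaicml/mpt-7b-chat")

def generate_response(prompt):
    # Encode the prompt, generate with the settings the removed helper used,
    # and decode the single returned sequence.
    input_ids = tokenizer_1.encode(prompt, return_tensors="pt")
    output = model_1.generate(input_ids, max_length=50, num_return_sequences=1)
    return tokenizer_1.decode(output[0], skip_special_tokens=True)

print(generate_response("What is MPT-7B?"))  # hypothetical example prompt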