Simon Salmon committed
Commit ffe12e1 · 1 Parent(s): 0191d40

Update app.py

Files changed (1)
  1. app.py +1 -12
app.py CHANGED
@@ -13,9 +13,7 @@ from transformers import AutoTokenizer
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 
 from transformers import AutoTokenizer, AutoModelForMaskedLM
-artist_name = st.text_input("Model", "roberta-large")
 tokenizer = AutoTokenizer.from_pretrained("roberta-large")
-model = AutoModelForMaskedLM.from_pretrained(artist_name)
 model2 = AutoModelForMaskedLM.from_pretrained("BigSalmon/FormalRobertaa")
 
 
@@ -37,13 +35,4 @@ with st.form(key='my_form'):
         mask_hidden_state = last_hidden_state[mask_index]
         idx = torch.topk(mask_hidden_state, k=100, dim=0)[1]
         words = [tokenizer.decode(i.item()).strip() for i in idx]
-        a_list.append(words)
-    with torch.no_grad():
-        output = model2(token_ids)
-    last_hidden_state = output[0].squeeze()
-    for mask_index in masked_pos:
-        mask_hidden_state = last_hidden_state[mask_index]
-        idx = torch.topk(mask_hidden_state, k=100, dim=0)[1]
-        words2 = [tokenizer.decode(i.item()).strip() for i in idx]
-        a_list.append(words2)
-    st.text_area(label = 'Infill:', value=a_list)
+    st.text_area(label = 'Infill:', value=words)
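
For reference, a minimal sketch of the single-model infill path that app.py keeps after this commit: the user-selectable model and the second inference pass over model2 are dropped, and the top-k candidates from the last mask position are shown directly. The diff does not include the form handling, so the prompt widget and the way token_ids and masked_pos are built below are assumptions, not the exact code in the repository.

import streamlit as st
import torch
from transformers import AutoTokenizer, AutoModelForMaskedLM

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

tokenizer = AutoTokenizer.from_pretrained("roberta-large")
model2 = AutoModelForMaskedLM.from_pretrained("BigSalmon/FormalRobertaa").to(device)

with st.form(key='my_form'):
    # Hypothetical prompt widget; the real app.py builds token_ids/masked_pos
    # from its own form fields, which this commit does not touch.
    prompt = st.text_area(label='Sentence with <mask>:', value='The meeting was <mask>.')
    submitted = st.form_submit_button('Submit')
    if submitted:
        token_ids = tokenizer.encode(prompt, return_tensors='pt').to(device)
        # Positions of every <mask> token in the encoded prompt.
        masked_pos = (token_ids.squeeze() == tokenizer.mask_token_id).nonzero(as_tuple=True)[0]
        with torch.no_grad():
            output = model2(token_ids)
        last_hidden_state = output[0].squeeze()
        for mask_index in masked_pos:
            mask_hidden_state = last_hidden_state[mask_index]
            # Top-100 candidate fillers for this mask position.
            idx = torch.topk(mask_hidden_state, k=100, dim=0)[1]
            words = [tokenizer.decode(i.item()).strip() for i in idx]
        # Mirrors the added line in the diff: only the candidates for the
        # last mask position end up in the output box.
        st.text_area(label='Infill:', value=words)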