xzuyn committed
Commit 765b56a · verified · 1 Parent(s): 6475fdc

Update app.py

Files changed (1): app.py (+8, -1)
app.py CHANGED

@@ -24,6 +24,9 @@ def tokenize(input_text):
     phi2_tokens = len(
         phi2_tokenizer(input_text, add_special_tokens=True)["input_ids"]
     )
+    phi3_tokens = len(
+        phi3_tokenizer(input_text, add_special_tokens=True)["input_ids"]
+    )
     t5_tokens = len(
         t5_tokenizer(input_text, add_special_tokens=True)["input_ids"]
     )
@@ -48,6 +51,7 @@ def tokenize(input_text):
         "GPT-NeoX": gpt_neox_tokens,
         "Falcon": falcon_tokens,
         "Phi-1/Phi-2": phi2_tokens,
+        "Phi-3": phi3_tokens,
         "T5": t5_tokens,
         "Gemma": gemma_tokens,
         "Command-R": command_r_tokens,
@@ -83,6 +87,9 @@ if __name__ == "__main__":
     phi2_tokenizer = AutoTokenizer.from_pretrained(
         "microsoft/phi-2"
     )
+    phi3_tokenizer = AutoTokenizer.from_pretrained(
+        "microsoft/Phi-3-mini-4k-instruct"
+    )
     t5_tokenizer = AutoTokenizer.from_pretrained(
         "google/flan-t5-xxl"
     )
@@ -100,6 +107,6 @@ if __name__ == "__main__":
     )

     iface = gr.Interface(
-        fn=tokenize, inputs=gr.Textbox(label="Input Text", lines=12), outputs="text"
+        fn=tokenize, inputs=gr.Textbox(label="Input Text", lines=13), outputs="text"
     )
     iface.launch()
 
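For reference, the counting pattern this commit extends can be reproduced outside the Gradio app. Below is a minimal sketch, assuming only the transformers package: it loads the two Phi checkpoints named in the diff and counts tokens the same way tokenize() does. The count_tokens helper and the sample string are illustrative, not part of app.py.

from transformers import AutoTokenizer

# The two checkpoints named in the diff above.
phi2_tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-2")
phi3_tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3-mini-4k-instruct")

def count_tokens(tokenizer, text):
    # Same call as in tokenize(): encode with special tokens, count the ids.
    return len(tokenizer(text, add_special_tokens=True)["input_ids"])

sample = "The quick brown fox jumps over the lazy dog."
print("Phi-1/Phi-2:", count_tokens(phi2_tokenizer, sample))
print("Phi-3:", count_tokens(phi3_tokenizer, sample))

Because the two models use different vocabularies, the counts generally differ for the same input, which is the comparison the app surfaces alongside its other tokenizers.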