broadfield-dev committed on
Commit 46634ed · verified · 1 Parent(s): b7fd5ec

Update app.py

Files changed (1)
  1. app.py +23 -15

app.py CHANGED
@@ -6,19 +6,27 @@ model_name = 'deepseek-ai/deepseek-coder-33b-instruct'
 tokenizer = AutoTokenizer.from_pretrained(model_name)
 model = AutoModelForCausalLM.from_pretrained(model_name)
 
 
-input_ids = tokenizer.encode(input_text, return_tensors="pt")
-
-kwargs = {
-    "max_length": 500,
-    "num_return_sequences": 1,
-    "temperature": 0.7,
-    "top_k": 50
-}
-
-# Generate text
-output_ids = model.generate(input_ids, **kwargs)
-
-# Decode and print the output
-output_text = tokenizer.decode(output_ids[0], skip_special_tokens=True)
-print(output_text)
+def call_llm(input_text):
+    input_ids = tokenizer.encode(input_text, return_tensors="pt")
+
+    kwargs = {
+        "max_length": 500,
+        "num_return_sequences": 1,
+        "temperature": 0.7,
+        "top_k": 50
+    }
+
+    # Generate text
+    output_ids = model.generate(input_ids, **kwargs)
+
+    # Decode and print the output
+    output_text = tokenizer.decode(output_ids[0], skip_special_tokens=True)
+    print(output_text)
+    return output_text
+
+with gr.Blocks() as app:
+    chat = gr.ChatInterface(
+        call_llm,
+    )
+    app.launch()
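
For reference, a minimal standalone sketch (not part of this commit) of how the new call_llm function could be driven by gr.ChatInterface. Gradio's ChatInterface invokes its fn with (message, history), so a thin wrapper is assumed here; the wrapper name chat_fn is hypothetical and the model call is stubbed out for brevity.

import gradio as gr

def call_llm(input_text):
    # Stand-in for the model-backed call_llm defined in app.py above.
    return "echo: " + input_text

def chat_fn(message, history):
    # gr.ChatInterface calls fn(message, history); only the latest message is used here.
    return call_llm(message)

gr.ChatInterface(chat_fn).launch()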