Codingxx committed (verified)
Commit 5be9ac5 · 1 Parent(s): 3ff95be

Update app.py

Files changed (1)
1. app.py (+3 -3)
app.py CHANGED
@@ -1,18 +1,18 @@
 import gradio as gr
 from transformers import AutoModelForCausalLM, AutoTokenizer

-# Load the model and tokenizer from Hugging Face
+# Load your model from Hugging Face Hub
 model_name = "deepseek-ai/DeepSeek-R1-Distill-Llama-70B-free"
 model = AutoModelForCausalLM.from_pretrained(model_name)
 tokenizer = AutoTokenizer.from_pretrained(model_name)

-# Define a function to generate text from the model
+# Function to generate text from the model
 def generate_text(prompt):
     inputs = tokenizer(prompt, return_tensors="pt")
     outputs = model.generate(inputs["input_ids"], max_length=50)
     return tokenizer.decode(outputs[0], skip_special_tokens=True)

-# Create the Gradio interface
+# Set up Gradio Interface
 iface = gr.Interface(fn=generate_text, inputs="text", outputs="text")

 # Launch the app
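For reference, a sketch of the full app.py as it would read after this commit. The hunk ends at the "# Launch the app" comment, so the trailing iface.launch() call is an assumption based on that comment and standard Gradio usage; it is not visible in the diff itself.

import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load your model from Hugging Face Hub
model_name = "deepseek-ai/DeepSeek-R1-Distill-Llama-70B-free"
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)

# Function to generate text from the model
def generate_text(prompt):
    inputs = tokenizer(prompt, return_tensors="pt")
    outputs = model.generate(inputs["input_ids"], max_length=50)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

# Set up Gradio Interface
iface = gr.Interface(fn=generate_text, inputs="text", outputs="text")

# Launch the app
iface.launch()  # assumed: not shown in the hunk, implied by the comment above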