shashwatIDR committed on
Commit
765202e
·
verified ·
1 Parent(s): 55a9b45

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -3
app.py CHANGED
@@ -1,10 +1,16 @@
1
  import gradio as gr
2
  from transformers import AutoTokenizer, AutoModelForCausalLM
 
3
  import torch
 
 
 
 
 
4
 
5
  model_id = "bigcode/starcoder"
6
- tokenizer = AutoTokenizer.from_pretrained(model_id)
7
- model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16, device_map="auto")
8
 
9
  def generate_code(prompt):
10
  inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
@@ -12,5 +18,4 @@ def generate_code(prompt):
12
  return tokenizer.decode(outputs[0], skip_special_tokens=True)
13
 
14
  iface = gr.Interface(fn=generate_code, inputs="text", outputs="text", title="StarCoder Code Generator")
15
-
16
  iface.launch(server_name="0.0.0.0", server_port=7860)
 
1
  import gradio as gr
2
  from transformers import AutoTokenizer, AutoModelForCausalLM
3
+ from huggingface_hub import login
4
  import torch
5
+ import os
6
+
7
+ # Use token from environment variable (set securely in Hugging Face Space)
8
+ token = os.environ.get("HF_TOKEN")
9
+ login(token)
10
 
11
  model_id = "bigcode/starcoder"
12
+ tokenizer = AutoTokenizer.from_pretrained(model_id, use_auth_token=token)
13
+ model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16, device_map="auto", use_auth_token=token)
14
 
15
  def generate_code(prompt):
16
  inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
 
18
  return tokenizer.decode(outputs[0], skip_special_tokens=True)
19
 
20
  iface = gr.Interface(fn=generate_code, inputs="text", outputs="text", title="StarCoder Code Generator")
 
21
  iface.launch(server_name="0.0.0.0", server_port=7860)