codewithdark committed on
Commit
9c78cb3
·
verified ·
1 Parent(s): 9fe47f5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -2
app.py CHANGED
@@ -1,6 +1,6 @@
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
- from transformers import AutoModelForCausalLM, AutoTokenizer
4
  import torch
5
 
6
  # Initialize Hugging Face Inference API client
@@ -9,7 +9,8 @@ hf_client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
9
  # Load the second model
10
  local_model_name = "codewithdark/latent-recurrent-depth-lm"
11
  tokenizer = AutoTokenizer.from_pretrained(local_model_name)
12
- model = AutoModelForCausalLM.from_pretrained(local_model_name)
 
13
  device = "cuda" if torch.cuda.is_available() else "cpu"
14
  model.to(device)
15
 
 
1
  import gradio as gr
2
  from huggingface_hub import InferenceClient
3
+ from transformers import AutoModel,AutoConfig, AutoTokenizer
4
  import torch
5
 
6
  # Initialize Hugging Face Inference API client
 
9
  # Load the second model
10
  local_model_name = "codewithdark/latent-recurrent-depth-lm"
11
  tokenizer = AutoTokenizer.from_pretrained(local_model_name)
12
+ config = AutoConfig.from_pretrained(local_model_name)
13
+ model = AutoModel.from_pretrained(local_model_name, config=config)
14
  device = "cuda" if torch.cuda.is_available() else "cpu"
15
  model.to(device)
16