Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -1,6 +1,6 @@
 import queue
 import threading
-
+import os
 import gradio as gr
 from dia.model import Dia
 from huggingface_hub import InferenceClient
@@ -9,7 +9,7 @@ from huggingface_hub import InferenceClient
 PODCAST_SUBJECT = "The future of AI and its impact on society"
 
 # Initialize the inference client
-client = InferenceClient("Qwen/Qwen2.5-Coder-32B-Instruct", provider="together")
+client = InferenceClient("Qwen/Qwen2.5-Coder-32B-Instruct", provider="together", token=os.getenv("HF_TOKEN"))
 model = Dia.from_pretrained("nari-labs/Dia-1.6B", compute_dtype="float16")
 
 # Queue for audio streaming
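
The change itself is small: the commit adds import os and passes an explicit token to InferenceClient, so the request routed through the Together provider authenticates with the caller's Hugging Face token instead of relying on an implicit login. A minimal sketch of the resulting client setup, assuming HF_TOKEN is exposed as an environment variable (for example via a Space secret, or an export in a local shell):

import os
from huggingface_hub import InferenceClient

# Assumption: HF_TOKEN is set in the environment, e.g. as a Space secret
# or exported locally (export HF_TOKEN=hf_...) before launching the app.
client = InferenceClient(
    "Qwen/Qwen2.5-Coder-32B-Instruct",
    provider="together",
    token=os.getenv("HF_TOKEN"),  # returns None if the variable is unset
)

On Spaces, secrets added in the Space settings are exposed to the running app as environment variables, so os.getenv("HF_TOKEN") picks the value up at startup; locally, exporting the variable before launch has the same effect.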