Spaces:
Running
on
Zero
Running
on
Zero
Add examples
Browse files
app.py
CHANGED
@@ -9,6 +9,7 @@ import numpy as np
|
|
9 |
import time
|
10 |
import pymupdf
|
11 |
import requests
|
|
|
12 |
|
13 |
import torch
|
14 |
from huggingface_hub import InferenceClient
|
@@ -118,6 +119,10 @@ def generate_podcast(url: str, pdf_path: str, topic: str):
|
|
118 |
t1 = time.time()
|
119 |
print(f"PROCESSED '{utterance}' in {int(t1-t0)} seconds. {audio_numpy.shape}")
|
120 |
|
|
|
|
|
|
|
|
|
121 |
demo = gr.Interface(
|
122 |
title="Open NotebookLM 🎙️",
|
123 |
description=f"""Generates a podcast discussion between two hosts about the materials of your choice.
|
@@ -148,8 +153,8 @@ Based on [Kokoro TTS](https://huggingface.co/hexgrad/Kokoro-82M), lightning-fast
|
|
148 |
theme=gr.themes.Soft(),
|
149 |
submit_btn="Generate podcast 🎙️",
|
150 |
# clear_btn=gr.Button("🎙️"),
|
151 |
-
|
152 |
-
|
153 |
)
|
154 |
|
155 |
if __name__ == "__main__":
|
|
|
9 |
import time
|
10 |
import pymupdf
|
11 |
import requests
|
12 |
+
from pathlib import Path
|
13 |
|
14 |
import torch
|
15 |
from huggingface_hub import InferenceClient
|
|
|
119 |
t1 = time.time()
|
120 |
print(f"PROCESSED '{utterance}' in {int(t1-t0)} seconds. {audio_numpy.shape}")
|
121 |
|
122 |
+
EXAMPLES = [
|
123 |
+
["https://huggingface.co/blog/inference-providers-cohere", "", "How does using this compare with other inference solutions?"],
|
124 |
+
["", str(Path("examples/Essay_Palantir.pdf")), "Make sure to keep some critic spirit in the analysis!"],
|
125 |
+
]
|
126 |
demo = gr.Interface(
|
127 |
title="Open NotebookLM 🎙️",
|
128 |
description=f"""Generates a podcast discussion between two hosts about the materials of your choice.
|
|
|
153 |
theme=gr.themes.Soft(),
|
154 |
submit_btn="Generate podcast 🎙️",
|
155 |
# clear_btn=gr.Button("🎙️"),
|
156 |
+
examples=EXAMPLES,
|
157 |
+
cache_examples=True,
|
158 |
)
|
159 |
|
160 |
if __name__ == "__main__":
|