Update app.py
app.py CHANGED
@@ -4,7 +4,9 @@ from huggingface_hub import InferenceClient
 import gradio as gr
 from huggingface_hub import InferenceClient
 import os
-import zipfile
+import zipfile
+import nbformat
+from nbconvert.preprocessors import ExecutePreprocessor
 
 # auto-extract the zip file
 zip_path = "solo_leveling_faiss_ko.zip"
@@ -22,6 +24,12 @@ For more information on `huggingface_hub` Inference API support, please check th
 """
 client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
 
+def run_notebook(path):
+    with open(path, encoding='utf-8') as f:
+        nb = nbformat.read(f, as_version=4)
+    ep = ExecutePreprocessor(timeout=600, kernel_name='python3')
+    ep.preprocess(nb, {'metadata': {'path': './'}})  # execute in the current working directory
+    print(f"[INFO] Notebook {path} execution complete")
 
 def respond(
     message,
@@ -77,4 +85,5 @@ demo = gr.ChatInterface(
 
 
 if __name__ == "__main__":
+    run_notebook("r-story-test.ipynb")
     demo.launch()
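For context, the "# auto-extract the zip file" comment and the zip_path assignment point at extraction logic that falls outside the diff context shown above. A minimal sketch of what that startup block presumably looks like, assuming the archive is unpacked next to app.py and the step is skipped once the target folder exists (extract_dir is a hypothetical name, not taken from the diff):

import os
import zipfile

zip_path = "solo_leveling_faiss_ko.zip"
extract_dir = "solo_leveling_faiss_ko"  # hypothetical target folder; the real name is not shown in the diff

# Unpack the FAISS index once at startup; skip if it has already been extracted.
if not os.path.isdir(extract_dir):
    with zipfile.ZipFile(zip_path, "r") as zf:
        zf.extractall(extract_dir)

Note that ExecutePreprocessor drives a real Jupyter kernel, so nbformat, nbconvert, and ipykernel would presumably also need to be listed in the Space's requirements.txt for run_notebook("r-story-test.ipynb") to succeed at launch.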