aliabd (HF Staff) committed
Commit fe7712b · verified
1 Parent(s): e2db590

Upload folder using huggingface_hub

Files changed (4):
  1. README.md +7 -7
  2. requirements.txt +1 -0
  3. run.ipynb +1 -0
  4. run.py +16 -0
README.md CHANGED
@@ -1,12 +1,12 @@
+
 ---
-title: Test Chatinterface Streaming Echo Main
-emoji: 📚
-colorFrom: red
-colorTo: red
+title: test_chatinterface_streaming_echo_main
+emoji: 🔥
+colorFrom: indigo
+colorTo: indigo
 sdk: gradio
 sdk_version: 4.19.2
-app_file: app.py
+app_file: run.py
 pinned: false
+hf_oauth: true
 ---
-
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
requirements.txt ADDED
@@ -0,0 +1 @@
+https://gradio-builds.s3.amazonaws.com/eda33b3763897a542acf298e523fa493dc655aee/gradio-4.19.2-py3-none-any.whl
run.ipynb ADDED
@@ -0,0 +1 @@
+{"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: test_chatinterface_streaming_echo"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "runs = 0\n", "\n", "\n", "def slow_echo(message, history):\n", "    global runs  # i didn't want to add state or anything to this demo\n", "    runs = runs + 1\n", "    for i in range(len(message)):\n", "        yield f\"Run {runs} - You typed: \" + message[: i + 1]\n", "\n", "\n", "demo = gr.ChatInterface(slow_echo).queue()\n", "\n", "if __name__ == \"__main__\":\n", "    demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
run.py ADDED
@@ -0,0 +1,16 @@
+import gradio as gr
+
+runs = 0
+
+
+def slow_echo(message, history):
+    global runs  # i didn't want to add state or anything to this demo
+    runs = runs + 1
+    for i in range(len(message)):
+        yield f"Run {runs} - You typed: " + message[: i + 1]
+
+
+demo = gr.ChatInterface(slow_echo).queue()
+
+if __name__ == "__main__":
+    demo.launch()
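
For reference, a quick local check of the streaming behavior. This is a minimal sketch, assuming run.py is importable and the gradio wheel from requirements.txt is installed; the `run` import and the call below are illustrative, not part of the commit. Because slow_echo is a generator, gr.ChatInterface streams each yielded string as a progressively longer reply.

import run  # assumption: run.py is on the import path; importing builds (but does not launch) the demo

# slow_echo yields one progressively longer prefix of the message per character typed.
chunks = list(run.slow_echo("hi", history=[]))
print(chunks)  # first call prints ['Run 1 - You typed: h', 'Run 1 - You typed: hi']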