Upload folder using huggingface_hub
README.md
CHANGED
@@ -1,12 +1,12 @@
+
 ---
-title:
-emoji:
-colorFrom:
-colorTo:
+title: llm_sambanova_main
+emoji: 🔥
+colorFrom: indigo
+colorTo: indigo
 sdk: gradio
 sdk_version: 5.7.0
-app_file:
+app_file: run.py
 pinned: false
+hf_oauth: true
 ---
-
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
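The new front matter also sets hf_oauth: true, which turns on the "Sign in with Hugging Face" flow for the Space, even though run.py in this commit never reads the login state. If the app did want to know who is signed in, a minimal sketch (assuming Gradio's gr.LoginButton component and gr.OAuthProfile injection, neither of which appears in this commit) could look like this:

import gradio as gr

# Hypothetical OAuth-aware demo; not part of this commit.
# On a Space with hf_oauth: true, Gradio injects gr.OAuthProfile (or None if
# the visitor has not signed in) into functions that declare it as a parameter.
def greet(profile: gr.OAuthProfile | None) -> str:
    if profile is None:
        return "Not signed in."
    return f"Hello, {profile.username}!"

with gr.Blocks() as demo:
    gr.LoginButton()
    status = gr.Markdown()
    demo.load(greet, inputs=None, outputs=status)

if __name__ == "__main__":
    demo.launch()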
requirements.txt
ADDED
@@ -0,0 +1,3 @@
+gradio-client @ git+https://github.com/gradio-app/gradio@afd75def9e979d9b255a6d22d33a3aee63b6f225#subdirectory=client/python
+https://gradio-pypi-previews.s3.amazonaws.com/afd75def9e979d9b255a6d22d33a3aee63b6f225/gradio-5.7.0-py3-none-any.whl
+openai>=1.0.0
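Note that the first two requirements pin the Gradio Python client and a 5.7.0 preview wheel to commit afd75def9e979d9b255a6d22d33a3aee63b6f225 rather than to a PyPI release. The Space installs them automatically on build; the same set can be reproduced locally with pip install -r requirements.txt (assuming a Python and pip version compatible with Gradio 5).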
run.ipynb
ADDED
@@ -0,0 +1 @@
+{"cells": [{"cell_type": "markdown", "id": "302934307671667531413257853548643485645", "metadata": {}, "source": ["# Gradio Demo: llm_sambanova"]}, {"cell_type": "code", "execution_count": null, "id": "272996653310673477252411125948039410165", "metadata": {}, "outputs": [], "source": ["!pip install -q gradio openai>=1.0.0 "]}, {"cell_type": "code", "execution_count": null, "id": "288918539441861185822528903084949547379", "metadata": {}, "outputs": [], "source": ["# This is a simple general-purpose chatbot built on top of SambaNova API. \n", "# Before running this, make sure you have exported your SambaNova API key as an environment variable:\n", "# export SAMBANOVA_API_KEY=\"your-sambanova-api-key\"\n", "\n", "import os\n", "import gradio as gr\n", "from openai import OpenAI\n", "\n", "api_key = os.getenv(\"SAMBANOVA_API_KEY\")\n", "\n", "client = OpenAI(\n", "    base_url=\"https://api.sambanova.ai/v1/\",\n", "    api_key=api_key,\n", ")\n", "\n", "def predict(message, history):\n", "    history.append({\"role\": \"user\", \"content\": message})\n", "    stream = client.chat.completions.create(messages=history, model=\"Meta-Llama-3.1-70B-Instruct-8k\", stream=True)\n", "    chunks = []\n", "    for chunk in stream:\n", "        chunks.append(chunk.choices[0].delta.content or \"\")\n", "    yield \"\".join(chunks)\n", "\n", "demo = gr.ChatInterface(predict, type=\"messages\")\n", "\n", "if __name__ == \"__main__\":\n", "    demo.launch()\n", "\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}
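One caveat on the notebook's install cell: when !pip install -q gradio openai>=1.0.0 is handed to a POSIX shell, the unquoted >=1.0.0 can be parsed as an output redirection, so pip may see only "openai" without the version bound; quoting the requirement, e.g. !pip install -q gradio "openai>=1.0.0", sidesteps this. This is general shell behavior, not something changed by this commit.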
run.py
ADDED
@@ -0,0 +1,28 @@
+# This is a simple general-purpose chatbot built on top of SambaNova API.
+# Before running this, make sure you have exported your SambaNova API key as an environment variable:
+# export SAMBANOVA_API_KEY="your-sambanova-api-key"
+
+import os
+import gradio as gr
+from openai import OpenAI
+
+api_key = os.getenv("SAMBANOVA_API_KEY")
+
+client = OpenAI(
+    base_url="https://api.sambanova.ai/v1/",
+    api_key=api_key,
+)
+
+def predict(message, history):
+    history.append({"role": "user", "content": message})
+    stream = client.chat.completions.create(messages=history, model="Meta-Llama-3.1-70B-Instruct-8k", stream=True)
+    chunks = []
+    for chunk in stream:
+        chunks.append(chunk.choices[0].delta.content or "")
+    yield "".join(chunks)
+
+demo = gr.ChatInterface(predict, type="messages")
+
+if __name__ == "__main__":
+    demo.launch()
+
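Once the app is running (for example via export SAMBANOVA_API_KEY=... followed by python run.py, or on the Space itself), the gradio-client package pinned in requirements.txt can exercise the chat endpoint programmatically. A minimal sketch, assuming the app is reachable at http://127.0.0.1:7860 and that gr.ChatInterface exposes its handler under the default /chat API name:

from gradio_client import Client

# The URL below is an assumption for a local run; pass the Space id
# (e.g. "owner/space-name") instead when querying the hosted app.
client = Client("http://127.0.0.1:7860/")

# ChatInterface registers its submit function as the "/chat" endpoint;
# predict() waits for the stream to finish and returns the final reply string.
reply = client.predict("Hello! What can you do?", api_name="/chat")
print(reply)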