Futuresony committed on
Commit
8a11c03
·
verified ·
1 Parent(s): 8c8e15f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +48 -135
app.py CHANGED
@@ -1,152 +1,65 @@
1
- from flask import Flask, request, jsonify, render_template
2
  from huggingface_hub import InferenceClient
3
- import os
4
 
5
- # Initialize the Flask app
6
- app = Flask(__name__)
7
-
8
- # Initialize the Hugging Face Inference Client
9
- client = InferenceClient("Futuresony/future_ai_12_10_2024.gguf")
10
-
11
- # HTML template for the app
12
- HTML_TEMPLATE = """
13
- <!DOCTYPE html>
14
- <html lang="en">
15
- <head>
16
- <meta charset="UTF-8">
17
- <meta name="viewport" content="width=device-width, initial-scale=1.0">
18
- <title>Chat Interface</title>
19
- <style>
20
- body {
21
- font-family: Arial, sans-serif;
22
- margin: 20px;
23
- }
24
- .container {
25
- max-width: 800px;
26
- margin: auto;
27
- }
28
- .chat-box {
29
- border: 1px solid #ccc;
30
- padding: 10px;
31
- border-radius: 5px;
32
- background-color: #f9f9f9;
33
- height: 400px;
34
- overflow-y: auto;
35
- }
36
- .chat-input {
37
- width: 100%;
38
- padding: 10px;
39
- margin-top: 10px;
40
- border: 1px solid #ccc;
41
- border-radius: 5px;
42
- }
43
- .response {
44
- background-color: #e8f5e9;
45
- border: 1px solid #81c784;
46
- padding: 8px;
47
- margin-top: 5px;
48
- border-radius: 5px;
49
- white-space: pre-wrap; /* Preserve formatting */
50
- }
51
- .bold-italic {
52
- font-weight: bold;
53
- font-style: italic;
54
- }
55
- .code-box {
56
- background-color: #f4f4f4;
57
- border: 1px solid #ddd;
58
- padding: 10px;
59
- font-family: monospace;
60
- white-space: pre-wrap;
61
- overflow-x: auto;
62
- border-radius: 5px;
63
- }
64
- </style>
65
- </head>
66
- <body>
67
- <div class="container">
68
- <h1>Chat Interface</h1>
69
- <div class="chat-box" id="chat-box">
70
- <!-- Chat history will appear here -->
71
- </div>
72
- <textarea id="message" class="chat-input" placeholder="Type your message here..."></textarea>
73
- <button onclick="sendMessage()">Send</button>
74
- </div>
75
- <script>
76
- function sendMessage() {
77
- const message = document.getElementById("message").value;
78
- const chatBox = document.getElementById("chat-box");
79
-
80
- if (!message.trim()) return; // Don't send empty messages
81
-
82
- // Append the user's message to the chat box
83
- const userMessage = document.createElement("div");
84
- userMessage.textContent = "You: " + message;
85
- chatBox.appendChild(userMessage);
86
-
87
- // Clear the input field
88
- document.getElementById("message").value = "";
89
-
90
- // Send the message to the server
91
- fetch("/respond", {
92
- method: "POST",
93
- headers: {
94
- "Content-Type": "application/json",
95
- },
96
- body: JSON.stringify({ message: message }),
97
- })
98
- .then((response) => response.json())
99
- .then((data) => {
100
- // Append the assistant's response to the chat box
101
- const botMessage = document.createElement("div");
102
- botMessage.innerHTML = `<div class="response">${data.response}</div>`;
103
- chatBox.appendChild(botMessage);
104
-
105
- // Scroll to the bottom of the chat box
106
- chatBox.scrollTop = chatBox.scrollHeight;
107
- })
108
- .catch((error) => console.error("Error:", error));
109
- }
110
- </script>
111
- </body>
112
- </html>
113
  """
 
 
 
114
 
115
- @app.route("/")
116
- def home():
117
- return HTML_TEMPLATE
118
 
119
- @app.route("/respond", methods=["POST"])
120
- def respond():
121
- # Extract data from the request
122
- data = request.json
123
- message = data.get("message", "")
 
 
 
 
124
 
125
- # Define system message
126
- system_message = "You are a friendly chatbot."
 
 
 
127
 
128
- # Build the chat history and message
129
- messages = [{"role": "system", "content": system_message}]
130
  messages.append({"role": "user", "content": message})
131
 
132
- # Call the Hugging Face API
133
  response = ""
134
- for msg in client.chat_completion(
135
- messages=messages,
136
- max_tokens=512,
 
137
  stream=True,
138
- temperature=0.7,
139
- top_p=0.95,
140
  ):
141
- token = msg.choices[0].delta.content
 
142
  response += token
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
143
 
144
- # Return the response in bold italic format
145
- response = response.replace("**", '<span class="bold-italic">').replace("**", "</span>")
146
- return jsonify({"response": response})
147
 
148
  if __name__ == "__main__":
149
- # Use PORT environment variable or default to 7860
150
- port = int(os.getenv("PORT", 7860))
151
- app.run(host="0.0.0.0", port=port)
152
 
 
1
+ import gradio as gr
2
  from huggingface_hub import InferenceClient
 
3
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4
"""
For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
"""
# Model repository served through the Hugging Face Inference API.
MODEL_ID = "Futuresony/future_ai_12_10_2024.gguf"
client = InferenceClient(MODEL_ID)
8
 
 
 
 
9
 
10
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    """Stream a chat completion for *message* given the prior *history*.

    Args:
        message: Latest user message.
        history: Prior (user, assistant) turn pairs; empty turns are skipped.
        system_message: System prompt placed first in the conversation.
        max_tokens: Maximum number of new tokens to generate.
        temperature: Sampling temperature.
        top_p: Nucleus-sampling probability mass.

    Yields:
        The accumulated assistant response after each streamed token.
    """
    messages = [{"role": "system", "content": system_message}]

    # Rebuild the conversation from history, skipping empty turns.
    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})

    messages.append({"role": "user", "content": message})

    response = ""

    # Loop variable renamed from `message` — the original shadowed the
    # user-message parameter.
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        # The final streamed chunk may carry a None delta; guard it so
        # `response += token` cannot raise TypeError.
        token = chunk.choices[0].delta.content or ""
        response += token
        yield response
41
+
42
+
43
"""
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
"""
# Extra controls rendered under the chat box; their current values are passed
# to `respond` after (message, history), in this order.
_system_box = gr.Textbox(value="You are a friendly Chatbot.", label="System message")
_max_tokens_slider = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
_temperature_slider = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
_top_p_slider = gr.Slider(
    minimum=0.1,
    maximum=1.0,
    value=0.95,
    step=0.05,
    label="Top-p (nucleus sampling)",
)

demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        _system_box,
        _max_tokens_slider,
        _temperature_slider,
        _top_p_slider,
    ],
)
61
 
 
 
 
62
 
63
# Start the Gradio server when this file is run as a script.
if __name__ == "__main__":
    demo.launch()
 
 
65