alperall committed
Commit 6d1c542 · verified
1 Parent(s): 02b985f

Delete app.py

Files changed (1)
  1. app.py +0 -137
app.py DELETED
@@ -1,137 +0,0 @@
-import base64
-import gradio as gr
-import json
-import mimetypes
-import os
-import requests
-import time
-
-
-MODEL_VERSION = os.environ['MODEL_VERSION']
-API_URL = os.environ['API_URL']
-API_KEY = os.environ['API_KEY']
-SYSTEM_PROMPT = os.environ.get('SYSTEM_PROMPT')
-MULTIMODAL_FLAG = os.environ.get('MULTIMODAL')
-MODEL_CONTROL_DEFAULTS = json.loads(os.environ['MODEL_CONTROL_DEFAULTS'])
-NAME_MAP = {
-    'system': os.environ.get('SYSTEM_NAME'),
-    'user': os.environ.get('USER_NAME'),
-}
-
-
-def respond(
-    message,
-    history,
-    max_tokens,
-    temperature,
-    top_p,
-):
-    messages = []
-    if SYSTEM_PROMPT is not None:
-        messages.append({
-            'role': 'system',
-            'content': SYSTEM_PROMPT,
-        })
-    for val in history:
-        messages.append({
-            'role': val['role'],
-            'content': convert_content(val['content']),
-        })
-    messages.append({
-        'role': 'user',
-        'content': convert_content(message),
-    })
-    for message in messages:
-        add_name_for_message(message)
-
-    data = {
-        'model': MODEL_VERSION,
-        'messages': messages,
-        'stream': True,
-        'max_tokens': max_tokens,
-        'temperature': temperature,
-        'top_p': top_p,
-    }
-    r = requests.post(
-        API_URL,
-        headers={
-            'Content-Type': 'application/json',
-            'Authorization': 'Bearer {}'.format(API_KEY),
-        },
-        data=json.dumps(data),
-        stream=True,
-    )
-    reply = ''
-    for row in r.iter_lines():
-        if row.startswith(b'data:'):
-            data = json.loads(row[5:])
-            if 'choices' not in data:
-                raise gr.Error('request failed')
-            choice = data['choices'][0]
-            if 'delta' in choice:
-                reply += choice['delta']['content']
-                yield reply
-            elif 'message' in choice:
-                yield choice['message']['content']
-
-
-def add_name_for_message(message):
-    name = NAME_MAP.get(message['role'])
-    if name is not None:
-        message['name'] = name
-
-
-def convert_content(content):
-    if isinstance(content, str):
-        return content
-    if isinstance(content, tuple):
-        return [{
-            'type': 'image_url',
-            'image_url': {
-                'url': encode_base64(content[0]),
-            },
-        }]
-    content_list = []
-    for key, val in content.items():
-        if key == 'text':
-            content_list.append({
-                'type': 'text',
-                'text': val,
-            })
-        elif key == 'files':
-            for f in val:
-                content_list.append({
-                    'type': 'image_url',
-                    'image_url': {
-                        'url': encode_base64(f),
-                    },
-                })
-    return content_list
-
-
-def encode_base64(path):
-    guess_type = mimetypes.guess_type(path)[0]
-    if not guess_type.startswith('image/'):
-        raise gr.Error('not an image ({}): {}'.format(guess_type, path))
-    with open(path, 'rb') as handle:
-        data = handle.read()
-    return 'data:{};base64,{}'.format(
-        guess_type,
-        base64.b64encode(data).decode(),
-    )
-
-
-demo = gr.ChatInterface(
-    respond,
-    multimodal=MULTIMODAL_FLAG == 'ON',
-    type='messages',
-    additional_inputs=[
-        gr.Slider(minimum=1, maximum=1000000, value=MODEL_CONTROL_DEFAULTS['tokens_to_generate'], step=1, label='Tokens to generate'),
-        gr.Slider(minimum=0.1, maximum=1.0, value=MODEL_CONTROL_DEFAULTS['temperature'], step=0.05, label='Temperature'),
-        gr.Slider(minimum=0.1, maximum=1.0, value=MODEL_CONTROL_DEFAULTS['top_p'], step=0.05, label='Top-p (nucleus sampling)'),
-    ],
-)
-
-
-if __name__ == '__main__':
-    demo.queue(default_concurrency_limit=50).launch()
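
For context, the deleted app.py was a Gradio chat front end that streamed completions from an OpenAI-compatible chat endpoint and configured itself entirely from environment variables. The sketch below lists the variables the file read, with names taken from the diff above; every value shown is a hypothetical placeholder, not a real model, endpoint, key, or default.

import json
import os

# Required by the deleted app.py (all values here are placeholders)
os.environ['MODEL_VERSION'] = 'example-model'  # model identifier sent in the request body
os.environ['API_URL'] = 'https://api.example.com/v1/chat/completions'  # OpenAI-compatible chat endpoint
os.environ['API_KEY'] = 'sk-placeholder'  # sent as the Bearer token in the Authorization header
os.environ['MODEL_CONTROL_DEFAULTS'] = json.dumps({  # default values for the UI sliders
    'tokens_to_generate': 1024,
    'temperature': 0.7,
    'top_p': 0.9,
})

# Optional
os.environ['SYSTEM_PROMPT'] = 'You are a helpful assistant.'  # prepended as a system message when set
os.environ['MULTIMODAL'] = 'ON'  # 'ON' enabled image uploads in gr.ChatInterface
os.environ['SYSTEM_NAME'] = 'system'  # optional name fields attached to messages
os.environ['USER_NAME'] = 'user'

With these set, the file launched the interface via demo.queue(default_concurrency_limit=50).launch() when run as a script.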