prxyasd committed on
Commit e17782d · verified · 1 Parent(s): 3a5a2dc

Update app.py

Files changed (1)
  1. app.py +414 -29

app.py CHANGED
@@ -1,92 +1,468 @@
 
 
 import asyncio
 import gradio as gr
-import requests, re
-# … (omitted) …

-# ────────────────────────────────────
-# 1. Move the existing logic into a single-key handler
-# ────────────────────────────────────
-async def process_single_key(key, rate_limit, claude_model):
     _key = key.strip()

-    if re.match(re.compile("sk-or-v1-[a-z0-9]{64}"), _key):
         return {"key": _key, **get_key_openrouter_info(_key)}

-    if re.match(re.compile("sk-ant-api03-[a-zA-Z0-9\\-_]{93}AA"), _key) or (
         _key.startswith("sk-ant-") and len(_key) == 93
-    ) or (len(_key) == 89 and re.match(re.compile("sk-[a-zA-Z0-9]{86}"), _key)):
         return {"key": _key, **await get_key_ant_info(_key, rate_limit, claude_model)}

     if re.match(re.compile(r"sk-[a-zA-Z0-9]{48}"), _key) and len(_key) == 51 and "T3BlbkFJ" not in _key:
         return {"key": _key, **get_key_stability_info(_key)}

     if re.match(re.compile(r"sk-[a-f0-9]{32}"), _key):
         return {"key": _key, **get_key_deepseek_info(_key)}

     if _key.startswith("sk-"):
         return {"key": _key, **get_key_oai_info(_key)}

     if _key.startswith("AIzaSy"):
         return {"key": _key, **get_key_gemini_info(_key)}

     if _key.startswith("pst-"):
         return {"key": _key, **get_key_nai_info(_key)}

     if (_key.startswith("r8_") and len(_key) == 40) or (_key.islower() and len(_key) == 40):
         return {"key": _key, **get_key_replicate_info(_key)}

     if _key.startswith("xai-"):
         return {"key": _key, **get_key_xai_info(_key)}

     if len(_key.split(":")) == 2 and _key.split(":")[1].islower() and len(_key.split(":")[1]) == 32 and "openai.azure.com" not in _key.split(":")[1]:
         endpoint, api_key = _key.split(":")
         return {"key": _key, **get_key_azure_info(endpoint, api_key)}

     if "openai.azure.com" in _key.split(";")[0]:
         endpoint, api_key = _key.split(";")
         return {"key": _key, **get_key_azure_info(endpoint, api_key)}

     if _key.startswith("AKIA") and len(_key.split(":")[0]) == 20 and _key.split(":")[0].isupper():
         return {"key": _key, **await get_key_aws_info(_key)}

     if re.match(re.compile(r"[a-f0-9]{32}"), _key) or re.match(re.compile(r"sk_[a-f0-9]{48}"), _key):
         return {"key": _key, **get_key_elevenlabs_info(_key)}

     if re.match(re.compile(r"[a-zA-Z0-9]{32}"), _key):
         return {"key": _key, **get_key_mistral_info(_key)}

     if re.match(re.compile(r"gsk_[a-zA-Z0-9]{20}WGdyb3FY[a-zA-Z0-9]{24}"), _key):
         return {"key": _key, **get_key_groq_info(_key)}

-    if re.match(re.compile(r"[\\w\\-]+:[\\w\\-@\\.]+:[\\w-]+:.+"), _key):
         return {"key": _key, **await get_key_gcp_info(_key, 0)}

-    if re.match(re.compile(r"[\\w\\-]+:[\\w\\-@\\.]+:.+\\n"), _key):
         return {"key": _key, **await get_key_gcp_info(_key, 1)}

     return {"key": _key, **not_supported(_key)}

-# ────────────────────────────────────
-# 2. Process multiple keys at once
-# ────────────────────────────────────
-async def sort_keys(text, rate_limit, claude_model):
-    keys = [k for k in text.splitlines() if k.strip()]
     tasks = [process_single_key(k, rate_limit, claude_model) for k in keys]
     results = await asyncio.gather(*tasks)
-    return results  # the JSON component displays lists just fine

-def clear_inputs(text):  # unchanged
     return ""

-# ────────────────────────────────────
-# 3. UI – enlarge the input box and wire up the new functions
-# ────────────────────────────────────
 with gr.Blocks() as demo:
     gr.Markdown(
         """
-# … (omitted) …
-        """
     )
     claude_options = [
         "claude-3-haiku-20240307",
         "claude-3-sonnet-20240229",
@@ -95,26 +471,35 @@ with gr.Blocks() as demo:
         "claude-3-5-sonnet-20241022",
         "claude-3-5-haiku-20241022",
     ]
     with gr.Row():
         with gr.Column():
-            key = gr.Textbox(
-                lines=5,  # multi-line input
                 max_lines=20,
                 label="API Key(s) — enter multiple keys, one per line",
             )
             claude_model = gr.Dropdown(
                 claude_options,
                 value="claude-3-haiku-20240307",
-                label="Claude API model (for rate-limit check)",
             )
             rate_limit = gr.Checkbox(label="Check concurrent rate limit (Claude, experimental)")
             with gr.Row():
                 clear_button = gr.Button("Clear")
                 submit_button = gr.Button("Submit", variant="primary")
         with gr.Column():
             info = gr.JSON(label="API Key Information", open=True)

-    clear_button.click(fn=clear_inputs, inputs=[key], outputs=[key])
-    submit_button.click(fn=sort_keys, inputs=[key, rate_limit, claude_model], outputs=[info], api_name="sort_keys")

-demo.launch()
+# Full updated code; everything is included in the original order with nothing omitted.
+
 import asyncio
 import gradio as gr
+import requests
+import re
+
+from api_usage import (
+    get_subscription,
+    check_key_availability,
+    get_orgs_me,
+    check_key_ant_availability,
+    check_ant_rate_limit,
+    check_key_gemini_availability,
+    check_key_azure_availability,
+    get_azure_status,
+    get_azure_deploy,
+    check_key_mistral_availability,
+    check_mistral_quota,
+    check_key_replicate_availability,
+    check_key_aws_availability,
+    check_key_or_availability,
+    check_key_or_limits,
+    check_gcp_anthropic,
+    check_groq_status,
+    check_nai_status,
+    check_elevenlabs_status,
+    check_xai_status,
+    check_stability_status,
+    check_deepseek_status,
+)
+
+# ─────────────────────────────────────────
+# Key-specific helper functions (kept from the original)
+# ─────────────────────────────────────────
+def get_key_oai_info(key):
+    session = requests.Session()
+    status, org_data = check_key_availability(session, key)
+
+    info_dict = {
+        "key_type": "OpenAI",
+        "key_availability": True if status else False,
+        "gpt4_availability": "",
+        "gpt4_32k_availability": "",
+        "default_org": "",
+        "org_description": "",
+        "organization": "",
+        "models": "",
+        "requests_per_minute": "",
+        "tokens_per_minute": "",
+        "quota": "",
+        "all_models": "",
+    }
+
+    if not status:
+        return info_dict
+
+    if status == 403:
+        status_me, orgs_me = get_orgs_me(session, key)
+        if status_me == 200:
+            org_data = orgs_me
+
+    subscription_info = get_subscription(key, session, org_data)
+
+    info_dict.update(
+        {
+            "gpt4_availability": subscription_info["has_gpt4"],
+            "gpt4_32k_availability": subscription_info["has_gpt4_32k"],
+            "default_org": subscription_info["default_org"],
+            "org_description": subscription_info["org_description"],
+            "organization": subscription_info["organization"],
+            "models": subscription_info["models"],
+            "requests_per_minute": subscription_info["rpm"],
+            "tokens_per_minute": subscription_info["tpm"],
+            "quota": subscription_info["quota"],
+            "all_models": subscription_info["all_models"],
+        }
+    )
+    return info_dict
+
+
+async def get_key_ant_info(key, rate_limit, claude_model):
+    key_avai = await check_key_ant_availability(key, claude_model)
+    info_dict = {
+        "key_type": "Anthropic Claude",
+        "key_availability": key_avai[0],
+        "status": "",
+        "filter_response": "",
+        "requests_per_minute": "",
+        "tokens_per_minute": "",
+        "tokens_input_per_minute": "",
+        "tokens_output_per_minute": "",
+        "tier": "",
+        "concurrent_rate_limit": "",
+        "models": "",
+    }
+
+    info_dict["status"] = key_avai[1]
+    info_dict["filter_response"] = key_avai[2]
+    info_dict["requests_per_minute"] = key_avai[3] + ("" if key_avai[3] == "" else f" ({key_avai[4]} left)")
+    info_dict["tokens_per_minute"] = key_avai[5] + ("" if key_avai[5] == "" else f" ({key_avai[6]} left)")
+    info_dict["tokens_input_per_minute"] = key_avai[8] + ("" if key_avai[8] == "" else f" ({key_avai[9]} left)")
+    info_dict["tokens_output_per_minute"] = key_avai[10] + ("" if key_avai[10] == "" else f" ({key_avai[11]} left)")
+    info_dict["tier"] = key_avai[7]
+    info_dict["models"] = key_avai[12]
+
+    if rate_limit:
+        rate = await check_ant_rate_limit(key, claude_model)
+        info_dict["concurrent_rate_limit"] = rate
+
+    return info_dict
+
+
+def get_key_gemini_info(key):
+    key_avai = check_key_gemini_availability(key)
+    info_dict = {
+        "key_type": "Google Gemini",
+        "key_availability": key_avai[0],
+        "status": key_avai[1],
+        "models": key_avai[2],
+    }
+    return info_dict
+
+
+def get_key_azure_info(endpoint, api_key):
+    key_avai = check_key_azure_availability(endpoint, api_key)
+    info_dict = {
+        "key_type": "Microsoft Azure OpenAI",
+        "key_availability": key_avai[0],
+        "gpt35_availability": "",
+        "gpt4_availability": "",
+        "gpt4_32k_availability": "",
+        "dall_e_3_availability": "",
+        "moderation_status": "",
+        "models": "",
+        "deployments": "",
+    }
+    if key_avai[0]:
+        azure_deploy = get_azure_deploy(endpoint, api_key)
+        status = get_azure_status(endpoint, api_key, azure_deploy)
+        info_dict["gpt35_availability"] = status[1]
+        info_dict["gpt4_availability"] = status[2]
+        info_dict["gpt4_32k_availability"] = status[3]
+        info_dict["dall_e_3_availability"] = status[4]
+        info_dict["moderation_status"] = status[0]
+        info_dict["models"] = key_avai[1]
+        info_dict["deployments"] = azure_deploy
+    return info_dict
+
+
+def get_key_mistral_info(key):
+    key_avai = check_key_mistral_availability(key)
+    info_dict = {
+        "key_type": "Mistral AI",
+        "key_availability": True if key_avai else False,
+        "has_quota": "",
+        "limits": "",
+        "models": "",
+    }
+    if key_avai:
+        quota_info = check_mistral_quota(key)
+        info_dict["has_quota"] = quota_info[0]
+        if quota_info[1]:
+            info_dict["limits"] = quota_info[1]
+        info_dict["models"] = key_avai
+    return info_dict
+
+
+def get_key_replicate_info(key):
+    key_avai = check_key_replicate_availability(key)
+    info_dict = {
+        "key_type": "Replicate",
+        "key_availability": key_avai[0],
+        "account_name": "",
+        "type": "",
+        "has_quota": "",
+        "hardware_available": "",
+    }
+    if key_avai[0]:
+        info_dict["account_name"] = key_avai[1]["username"]
+        info_dict["type"] = key_avai[1]["type"]
+        info_dict["has_quota"] = key_avai[2]
+        info_dict["hardware_available"] = key_avai[3]
+    return info_dict
+
+
+async def get_key_aws_info(key):
+    key_avai = await check_key_aws_availability(key)
+    info_dict = {
+        "key_type": "Amazon AWS Claude",
+        "key_availability": key_avai[0],
+        "username": "",
+        "root": "",
+        "admin": "",
+        "quarantine": "",
+        "iam_full_access": "",
+        "iam_user_change_password": "",
+        "aws_bedrock_full_access": "",
+        "enabled_region": "",
+        "models_usage": "",
+        "cost_and_usage": key_avai[1],
+    }
+    if key_avai[0]:
+        info_dict["username"] = key_avai[1]
+        info_dict["root"] = key_avai[2]
+        info_dict["admin"] = key_avai[3]
+        info_dict["quarantine"] = key_avai[4]
+        info_dict["iam_full_access"] = key_avai[5]
+        info_dict["iam_user_change_password"] = key_avai[6]
+        info_dict["aws_bedrock_full_access"] = key_avai[7]
+        info_dict["enabled_region"] = key_avai[8]
+        info_dict["models_usage"] = key_avai[9]
+        info_dict["cost_and_usage"] = key_avai[10]
+    return info_dict
+
+
+def get_key_openrouter_info(key):
+    key_avai = check_key_or_availability(key)
+    info_dict = {
+        "key_type": "OpenRouter",
+        "key_availability": key_avai[0],
+        "is_free_tier": "",
+        "usage": "",
+        "balance": "",
+        "limit": "",
+        "limit_remaining": "",
+        "rate_limit_per_minite": "",
+        "4_turbo_per_request_tokens_limit": "",
+        "sonnet_per_request_tokens_limit": "",
+        "opus_per_request_tokens_limit": "",
+    }
+    if key_avai[0]:
+        models_info = check_key_or_limits(key)
+        info_dict["is_free_tier"] = key_avai[1]["is_free_tier"]
+        info_dict["limit"] = key_avai[1]["limit"]
+        info_dict["limit_remaining"] = key_avai[1]["limit_remaining"]
+        info_dict["usage"] = f"${format(key_avai[1]['usage'], '.4f')}"
+        info_dict["balance"] = (
+            f"${format(models_info[0], '.4f')}" if models_info[0] else f"${key_avai[2]/60} (estimated)"
+        )
+        info_dict["rate_limit_per_minite"] = key_avai[2]
+        info_dict["4_turbo_per_request_tokens_limit"] = models_info[1]["openai/gpt-4o"]
+        info_dict["sonnet_per_request_tokens_limit"] = models_info[1]["anthropic/claude-3.5-sonnet:beta"]
+        info_dict["opus_per_request_tokens_limit"] = models_info[1]["anthropic/claude-3-opus:beta"]
+    else:
+        info_dict["usage"] = key_avai[1]
+    return info_dict
+
+async def get_key_gcp_info(key, type):
+    key_avai = await check_gcp_anthropic(key, type)
+    info_dict = {
+        "key_type": "Vertex AI (GCP)",
+        "key_availability": key_avai[0],
+        "status": "",
+        "enabled_region": "",
+    }
+    if key_avai[0]:
+        info_dict["enabled_region"] = key_avai[2]
+    else:
+        info_dict["status"] = key_avai[1]
+    return info_dict
+
+
+def get_key_groq_info(key):
+    key_avai = check_groq_status(key)
+    info_dict = {
+        "key_type": "Groq",
+        "key_availability": True if key_avai else False,
+        "models": key_avai if key_avai else "",
+    }
+    return info_dict
+
+
+def get_key_nai_info(key):
+    key_avai = check_nai_status(key)
+    info_dict = {
+        "key_type": "NovelAI",
+        "key_availability": True if key_avai[0] else False,
+        "user_info": key_avai[1] if key_avai[0] else "",
+    }
+    return info_dict
+
+
+def get_key_elevenlabs_info(key):
+    key_avai = check_elevenlabs_status(key)
+    info_dict = {
+        "key_type": "ElevenLabs",
+        "key_availability": key_avai[0],
+        "user_info": key_avai[1],
+        "voices_info": key_avai[2],
+    }
+    return info_dict
+
+
+def get_key_xai_info(key):
+    key_avai = check_xai_status(key)
+    info_dict = {
+        "key_type": "xAI Grok",
+        "key_availability": key_avai[0],
+        "key_status": "",
+        "models": "",
+    }
+    if key_avai[0]:
+        info_dict["key_status"] = key_avai[1]
+        info_dict["models"] = key_avai[2]
+    return info_dict
+
+
+def get_key_stability_info(key):
+    key_avai = check_stability_status(key)
+    info_dict = {
+        "key_type": "Stability AI",
+        "key_availability": key_avai[0],
+        "account_info": "",
+        "credits": "",
+        "models": "",
+    }
+    if key_avai[0]:
+        info_dict["account_info"] = key_avai[1]
+        info_dict["credits"] = key_avai[2]
+        info_dict["models"] = key_avai[3]
+    return info_dict
+
+
+def get_key_deepseek_info(key):
+    key_avai = check_deepseek_status(key)
+    info_dict = {
+        "key_type": "deepseek",
+        "key_availability": key_avai[0],
+        "balance": "",
+        "models": "",
+    }
+    if key_avai[0]:
+        info_dict["models"] = key_avai[1]
+        info_dict["balance"] = key_avai[2]
+    return info_dict
+
+
+def not_supported(key):
+    return {
+        "key_type": "Not supported",
+        "status": "",
+    }
+
+
+# ─────────────────────────────────────────
+# Newly added: asynchronous single-key processing
+# ─────────────────────────────────────────
+async def process_single_key(key: str, rate_limit: bool, claude_model: str) -> dict:
+    """Analyze one key and return its info dict."""
     _key = key.strip()

+    # OpenRouter
+    if re.match(re.compile(r"sk-or-v1-[a-z0-9]{64}"), _key):
         return {"key": _key, **get_key_openrouter_info(_key)}

+    # Anthropic Claude
+    if re.match(re.compile(r"sk-ant-api03-[a-zA-Z0-9\-_]{93}AA"), _key) or (
         _key.startswith("sk-ant-") and len(_key) == 93
+    ) or (len(_key) == 89 and re.match(re.compile(r"sk-[a-zA-Z0-9]{86}"), _key)):
         return {"key": _key, **await get_key_ant_info(_key, rate_limit, claude_model)}

+    # Stability
     if re.match(re.compile(r"sk-[a-zA-Z0-9]{48}"), _key) and len(_key) == 51 and "T3BlbkFJ" not in _key:
         return {"key": _key, **get_key_stability_info(_key)}

+    # Deepseek
     if re.match(re.compile(r"sk-[a-f0-9]{32}"), _key):
         return {"key": _key, **get_key_deepseek_info(_key)}

+    # OpenAI
     if _key.startswith("sk-"):
         return {"key": _key, **get_key_oai_info(_key)}

+    # Google Gemini
     if _key.startswith("AIzaSy"):
         return {"key": _key, **get_key_gemini_info(_key)}

+    # NovelAI
     if _key.startswith("pst-"):
         return {"key": _key, **get_key_nai_info(_key)}

+    # Replicate
     if (_key.startswith("r8_") and len(_key) == 40) or (_key.islower() and len(_key) == 40):
         return {"key": _key, **get_key_replicate_info(_key)}

+    # xAI
     if _key.startswith("xai-"):
         return {"key": _key, **get_key_xai_info(_key)}

+    # Azure endpoint: "name:key"
     if len(_key.split(":")) == 2 and _key.split(":")[1].islower() and len(_key.split(":")[1]) == 32 and "openai.azure.com" not in _key.split(":")[1]:
         endpoint, api_key = _key.split(":")
         return {"key": _key, **get_key_azure_info(endpoint, api_key)}

+    # Azure endpoint: "https://xxx.openai.azure.com;key"
     if "openai.azure.com" in _key.split(";")[0]:
         endpoint, api_key = _key.split(";")
         return {"key": _key, **get_key_azure_info(endpoint, api_key)}

+    # AWS
     if _key.startswith("AKIA") and len(_key.split(":")[0]) == 20 and _key.split(":")[0].isupper():
         return {"key": _key, **await get_key_aws_info(_key)}

+    # ElevenLabs
     if re.match(re.compile(r"[a-f0-9]{32}"), _key) or re.match(re.compile(r"sk_[a-f0-9]{48}"), _key):
         return {"key": _key, **get_key_elevenlabs_info(_key)}

+    # Mistral
     if re.match(re.compile(r"[a-zA-Z0-9]{32}"), _key):
         return {"key": _key, **get_key_mistral_info(_key)}

+    # Groq
     if re.match(re.compile(r"gsk_[a-zA-Z0-9]{20}WGdyb3FY[a-zA-Z0-9]{24}"), _key):
         return {"key": _key, **get_key_groq_info(_key)}

+    # GCP - refresh token
+    if re.match(re.compile(r"[\w\-]+:[\w\-@\.]+:[\w-]+:.+"), _key):
         return {"key": _key, **await get_key_gcp_info(_key, 0)}

+    # GCP - service account
+    if re.match(re.compile(r"[\w\-]+:[\w\-@\.]+:.+\\n"), _key):
         return {"key": _key, **await get_key_gcp_info(_key, 1)}

+    # Not supported
     return {"key": _key, **not_supported(_key)}

+
+# ─────────────────────────────────────────
+# Asynchronous processing of multiple keys
+# ─────────────────────────────────────────
+async def sort_keys(text: str, rate_limit: bool, claude_model: str):
+    """Analyze the keys entered in the textbox, one key per line."""
+    keys = [k.strip() for k in text.splitlines() if k.strip()]
     tasks = [process_single_key(k, rate_limit, claude_model) for k in keys]
     results = await asyncio.gather(*tasks)
+    return results  # the gr.JSON component renders lists automatically
+

+# ─────────────────────────────────────────
+# UI util
+# ─────────────────────────────────────────
+def clear_inputs(text: str):
     return ""

+
+# ─────────────────────────────────────────
+# Gradio UI
+# ─────────────────────────────────────────
 with gr.Blocks() as demo:
     gr.Markdown(
         """
+# OpenAI/Anthropic/Gemini/Azure/Mistral/Replicate/AWS Claude/OpenRouter/Vertex AI(GCP Anthropic)/Groq/NovelAI/ElevenLabs/xAI/Stability/Deepseek API Key Status Checker
+
+*(Based on shaocongma, CncAnon1, su, Drago, kingbased key checkers)*
+
+AWS credential format: `AWS_ACCESS_KEY_ID:AWS_SECRET_ACCESS_KEY` (root might not be accurate)
+Azure format: `RESOURCE_NAME:API_KEY` **or** `https://RESOURCE_NAME.openai.azure.com;API_KEY`
+GCP format:
+• Service account → `PROJECT_ID:CLIENT_EMAIL:PRIVATE_KEY (includes \\n)`
+• Refresh token → `PROJECT_ID:CLIENT_ID:CLIENT_SECRET:REFRESH_TOKEN`
+"""
     )
+
     claude_options = [
         "claude-3-haiku-20240307",
         "claude-3-sonnet-20240229",
         "claude-3-5-sonnet-20241022",
         "claude-3-5-haiku-20241022",
     ]
+
     with gr.Row():
         with gr.Column():
+            key_box = gr.Textbox(
+                lines=5,
                 max_lines=20,
                 label="API Key(s) — enter multiple keys, one per line",
+                placeholder="Enter one key per line",
             )
             claude_model = gr.Dropdown(
                 claude_options,
                 value="claude-3-haiku-20240307",
+                label="Claude API model (for filter/concurrent check)",
             )
             rate_limit = gr.Checkbox(label="Check concurrent rate limit (Claude, experimental)")
+
             with gr.Row():
                 clear_button = gr.Button("Clear")
                 submit_button = gr.Button("Submit", variant="primary")
+
         with gr.Column():
             info = gr.JSON(label="API Key Information", open=True)

+    clear_button.click(fn=clear_inputs, inputs=[key_box], outputs=[key_box])
+    submit_button.click(
+        fn=sort_keys,
+        inputs=[key_box, rate_limit, claude_model],
+        outputs=[info],
+        api_name="sort_keys",
+    )

+demo.launch()
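
Because the Submit handler registers `api_name="sort_keys"`, the checker can also be driven programmatically once the Space is running. A minimal sketch, assuming the Space is publicly reachable and `gradio_client` is installed; the URL and the key strings below are placeholders, not real values:

```python
# Sketch only (not part of the commit): call the /sort_keys endpoint remotely.
from gradio_client import Client

client = Client("https://your-space-name.hf.space")  # placeholder Space URL
result = client.predict(
    "sk-FAKEKEY1\nAIzaSyFAKEKEY2",  # textbox value: one key per line
    False,                          # "Check concurrent rate limit" checkbox
    "claude-3-haiku-20240307",      # Claude model dropdown
    api_name="/sort_keys",
)
print(result)  # a list with one info dict per submitted key
```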