Update app.py
app.py
CHANGED
@@ -32,15 +32,18 @@ from api_usage import (
 )
 
 # ─────────────────────────────────────────
-# Key-specific helper functions (
+# Key-specific helper functions (OpenAI function updated)
 # ─────────────────────────────────────────
 def get_key_oai_info(key: str) -> Dict[str, Any]:
     session = requests.Session()
+    # raw_status_code is the HTTP response code or a similar status indicator
+    # org_data_or_error is the data on success, or error info on failure
+    raw_status_code, org_data_or_error = check_key_availability(session, key)
 
     info_dict = {
         "key_type": "OpenAI",
-        "key_availability":
+        "key_availability": False,
+        "has_sufficient_quota": True,  # assumed True by default; set to False when a problem is found
         "gpt4_availability": "",
         "gpt4_32k_availability": "",
         "default_org": "",
@@ -51,17 +54,63 @@ def get_key_oai_info(key: str) -> Dict[str, Any]:
         "tokens_per_minute": "",
         "quota": "",
         "all_models": "",
+        "status_message": ""
     }
 
+    org_data_for_subscription = None
+
+    if raw_status_code == 200:
+        info_dict["key_availability"] = True
+        org_data_for_subscription = org_data_or_error
+    elif raw_status_code == 401:  # Unauthorized
+        info_dict["status_message"] = "Unauthorized: Invalid API key."
+        info_dict["has_sufficient_quota"] = False
+        return info_dict
+    elif raw_status_code == 403:  # Forbidden
+        status_me, orgs_me_data = get_orgs_me(session, key)
+        if status_me == 200:
+            info_dict["key_availability"] = True
+            org_data_for_subscription = orgs_me_data
+        else:
+            info_dict["status_message"] = f"Forbidden, and get_orgs_me failed (status: {status_me}). Key might be inactive or lack permissions."
+            info_dict["has_sufficient_quota"] = False
+            return info_dict
+    elif raw_status_code == 429:  # Too Many Requests (rate limit or quota)
+        info_dict["key_availability"] = True  # the key itself may still be valid
+        info_dict["has_sufficient_quota"] = False  # treated as insufficient quota
+        info_dict["status_message"] = "Rate limit or quota likely exceeded (initial check)."
+        if isinstance(org_data_or_error, dict) and "error" in org_data_or_error:
+            error_details = org_data_or_error["error"]
+            current_quota_message = error_details.get("message", "Quota details unavailable from initial check")
+            info_dict["quota"] = current_quota_message
+            if "insufficient_quota" not in current_quota_message.lower():  # if the detail message lacks insufficient_quota, append it to the status message
+                info_dict["status_message"] += f" Error: {current_quota_message}"
+        # Returning here would skip the subscription check below, but has_sufficient_quota is already False
+        # return info_dict  # uncomment for an earlier return if needed
+    else:  # other errors
+        info_dict["status_message"] = f"Key check failed (status: {raw_status_code})."
+        if isinstance(org_data_or_error, dict) and "error" in org_data_or_error:
+            info_dict["status_message"] += f" Error: {org_data_or_error['error'].get('message', str(org_data_or_error))}"
+        elif isinstance(org_data_or_error, str):
+            info_dict["status_message"] += f" Details: {org_data_or_error}"
+        info_dict["has_sufficient_quota"] = False
         return info_dict
 
-    if
+    if not info_dict["key_availability"]:
+        return info_dict  # do not proceed further if the key is not valid
+
+    # If org_data_for_subscription was not prepared (e.g. the initial check returned 200 but org_data was unsuitable)
+    if not org_data_for_subscription:
+        status_me, orgs_me_data = get_orgs_me(session, key)
         if status_me == 200:
+            org_data_for_subscription = orgs_me_data
+        else:
+            info_dict["status_message"] = (info_dict["status_message"] + " Could not identify organization for subscription.").strip()
+            info_dict["key_availability"] = False
+            info_dict["has_sufficient_quota"] = False
+            return info_dict
 
-        subscription_info = get_subscription(key, session,
+    subscription_info = get_subscription(key, session, org_data_for_subscription)
     if subscription_info:
         info_dict.update(
             {
@@ -73,10 +122,39 @@ def get_key_oai_info(key: str) -> Dict[str, Any]:
                 "models": subscription_info.get("models", ""),
                 "requests_per_minute": subscription_info.get("rpm", ""),
                 "tokens_per_minute": subscription_info.get("tpm", ""),
-                "quota": subscription_info.get("quota", ""),
+                "quota": subscription_info.get("quota", info_dict["quota"]),
                 "all_models": subscription_info.get("all_models", ""),
             }
         )
+
+        error_info = subscription_info.get("error")
+        if error_info and isinstance(error_info, dict):
+            err_type = error_info.get("type", "").lower()
+            err_code = error_info.get("code", "").lower()
+            err_msg = error_info.get("message", "").lower()
+
+            if "insufficient_quota" in err_type or \
+               "insufficient_quota" in err_code or \
+               "you exceeded your current quota" in err_msg or \
+               "payment required" in err_msg or \
+               ("billing" in err_msg and "issue" in err_msg):
+                info_dict["has_sufficient_quota"] = False
+                new_quota_message = f"Insufficient: {error_info.get('message', err_type)}"
+                info_dict["quota"] = new_quota_message  # explicit message in the quota field
+                info_dict["status_message"] = (info_dict["status_message"] + f" Quota/Billing issue: {error_info.get('message', err_type)}").strip()
+
+        # Check for account deactivation, failed payment, etc.
+        if "account_deactivated" in str(subscription_info).lower() or \
+           "payment_failed" in str(subscription_info).lower():
+            info_dict["has_sufficient_quota"] = False
+            if "Account issue" not in info_dict["status_message"]:  # avoid duplicate messages
+                info_dict["status_message"] = (info_dict["status_message"] + " Account issue (e.g., deactivated, payment failed).").strip()
+    else:
+        info_dict["status_message"] = (info_dict["status_message"] + " Failed to retrieve full subscription details.").strip()
+        info_dict["has_sufficient_quota"] = False
+        # Without subscription info, key_availability could also be treated as False.
+        # info_dict["key_availability"] = False  # left commented out so the key itself can still count as valid
+
     return info_dict
 
 
@@ -367,10 +445,10 @@ def not_supported(key: str) -> Dict[str, Any]:
 
 
 # ─────────────────────────────────────────
-# Single-key async processing (updated
+# Single-key async processing (updated)
 # ─────────────────────────────────────────
 async def process_single_key(key: str, rate_limit: bool, claude_model: str) -> Dict[str, Any]:
-    """Analyze a single given key and return an info dict.
+    """Analyze a single given key and return an info dict. Includes a working-status flag for Gemini keys."""
     _key = key.strip()
 
     if not _key:
@@ -381,16 +459,11 @@ async def process_single_key(key: str, rate_limit: bool, claude_model: str) -> D
         result = get_key_openrouter_info(_key)
         return {"key": _key, **result}
 
-    # Anthropic Claude
+    # Anthropic Claude
     if re.match(re.compile(r"sk-ant-api03-[a-zA-Z0-9\-_]{93}AA"), _key) or \
        (_key.startswith("sk-ant-") and len(_key) == 93) or \
        (len(_key) == 89 and re.match(re.compile(r"sk-[a-zA-Z0-9]{86}"), _key)):
         result = await get_key_ant_info(_key, rate_limit, claude_model)
-        # Determine whether the Anthropic key works: it works if key_availability is True and status contains no error
-        is_working = result.get("key_availability") and \
-            not any(error_word in str(result.get("status", "")).lower()
-                    for error_word in ["error", "invalid", "expired", "forbidden", "unauthorized"])
-        result["is_anthropic_working"] = is_working
         return {"key": _key, **result}
 
     # Stability
@@ -403,15 +476,12 @@ async def process_single_key(key: str, rate_limit: bool, claude_model: str) -> D
         result = get_key_deepseek_info(_key)
         return {"key": _key, **result}
 
-    # OpenAI (must come after the other sk- patterns)
+    # OpenAI (must come after the other sk- patterns)
     if _key.startswith("sk-"):
-        result = get_key_oai_info(_key)
-        # Determine whether the OpenAI key works: it works if key_availability is True
-        is_working = result.get("key_availability") == True
-        result["is_openai_working"] = is_working
+        result = get_key_oai_info(_key)  # call the updated function
         return {"key": _key, **result}
 
-    # Google Gemini handling
+    # Google Gemini handling
     if _key.startswith("AIzaSy"):
         gemini_info = get_key_gemini_info(_key)
         is_working = gemini_info.get("key_availability") and gemini_info.get("status") == "Working"
@@ -492,45 +562,51 @@ async def process_single_key(key: str, rate_limit: bool, claude_model: str) -> D
 
 
 # ─────────────────────────────────────────
-# Multi-key async processing function (updated
+# Multi-key async processing function (updated)
 # ─────────────────────────────────────────
-async def sort_keys(text: str, rate_limit: bool, claude_model: str) -> Tuple[List[Dict[str, Any]], str]:
-    """
+async def sort_keys(text: str, rate_limit: bool, claude_model: str) -> Tuple[List[Dict[str, Any]], str, str, str]:
+    """
+    Analyze the keys entered in the text box line by line and return the full
+    results plus separate lists of working OAI, Anthropic, and Gemini keys.
+    """
     keys = [k.strip() for k in text.splitlines() if k.strip()]
     if not keys:
-        return [], ""
+        return [], "", "", ""
 
     tasks = [process_single_key(k, rate_limit, claude_model) for k in keys]
     results = await asyncio.gather(*tasks)
 
+    working_gemini_keys = []
+    working_oai_keys = []
+    working_anthropic_keys = []
+
     for result in results:
-        key_value = result.get("key"
+        key_value = result.get("key")
+        if not key_value:
+            continue
+
+        key_type = result.get("key_type")
+
+        # Filter Gemini keys
+        if result.get("is_gemini_working"):
+            working_gemini_keys.append(key_value)
+        # Filter OpenAI keys (updated: check has_sufficient_quota)
+        elif key_type == "OpenAI" and \
+             result.get("key_availability") is True and \
+             result.get("has_sufficient_quota") is True:  # this field must be True
+            working_oai_keys.append(key_value)
+        # Filter Anthropic keys
+        elif key_type == "Anthropic Claude" and result.get("key_availability") is True:
+            working_anthropic_keys.append(key_value)
+
+    return results, "\n".join(working_oai_keys), "\n".join(working_anthropic_keys), "\n".join(working_gemini_keys)
 
 
 # ─────────────────────────────────────────
-# UI util
+# UI util (updated)
 # ─────────────────────────────────────────
 def clear_inputs():
-    return "", "", ""
+    return "", "", "", "", ""
 
 
 # ─────────────────────────────────────────
@@ -539,9 +615,10 @@ def clear_inputs():
 with gr.Blocks() as demo:
     gr.Markdown(
         """
-# Multi-API Key Status Checker (
+# Multi-API Key Status Checker (OAI, Anthropic, Gemini Enhanced)
 *(Based on shaocongma, CncAnon1, su, Drago, kingbased key checkers)*
 Check the status and details of various API keys including OpenAI, Anthropic, Gemini, Azure, Mistral, Replicate, AWS Claude, OpenRouter, Vertex AI (GCP Anthropic), Groq, NovelAI, ElevenLabs, xAI, Stability AI, and DeepSeek.
+This version highlights working OpenAI (with sufficient quota), Anthropic, and Gemini keys in separate text boxes.
 
 **Key Formats:**
 * **AWS:** `AWS_ACCESS_KEY_ID:AWS_SECRET_ACCESS_KEY` (root might not be accurate)
@@ -582,29 +659,40 @@ Check the status and details of various API keys including OpenAI, Anthropic, Ge
 
     with gr.Column(scale=2):
         info = gr.JSON(label=" API Key Information (All Results)", open=True)
+        oai_keys_output = gr.Textbox(
+            label="Working OpenAI Keys (Sufficient Quota)",
+            info="Lists OpenAI keys confirmed as working and having sufficient quota.",
+            lines=3,
+            max_lines=10,
+            interactive=False,
+        )
+        anthropic_keys_output = gr.Textbox(
+            label="Working Anthropic Keys",
+            info="Lists Anthropic keys confirmed as working (key_availability is True).",
+            lines=3,
+            max_lines=10,
+            interactive=False,
+        )
+        gemini_keys_output = gr.Textbox(
+            label="Working Gemini Keys",
+            info="Lists Gemini keys confirmed as 'Working'.",
+            lines=3,
+            max_lines=10,
             interactive=False,
         )
 
-    # On Clear click, reset both the input box and the output boxes
     clear_button.click(
         fn=clear_inputs,
         inputs=None,
-        outputs=[key_box, info,
+        outputs=[key_box, info, oai_keys_output, anthropic_keys_output, gemini_keys_output]
     )
 
-    # On Submit click, call sort_keys and distribute the results
     submit_button.click(
         fn=sort_keys,
         inputs=[key_box, rate_limit, claude_model],
-        outputs=[info,
+        outputs=[info, oai_keys_output, anthropic_keys_output, gemini_keys_output],
         api_name="sort_keys",
     )
 
-# demo.launch(share=True)
+# demo.launch(share=True)
 demo.launch()
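
For reference, a minimal self-contained sketch of the selection rule the updated `sort_keys` applies to OpenAI results: a key is listed in the "Working OpenAI Keys" box only when both `key_availability` and `has_sufficient_quota` are True. The sample dicts below are illustrative placeholders, not real checker output.

```python
from typing import Any, Dict, List

def working_openai_keys(results: List[Dict[str, Any]]) -> List[str]:
    # Mirrors the filter in sort_keys: type must be OpenAI, the key must be
    # available, and the quota flag must still be True.
    return [
        r["key"]
        for r in results
        if r.get("key_type") == "OpenAI"
        and r.get("key_availability") is True
        and r.get("has_sufficient_quota") is True
    ]

sample = [
    {"key": "sk-valid", "key_type": "OpenAI", "key_availability": True, "has_sufficient_quota": True},
    {"key": "sk-no-quota", "key_type": "OpenAI", "key_availability": True, "has_sufficient_quota": False},
    {"key": "sk-unauthorized", "key_type": "OpenAI", "key_availability": False, "has_sufficient_quota": False},
]
print(working_openai_keys(sample))  # ['sk-valid']
```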
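
Because the submit handler is registered with `api_name="sort_keys"`, the checker can also be driven programmatically. Below is a hedged sketch using `gradio_client`, assuming the app is already running locally on Gradio's default port; the key strings and Claude model name are placeholders, and the exact return types can vary by Gradio version.

```python
# pip install gradio_client
from gradio_client import Client

client = Client("http://127.0.0.1:7860")  # assumes `python app.py` is serving here
results, oai_keys, anthropic_keys, gemini_keys = client.predict(
    "sk-PLACEHOLDER\nAIzaSyPLACEHOLDER",  # key_box: one key per line (placeholders)
    False,                                # rate_limit checkbox
    "claude-3-5-haiku-latest",            # claude_model (placeholder choice)
    api_name="/sort_keys",
)
print("Working OpenAI keys:\n" + oai_keys)
```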