Update app.py
app.py CHANGED
@@ -367,10 +367,10 @@ def not_supported(key: str) -> Dict[str, Any]:
 
 
 # ─────────────────────────────────────────
-# Single-key async processing (updated)
+# Single-key async processing (updated - OpenAI and Anthropic working flags added)
 # ─────────────────────────────────────────
 async def process_single_key(key: str, rate_limit: bool, claude_model: str) -> Dict[str, Any]:
-    """Analyze the given key and return an info dict."""
+    """Analyze the given key and return an info dict. Working keys include a flag."""
     _key = key.strip()
 
     if not _key:
@@ -379,58 +379,63 @@ async def process_single_key(key: str, rate_limit: bool, claude_model: str) -> Dict[str, Any]:
     # OpenRouter
     if re.match(re.compile(r"sk-or-v1-[a-z0-9]{64}"), _key):
         result = get_key_openrouter_info(_key)
         return {"key": _key, **result}
 
-    # Anthropic Claude
+    # Anthropic Claude (working flag added)
     if re.match(re.compile(r"sk-ant-api03-[a-zA-Z0-9\-_]{93}AA"), _key) or \
        (_key.startswith("sk-ant-") and len(_key) == 93) or \
        (len(_key) == 89 and re.match(re.compile(r"sk-[a-zA-Z0-9]{86}"), _key)):
         result = await get_key_ant_info(_key, rate_limit, claude_model)
+        # Anthropic working check: key_availability is True and status contains no error words
+        is_working = result.get("key_availability") and \
+            not any(error_word in str(result.get("status", "")).lower()
+                    for error_word in ["error", "invalid", "expired", "forbidden", "unauthorized"])
+        result["is_anthropic_working"] = is_working
         return {"key": _key, **result}
 
     # Stability
     if re.match(re.compile(r"sk-[a-zA-Z0-9]{48}"), _key) and len(_key) == 51 and "T3BlbkFJ" not in _key:
         result = get_key_stability_info(_key)
         return {"key": _key, **result}
 
     # Deepseek
     if re.match(re.compile(r"sk-[a-f0-9]{32}"), _key):
         result = get_key_deepseek_info(_key)
         return {"key": _key, **result}
 
-    # OpenAI (must come after the other sk- patterns)
+    # OpenAI (must come after the other sk- patterns) (working flag added)
     if _key.startswith("sk-"):
         result = get_key_oai_info(_key)
+        # OpenAI working check: working if key_availability is True
+        is_working = result.get("key_availability") == True
+        result["is_openai_working"] = is_working
        return {"key": _key, **result}
 
-    # Google Gemini handling (...)
+    # Google Gemini handling (unchanged)
     if _key.startswith("AIzaSy"):
         gemini_info = get_key_gemini_info(_key)
-        # Add a working flag (check status)
         is_working = gemini_info.get("key_availability") and gemini_info.get("status") == "Working"
-        # Build the result dict: original info + key_type + working flag
-        # gemini_info already contains 'key'
         result = {
             "key_type": "Google Gemini",
             **gemini_info,
             "is_gemini_working": is_working
         }
         return result
 
     # NovelAI
     if _key.startswith("pst-"):
         result = get_key_nai_info(_key)
         return {"key": _key, **result}
 
     # Replicate
     if (_key.startswith("r8_") and len(_key) == 40) or (_key.islower() and len(_key) == 40):
         result = get_key_replicate_info(_key)
         return {"key": _key, **result}
 
     # xAI
     if _key.startswith("xai-"):
         result = get_key_xai_info(_key)
         return {"key": _key, **result}
 
     # Azure endpoint: "name:key"
     if len(_key.split(":")) == 2:
@@ -439,59 +444,58 @@ async def process_single_key(key: str, rate_limit: bool, claude_model: str) -> Dict[str, Any]:
         endpoint = f"https://{name}.openai.azure.com/"
         api_key = potential_key
         result = get_key_azure_info(endpoint, api_key)
         return {"key": _key, **result}
 
     # Azure endpoint: "https://xxx.openai.azure.com;key"
     if ";" in _key and "openai.azure.com" in _key.split(";")[0]:
         endpoint, api_key = _key.split(";", 1)
         result = get_key_azure_info(endpoint, api_key)
         return {"key": _key, **result}
 
     # AWS
     if _key.startswith("AKIA") and len(_key.split(":")[0]) == 20 and _key.split(":")[0].isalnum() and _key.split(":")[0].isupper() and len(_key.split(':')) == 2:
         result = await get_key_aws_info(_key)
         return {"key": _key, **result}
 
     # ElevenLabs
     if re.fullmatch(r"[a-f0-9]{32}", _key) or re.fullmatch(r"sk_[a-f0-9]{48}", _key):
         result = get_key_elevenlabs_info(_key)
         return {"key": _key, **result}
 
     # Mistral
     if re.fullmatch(r"[a-zA-Z0-9]{32}", _key) and not _key.startswith('sk-'):
         result = get_key_mistral_info(_key)
         return {"key": _key, **result}
 
     # Groq
     if re.match(re.compile(r"gsk_[a-zA-Z0-9]{20}WGdyb3FY[a-zA-Z0-9]{24}"), _key):
         result = get_key_groq_info(_key)
         return {"key": _key, **result}
 
     # GCP - refresh token
     if re.match(re.compile(r"[\w\-]+:[\w\-@\.]+:.+:.+"), _key):
         parts = _key.split(':')
         if len(parts) >= 4:
             result = await get_key_gcp_info(_key, 0)
             return {"key": _key, **result}
 
     # GCP - service account
     if re.match(re.compile(r"[\w\-]+:[\w\-@\.]+:.+\\n"), _key):
         parts = _key.split(':')
         if len(parts) >= 3:
             result = await get_key_gcp_info(_key, 1)
             return {"key": _key, **result}
 
     # Not supported
     result = not_supported(_key)
-    # Add the key value and return
     return {"key": _key, **result}
 
 
 # ─────────────────────────────────────────
-# Async processing of multiple keys (updated)
+# Async processing of multiple keys (updated - returns all working keys)
 # ─────────────────────────────────────────
 async def sort_keys(text: str, rate_limit: bool, claude_model: str) -> Tuple[List[Dict[str, Any]], str]:
-    """Analyze the keys entered in the text box line by line and return all results plus the working ...
+    """Analyze the keys entered in the text box line by line and return all results plus the list of working keys."""
     keys = [k.strip() for k in text.splitlines() if k.strip()]
     if not keys:
         return [], ""
@@ -499,23 +503,34 @@ async def sort_keys(text: str, rate_limit: bool, claude_model: str) -> Tuple[List[Dict[str, Any]], str]:
     tasks = [process_single_key(k, rate_limit, claude_model) for k in keys]
     results = await asyncio.gather(*tasks)
 
-    # ...
-    ...
+    # Collect the working keys
+    working_keys = []
+
     for result in results:
-        ...
+        key_value = result.get("key", "")
+        key_type = result.get("key_type", "")
+
+        # Check for a working OpenAI key
+        if result.get("is_openai_working"):
+            working_keys.append(f"[OpenAI] {key_value}")
+
+        # Check for a working Anthropic key
+        elif result.get("is_anthropic_working"):
+            working_keys.append(f"[Anthropic] {key_value}")
+
+        # Check for a working Gemini key (existing behaviour)
+        elif result.get("is_gemini_working"):
+            working_keys.append(f"[Gemini] {key_value}")
+
+    # Return the results list for the JSON view and the working keys as one string
+    return results, "\n".join(working_keys)
 
 
 # ─────────────────────────────────────────
 # UI util
 # ─────────────────────────────────────────
-def clear_inputs():  # input argument removed (key_box value not needed)
-    return "", "", ""
+def clear_inputs():
+    return "", "", ""
 
 
 # ─────────────────────────────────────────
@@ -524,7 +539,7 @@ def clear_inputs():  # input argument removed (key_box value not needed)
 with gr.Blocks() as demo:
     gr.Markdown(
         """
-# Multi-API Key Status Checker (...)
+# Multi-API Key Status Checker (Enhanced Working Keys Display)
 *(Based on shaocongma, CncAnon1, su, Drago, kingbased key checkers)*
 Check the status and details of various API keys including OpenAI, Anthropic, Gemini, Azure, Mistral, Replicate, AWS Claude, OpenRouter, Vertex AI (GCP Anthropic), Groq, NovelAI, ElevenLabs, xAI, Stability AI, and DeepSeek.
 
@@ -567,27 +582,27 @@ Check the status and details of various API keys including OpenAI, Anthropic, Gemini, ...
 
         with gr.Column(scale=2):
             info = gr.JSON(label=" API Key Information (All Results)", open=True)
-            # ...
-            ...
-                label="Working Gemini ...
-                info="Lists ...
-                lines=...
-                max_lines=...
+            # Output box for the working keys (expanded)
+            working_keys_output = gr.Textbox(
+                label="Working Keys (OpenAI, Anthropic, Gemini)",
+                info="Lists all keys confirmed as working for OpenAI, Anthropic Claude, and Gemini services.",
+                lines=5,
+                max_lines=15,
                 interactive=False,
             )
 
     # Reset both the input box and the output boxes when the Clear button is clicked
     clear_button.click(
         fn=clear_inputs,
         inputs=None,
-        outputs=[key_box, info, ...
+        outputs=[key_box, info, working_keys_output]
     )
 
    # Call sort_keys and distribute the results when the Submit button is clicked
    submit_button.click(
        fn=sort_keys,
        inputs=[key_box, rate_limit, claude_model],
-        outputs=[info, ...
+        outputs=[info, working_keys_output],
        api_name="sort_keys",
    )
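For reference, the collection step that sort_keys now performs can be exercised on its own. The sketch below mirrors the loop from the diff against hand-written stub results; collect_working_keys and the stub dicts are illustrative helpers, not part of app.py.

# Minimal sketch of the working-key collection added to sort_keys.
# collect_working_keys and the stub dicts below are illustrative only.
from typing import Any, Dict, List

def collect_working_keys(results: List[Dict[str, Any]]) -> str:
    """Mirror the loop in sort_keys: one "[Provider] key" line per working key."""
    working_keys: List[str] = []
    for result in results:
        key_value = result.get("key", "")
        if result.get("is_openai_working"):
            working_keys.append(f"[OpenAI] {key_value}")
        elif result.get("is_anthropic_working"):
            working_keys.append(f"[Anthropic] {key_value}")
        elif result.get("is_gemini_working"):
            working_keys.append(f"[Gemini] {key_value}")
    return "\n".join(working_keys)

# Made-up results for demonstration; real entries come from process_single_key.
stub_results = [
    {"key": "sk-not-a-real-key", "is_openai_working": True},
    {"key": "sk-ant-not-a-real-key", "is_anthropic_working": True},
    {"key": "AIzaSy-not-a-real-key", "is_gemini_working": False},
]
print(collect_working_keys(stub_results))
# [OpenAI] sk-not-a-real-key
# [Anthropic] sk-ant-not-a-real-key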
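Because the Submit handler keeps api_name="sort_keys", the endpoint should also remain callable through gradio_client. A hedged usage sketch follows, assuming a placeholder Space id and an example Claude model name (neither appears in this diff); the two return values correspond to the [info, working_keys_output] components wired to the button.

from gradio_client import Client

client = Client("owner/space-name")  # placeholder Space id, not given in the diff
all_results, working_keys = client.predict(
    "sk-not-a-real-key\nAIzaSy-not-a-real-key",  # key_box: one key per line (dummy values)
    False,                                        # rate_limit toggle passed to the Claude check
    "claude-3-haiku-20240307",                    # claude_model: example value, assumed
    api_name="/sort_keys",
)
print(working_keys)  # newline-separated "[Provider] key" lines, as built by sort_keys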
|