"""
anthropic_api.py

This module encapsulates interaction with the Anthropic API, focusing on text generation
with the Claude model. It covers configuration validation, API request handling,
and processing of API responses.

Key Features:
- Encapsulates all logic related to the Anthropic API.
- Implements retry logic for handling transient API errors.
- Validates response content to ensure API compatibility.
- Provides detailed logging for debugging and error tracking.

Classes:
- AnthropicConfig: Frozen dataclass holding the API configuration (API key, model,
  max tokens, system prompt) and lazily constructing the Anthropic client.
- AnthropicError: Custom exception for Anthropic API-related errors.

Functions:
- generate_text_with_claude: Generates text using the Anthropic SDK, with retry logic
  and response validation.
"""
# Standard Library Imports
from dataclasses import dataclass
import logging
from typing import List, Optional, Union
# Third-Party Library Imports
from anthropic import Anthropic
from anthropic.types import Message, ModelParam, TextBlock
from tenacity import retry, stop_after_attempt, wait_fixed, before_log, after_log
# Local Application Imports
from src.config import logger
from src.utils import truncate_text, validate_env_var
@dataclass(frozen=True)
class AnthropicConfig:
"""
    Immutable configuration for interacting with the Anthropic API.

    Encapsulates the API key, model, token limit, and system prompt, and exposes
    a lazily constructed Anthropic client via the `client` property.
"""
api_key: str = validate_env_var("ANTHROPIC_API_KEY")
    model: ModelParam = "claude-3-5-sonnet-latest"  # Claude model alias; "-latest" tracks the newest snapshot
max_tokens: int = 300 # Max tokens for API response
system_prompt: str = """You are a highly creative and articulate assistant specialized in generating vivid, engaging, and well-written content.
Your task is to respond to user prompts by creating:
1. Short stories,
2. Poems,
3. Or other creative written outputs.
Ensure that your responses are:
- Imaginative and original,
- Coherent and well-structured,
- Suitable for a wide audience, avoiding controversial or sensitive topics.
When writing, tailor your tone and style to match the user's request. For example:
- If the user requests a poem, provide creative and rhythmic verse.
- If the user requests a short story, ensure a clear beginning, middle, and end with compelling details.
Always keep your responses concise, unless explicitly instructed to elaborate."""
def __post_init__(self):
# Validate that required attributes are set
if not self.api_key:
raise ValueError("Anthropic API key is not set.")
if not self.model:
raise ValueError("Anthropic Model is not set.")
if not self.max_tokens:
raise ValueError("Anthropic Max Tokens is not set.")
if not self.system_prompt:
raise ValueError("Anthropic System Prompt is not set.")
@property
def client(self) -> Anthropic:
"""
        Lazily construct an Anthropic client from the stored API key.

        Note that a new client instance is created on each access of this property.

        Returns:
            Anthropic: Configured client instance.
"""
return Anthropic(api_key=self.api_key)
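

# A minimal sketch, not part of the original design: the `client` property above constructs
# a new Anthropic client on every access. If a single shared client per API key is preferred,
# a small cached helper such as the one below could be used instead, with the property
# returning `_shared_client(self.api_key)`. The name `_shared_client` is illustrative.
from functools import lru_cache


@lru_cache(maxsize=1)
def _shared_client(api_key: str) -> Anthropic:
    """Return one Anthropic client per API key, reused across calls."""
    return Anthropic(api_key=api_key)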
class AnthropicError(Exception):
"""Custom exception for errors related to the Anthropic API."""
def __init__(self, message: str, original_exception: Optional[Exception] = None):
super().__init__(message)
self.original_exception = original_exception
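
# Illustrative note on intended usage (the call below is hypothetical): wrap the underlying
# error so callers can still inspect it via `original_exception`, e.g.
#
#     try:
#         some_api_call()
#     except Exception as exc:
#         raise AnthropicError("API call failed", original_exception=exc) from exc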
# Initialize the module-level Anthropic configuration (the client itself is created lazily)
anthropic_config = AnthropicConfig()
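
# Retry policy: up to 3 attempts with a fixed 2-second wait between them, logging at DEBUG
# level before each attempt and after each failed one. With tenacity's defaults, any
# exception raised inside the function triggers a retry, and once the attempts are exhausted
# a tenacity.RetryError wrapping the last error is raised to the caller.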
@retry(
stop=stop_after_attempt(3),
wait=wait_fixed(2),
before=before_log(logger, logging.DEBUG),
after=after_log(logger, logging.DEBUG),
)
def generate_text_with_claude(prompt: str) -> str:
"""
    Generates text using Claude via the Anthropic SDK.

    Args:
        prompt (str): The input prompt for Claude.

    Returns:
        str: The generated text.

    Raises:
        AnthropicError: If the Anthropic API call fails or the response is missing content.

    Example:
        >>> generate_text_with_claude("Write a haiku about nature.")
        "Gentle waves crashing, / Whispering secrets softly, / Infinite blue skies."
"""
# Log model, max tokens, and system prompt for debugging
logger.debug(f"Using model: {anthropic_config.model}, max tokens: {anthropic_config.max_tokens}")
logger.debug(f"System prompt: {truncate_text(anthropic_config.system_prompt)}")
logger.debug(f"Preparing API request with prompt: {prompt[:50]}{'...' if len(prompt) > 50 else ''}")
    response: Optional[Message] = None  # Defined up front so the except block can reference it safely
    try:
        response = anthropic_config.client.messages.create(
model=anthropic_config.model,
max_tokens=anthropic_config.max_tokens,
system=anthropic_config.system_prompt,
messages=[{"role": "user", "content": prompt}],
)
logger.debug(f"API response received: {truncate_text(str(response))}")
# Validate response content
if not hasattr(response, "content"):
logger.error("Response is missing 'content'. Response: %s", response)
raise AnthropicError("Invalid API response: Missing 'content'.")
# Process response content
blocks: Union[List[TextBlock], TextBlock, None] = response.content
if isinstance(blocks, list):
result = "\n\n".join(block.text for block in blocks if isinstance(block, TextBlock))
logger.debug(f"Processed response from list: {truncate_text(result)}")
return result
if isinstance(blocks, TextBlock):
logger.debug(f"Processed response from single TextBlock: {truncate_text(blocks.text)}")
return blocks.text
logger.warning(f"Unexpected response type: {type(blocks)}")
return str(blocks or "No content generated.")
except Exception as e:
logger.exception(f"Error generating text with Claude: {e}")
raise AnthropicError(
message=(
f"Error generating text with Claude: {e}. "
f"HTTP Status: {getattr(response, 'status', 'N/A')}. "
f"Prompt (truncated): {truncate_text(prompt)}. "
f"Model: {anthropic_config.model}, Max tokens: {anthropic_config.max_tokens}"
),
original_exception=e,
        ) from e
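

# A minimal usage sketch for illustration, assuming ANTHROPIC_API_KEY is set in the
# environment. Because of the @retry defaults above, persistent failures reach the caller
# as tenacity.RetryError wrapping the final AnthropicError, not as AnthropicError itself.
if __name__ == "__main__":
    from tenacity import RetryError

    try:
        haiku = generate_text_with_claude("Write a haiku about nature.")
        print(haiku)
    except RetryError as exc:
        # All retry attempts failed; the last underlying error is available on the attempt.
        logger.error(f"Text generation failed after retries: {exc.last_attempt.exception()}")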