"""
Hugging Face Provider Integration
Handles API calls to Hugging Face for AI model inference
"""
import os
import requests
import time
import json
import logging
from typing import Dict, Any, Optional, List
# Setup logging: configure the root logger once at import time, then create
# the module-level logger used by HuggingFaceProvider for request diagnostics.
# NOTE(review): basicConfig at import time affects the whole process — confirm
# this is intended rather than configuring logging in the application entry point.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("huggingface")
class HuggingFaceProvider:
    """Hugging Face Inference API provider.

    Wraps the hosted Inference API for text generation, image generation,
    and model discovery. All methods return plain dicts describing either a
    successful result or a failure (``success`` key), never raising to the
    caller for network/API errors.
    """

    def __init__(self, api_key: Optional[str] = None, timeout: float = 60.0):
        """Initialize the Hugging Face provider.

        Args:
            api_key: Hugging Face API token. Falls back to the
                ``HUGGINGFACE_API_KEY`` environment variable when omitted.
            timeout: Per-request timeout in seconds for all HTTP calls.
                (Fix: the original requests had no timeout and could hang
                indefinitely on a stalled connection.)
        """
        self.api_key = api_key or os.getenv("HUGGINGFACE_API_KEY")
        if not self.api_key:
            logger.warning("No Hugging Face API key provided. Set HUGGINGFACE_API_KEY env variable.")
        self.base_url = "https://api-inference.huggingface.co/models"
        self.headers = {"Authorization": f"Bearer {self.api_key}"} if self.api_key else {}
        self.timeout = timeout

    def _error_result(self, error: str, model: str, start_time: float) -> Dict[str, Any]:
        """Build the uniform failure payload shared by all generation methods."""
        return {
            "success": False,
            "error": error,
            "response_time": time.time() - start_time,
            "model": model,
            "provider": "huggingface",
        }

    def generate_text(self,
                      prompt: str,
                      model: str = "mistralai/Mistral-7B-Instruct-v0.2",
                      max_tokens: int = 1000,
                      temperature: float = 0.7,
                      **kwargs) -> Dict[str, Any]:
        """Generate text using a Hugging Face text-generation model.

        Args:
            prompt: Input text sent as the model ``inputs``.
            model: Model repository id on the Hub.
            max_tokens: Maximum number of new tokens to generate.
            temperature: Sampling temperature.
            **kwargs: Extra generation parameters merged into ``parameters``.

        Returns:
            Dict with ``success``, ``model``, ``provider``, ``response_time``
            and, on success, ``text`` plus the ``raw_response``; on failure,
            an ``error`` message instead.
        """
        start_time = time.time()
        try:
            url = f"{self.base_url}/{model}"
            payload = {
                "inputs": prompt,
                "parameters": {
                    "max_new_tokens": max_tokens,
                    "temperature": temperature,
                    # Only return newly generated tokens, not the prompt.
                    "return_full_text": False,
                    **kwargs,
                },
            }
            response = requests.post(
                url,
                headers=self.headers,
                json=payload,
                timeout=self.timeout,
            )
            if response.status_code != 200:
                logger.error(f"Error from Hugging Face API: {response.status_code} - {response.text}")
                return self._error_result(
                    f"Hugging Face API error: {response.status_code}", model, start_time
                )
            result = response.json()
            # The API returns either a list of generation dicts or a single
            # dict depending on the model/pipeline; handle both shapes.
            generated_text = ""
            if isinstance(result, list) and len(result) > 0:
                if "generated_text" in result[0]:
                    generated_text = result[0]["generated_text"]
                else:
                    generated_text = result[0].get("text", "")
            elif isinstance(result, dict) and "generated_text" in result:
                # Guard added: the original membership test assumed a dict.
                generated_text = result["generated_text"]
            return {
                "success": True,
                "text": generated_text,
                "model": model,
                "provider": "huggingface",
                "response_time": time.time() - start_time,
                "raw_response": result,
            }
        except Exception as e:
            # logger.exception records the traceback alongside the message.
            logger.exception(f"Error generating text with Hugging Face: {e}")
            return self._error_result(str(e), model, start_time)

    def generate_image(self,
                       prompt: str,
                       model: str = "stabilityai/stable-diffusion-xl-base-1.0",
                       height: int = 512,
                       width: int = 512,
                       **kwargs) -> Dict[str, Any]:
        """Generate an image using a Hugging Face image-generation model.

        Args:
            prompt: Text prompt describing the desired image.
            model: Model repository id on the Hub.
            height: Output image height in pixels.
            width: Output image width in pixels.
            **kwargs: Extra generation parameters merged into ``parameters``.

        Returns:
            Dict with ``success``, ``model``, ``provider``, ``response_time``
            and, on success, ``image_data`` (base64-encoded bytes); on
            failure, an ``error`` message instead.
        """
        start_time = time.time()
        try:
            url = f"{self.base_url}/{model}"
            payload = {
                "inputs": prompt,
                "parameters": {
                    "height": height,
                    "width": width,
                    **kwargs,
                },
            }
            response = requests.post(
                url,
                headers=self.headers,
                json=payload,
                timeout=self.timeout,
            )
            # A non-200 body is JSON/text; a 200 body is raw image bytes.
            if response.status_code != 200:
                logger.error(f"Error from Hugging Face API: {response.status_code} - {response.text}")
                return self._error_result(
                    f"Hugging Face API error: {response.status_code}", model, start_time
                )
            # Encode the binary image payload as base64 for JSON transport.
            import base64
            image_data = base64.b64encode(response.content).decode("utf-8")
            return {
                "success": True,
                "image_data": image_data,
                "model": model,
                "provider": "huggingface",
                "response_time": time.time() - start_time,
            }
        except Exception as e:
            logger.exception(f"Error generating image with Hugging Face: {e}")
            return self._error_result(str(e), model, start_time)

    def get_available_models(self, task: str = "text-generation") -> List[Dict[str, Any]]:
        """List popular Hub models for a task, sorted by download count.

        Args:
            task: Hub task/pipeline tag used as the model filter.

        Returns:
            Up to 100 dicts with ``id``, ``name``, ``downloads`` and ``tags``;
            an empty list on any error (best-effort, never raises).
        """
        try:
            url = "https://huggingface.co/api/models"
            params = {
                "filter": task,
                "sort": "downloads",
                "direction": -1,
                "limit": 100,
            }
            response = requests.get(url, params=params, timeout=self.timeout)
            if response.status_code != 200:
                logger.error(f"Error fetching models: {response.status_code} - {response.text}")
                return []
            models = response.json()
            return [
                {
                    "id": model["id"],
                    "name": model.get("name", model["id"]),
                    "downloads": model.get("downloads", 0),
                    "tags": model.get("tags", []),
                }
                for model in models
            ]
        except Exception as e:
            logger.exception(f"Error fetching models: {e}")
            return []
# Example usage
if __name__ == "__main__":
    # Smoke test: requires HUGGINGFACE_API_KEY for an authenticated call;
    # without it the request is sent anonymously and may be rate-limited.
    provider = HuggingFaceProvider()
    result = provider.generate_text("Write a short poem about AI.")
    # Fix: removed a trailing "|" artifact that made this line a syntax error.
    print(json.dumps(result, indent=2))