import os

from huggingface_hub import InferenceClient

# Route requests through the featherless-ai inference provider,
# authenticating with the token stored in the HF_TOKEN environment variable.
client = InferenceClient(
    provider="featherless-ai",
    api_key=os.environ["HF_TOKEN"],
)

# Generate a continuation of the prompt with Meta-Llama-3-8B.
result = client.text_generation(
    "Can you please let us know more details about your ",
    model="meta-llama/Meta-Llama-3-8B",
)
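
# Minimal usage sketch: with the default details=False, text_generation
# returns the generated continuation as a plain string, so it can be
# inspected or printed directly. Assumes HF_TOKEN is set in the environment.
print(result)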