import os

# Chat-app configuration loaded from environment variables.
#
# Environment variables are always strings, so numeric and boolean settings
# must be converted explicitly.  The previous `os.getenv(NAME) or default`
# pattern had two bugs: a set numeric var was passed through as a raw string
# (e.g. temperature "0.5" instead of 0.5), and the boolean flags could never
# become False because any non-empty string — including "false" — is truthy.

# Values (case-insensitive) that count as "true" for boolean env vars.
_TRUTHY = ("1", "true", "yes", "on")


def env_str(name: str, default: str = "") -> str:
    """Return env var *name*, or *default* when unset or empty."""
    return os.getenv(name) or default


def env_int(name: str, default: int) -> int:
    """Return env var *name* parsed as int, or *default* when unset/empty."""
    raw = os.getenv(name)
    return int(raw) if raw else default


def env_float(name: str, default: float) -> float:
    """Return env var *name* parsed as float, or *default* when unset/empty."""
    raw = os.getenv(name)
    return float(raw) if raw else default


def env_bool(name: str, default: bool) -> bool:
    """Return env var *name* as a bool.

    "1", "true", "yes", "on" (any case, surrounding whitespace ignored) map
    to True; any other non-empty value maps to False; unset/empty returns
    *default*.
    """
    raw = os.getenv(name)
    if not raw:
        return default
    return raw.strip().lower() in _TRUTHY


# Model / watsonx.ai connection settings.
model_id = env_str("MODEL_ID", "meta-llama/llama-3-2-90b-vision-instruct")
wx_api_key = os.getenv("WX_API_KEY")        # required credential; no safe default
wx_project_id = os.getenv("WX_PROJECT_ID")  # required credential; no safe default
wx_url = env_str("WX_URL", "https://eu-de.ml.cloud.ibm.com")
system_prompt = env_str("SYSTEM_PROMPT", "")

# Generation parameters forwarded to the model API.
params = {
    "temperature": env_float("TEMPERATURE", 0.7),
    "max_tokens": env_int("MAX_TOKENS", 4096),
    "top_p": env_float("TOP_P", 1.0),
    # Comma-separated override; otherwise the model's default terminators.
    "stop": (
        os.getenv("STOP_SEQUENCES", "").split(",")
        if os.getenv("STOP_SEQUENCES")
        else ["", "<|end_of_text|>", "<|endoftext|>"]
    ),
    # Optional penalties — enable by uncommenting:
    # "frequency_penalty": env_float("FREQUENCY_PENALTY", 0.5),
    # "presence_penalty": env_float("PRESENCE_PENALTY", 0.3),
}

# UI behaviour flags.  Previously any non-empty env value (even "false")
# left these True; now "false"/"0"/"no"/"off" actually disable them.
display_chat_history = env_bool("DISPLAY_CHAT_HISTORY", True)
stream_outputs = env_bool("STREAM_OUTPUTS", True)

# Optional password gate for the app; empty string means no password.
app_password = os.getenv("APP_PASSWORD", "")