import os
from langchain.llms.huggingface_hub import HuggingFaceHub
from langchain.prompts import ChatPromptTemplate
from langchain.schema import StrOutputParser
from langchain.schema.runnable import Runnable
from langchain.schema.runnable.config import RunnableConfig
from chainlit.playground.config import add_llm_provider
from chainlit.playground.providers.langchain import LangchainGenericProvider
import chainlit as cl
from authlib.integrations.requests_client import OAuth2Session

# --- OAuth configuration ----------------------------------------------------
# All values are supplied by the hosting environment (e.g. a HF Space).
OAUTH_CLIENT_ID = os.getenv("OAUTH_CLIENT_ID")
OAUTH_CLIENT_SECRET = os.getenv("OAUTH_CLIENT_SECRET")
# OAUTH_SCOPES is assumed to be a comma-separated list. Guard against the
# variable being unset: .split() on None would raise AttributeError at import.
OAUTH_SCOPES = (os.getenv("OAUTH_SCOPES") or "").split(',')
OPENID_PROVIDER_URL = os.getenv("OPENID_PROVIDER_URL")
SPACE_HOST = os.getenv("SPACE_HOST")

# Redirect target registered with the provider; must match the Flask route
# handling the callback below.
redirect_uri = f"https://{SPACE_HOST}/login/callback"

# OAuth client/session used to build the authorization URL (and usable later
# for the token exchange).
oauth_client = OAuth2Session(
    client_id=OAUTH_CLIENT_ID,
    client_secret=OAUTH_CLIENT_SECRET,  # included for the token exchange step
    scope=OAUTH_SCOPES,
    redirect_uri=redirect_uri,
)

# Generate the authorization URL. NOTE(review): `state` should be persisted
# so the callback can compare it and reject CSRF attempts.
authorization_url, state = oauth_client.create_authorization_url(
    OPENID_PROVIDER_URL + '/authorize'
)
print(authorization_url, state)

# The rest of the OAuth flow: redirect the user to authorization_url, then
# handle the redirect back to exchange the code for a token (see the Flask
# route below).
from flask import Flask, request, redirect
import base64
import requests

app = Flask(__name__)


@app.route('/login/callback')
def login_callback():
    """Handle the OAuth provider's redirect and exchange the code for tokens.

    Fix: every path now returns a response — Flask raises ``TypeError`` when
    a view function returns ``None``, which the original did on all paths.
    """
    # Authorization code and state delivered by the provider on the callback.
    code = request.args.get('code')
    state = request.args.get('state')
    # NOTE(review): `state` must be compared with the value generated before
    # the redirect to prevent CSRF; that verification is not implemented here.

    # Exchange the code for tokens using HTTP Basic client authentication.
    token_url = 'https://huggingface.co/oauth/token'
    credentials = f"{OAUTH_CLIENT_ID}:{OAUTH_CLIENT_SECRET}"
    basic_auth_header = base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
    print('IM HERE')
    headers = {
        'Authorization': f'Basic {basic_auth_header}',
        'Content-Type': 'application/x-www-form-urlencoded',
    }
    data = {
        'grant_type': 'authorization_code',
        'code': code,
        'redirect_uri': redirect_uri,
        'client_id': OAUTH_CLIENT_ID,
    }
    response = requests.post(token_url, headers=headers, data=data)

    if response.ok:
        tokens = response.json()
        access_token = tokens['access_token']
        id_token = tokens.get('id_token')  # may be absent
        # The access_token (and id_token) can now be used to call protected
        # resources or identify the user (e.g. the userinfo endpoint).
        print("Login successful")
        return "Login successful"
    else:
        print("Error exchanging code for tokens")
        return "Error exchanging code for tokens", 400


# Instantiate the LLM (requires HUGGINGFACE_API_TOKEN in the environment).
llm = HuggingFaceHub(
    model_kwargs={"max_length": 500},
    repo_id="google/flan-t5-xxl",
    huggingfacehub_api_token=os.environ["HUGGINGFACE_API_TOKEN"],
)

# Register the LLM with the Chainlit prompt playground.
add_llm_provider(
    LangchainGenericProvider(
        # The provider id must match the llm's _llm_type.
        id=llm._llm_type,
        # Display name shown in the UI; the value itself is not significant.
        name="HuggingFaceHub",
        # Must be a correctly configured Langchain llm instance.
        llm=llm,
        # flan-t5 is a completion model, so this stays False; set True for
        # message-based (chat) models.
        is_chat=False,
    )
)

from typing import Dict, Optional

# NOTE: the original re-read every OAUTH_* environment variable here; that
# duplicated the module-level configuration above verbatim and was removed.
# Fix: OAUTH_CLIENT_SECRET is no longer printed — never log credentials.
print(OAUTH_CLIENT_ID, OAUTH_SCOPES, OPENID_PROVIDER_URL, SPACE_HOST, redirect_uri)


@cl.on_chat_start
async def on_chat_start():
    """Greet the authenticated user and build the LCEL pipeline for the session."""
    # NOTE(review): assumes Chainlit auth is configured so "user" is set;
    # app_user would be None otherwise — confirm against deployment config.
    app_user = cl.user_session.get("user")
    await cl.Message(f"Hello {app_user.identifier}").send()

    prompt = ChatPromptTemplate.from_messages(
        [
            ("human", "{question}"),
        ]
    )
    # prompt -> llm -> plain-string output, stored per user session.
    runnable = prompt | llm | StrOutputParser()
    cl.user_session.set("runnable", runnable)


@cl.on_message
async def on_message(message: cl.Message):
    """Stream the model's answer for an incoming user message token by token."""
    runnable = cl.user_session.get("runnable")  # type: Runnable

    msg = cl.Message(content="")
    async for chunk in runnable.astream(
        {"question": message.content},
        config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
    ):
        await msg.stream_token(chunk)
    await msg.send()


if __name__ == '__main__':
    # NOTE(review): debug=True enables the Werkzeug debugger/reloader —
    # do not use in production.
    app.run(debug=True)