# NOTE(review): the original paste carried Hugging Face Space status-page
# residue here ("Spaces:" / "Runtime error" x2); converted to a comment so
# the file parses.
"""Chainlit app: Hugging Face OAuth login + HuggingFaceHub (flan-t5-xxl) chat."""
# Fixed: every line carried a trailing " | |" paste artifact (syntax error),
# and `os` was imported twice. Grouped stdlib / third-party per PEP 8.
import os

import chainlit as cl
from authlib.integrations.requests_client import OAuth2Session
from chainlit.playground.config import add_llm_provider
from chainlit.playground.providers.langchain import LangchainGenericProvider
from langchain.llms.huggingface_hub import HuggingFaceHub
from langchain.prompts import ChatPromptTemplate
from langchain.schema import StrOutputParser
from langchain.schema.runnable import Runnable
from langchain.schema.runnable.config import RunnableConfig
# --- OAuth configuration ------------------------------------------------
# Credentials and endpoints come from the Space's environment variables.
OAUTH_CLIENT_ID = os.getenv("OAUTH_CLIENT_ID")
OAUTH_CLIENT_SECRET = os.getenv("OAUTH_CLIENT_SECRET")
# OAUTH_SCOPES is a comma-separated list (e.g. "openid,profile").
# Robustness fix: the original called .split() directly on os.getenv(...),
# which raises AttributeError when the variable is unset.
OAUTH_SCOPES = [
    scope.strip()
    for scope in (os.getenv("OAUTH_SCOPES") or "").split(",")
    if scope.strip()
]
OPENID_PROVIDER_URL = os.getenv("OPENID_PROVIDER_URL")
SPACE_HOST = os.getenv("SPACE_HOST")

# The OAuth provider redirects back to this URL after authorization.
redirect_uri = f"https://{SPACE_HOST}/login/callback"

# SECURITY FIX: the original hard-coded a client_id/client_secret pair in
# source, silently shadowing the env-var configuration retrieved above.
# Credentials now come exclusively from the environment.
oauth_client = OAuth2Session(
    client_id=OAUTH_CLIENT_ID,
    client_secret=OAUTH_CLIENT_SECRET,
    scope=OAUTH_SCOPES,
    redirect_uri=redirect_uri,
)

# Build the URL the user must visit to authorize this app; `state` is the
# CSRF token Authlib generated for this authorization attempt.
authorization_url, state = oauth_client.create_authorization_url(
    OPENID_PROVIDER_URL + "/authorize"
)
print(authorization_url, state)
# The rest of the OAuth flow — redirecting the user and exchanging the
# returned code for a token — is handled by the login/callback route.
# --- LLM + playground provider -----------------------------------------
# Instantiate the HuggingFaceHub LLM (flan-t5-xxl via the Inference API).
# Fixed: trailing " | |" paste artifacts made every line a syntax error.
llm = HuggingFaceHub(
    model_kwargs={"max_length": 500},
    repo_id="google/flan-t5-xxl",
    huggingfacehub_api_token=os.environ["HUGGINGFACE_API_TOKEN"],
)

# Register the LLM with the Chainlit prompt playground.
add_llm_provider(
    LangchainGenericProvider(
        # The provider id must match the LLM's _llm_type so the playground
        # can route requests back to this instance.
        id=llm._llm_type,
        # Display name shown in the playground UI; cosmetic only.
        name="HuggingFaceHub",
        # Must be a correctly configured LangChain LLM instance.
        llm=llm,
        # flan-t5 is a completion model, not a chat model.
        is_chat=False,
    )
)
from typing import Dict, Optional  # noqa: F401 — kept; may be used elsewhere in the file

import chainlit as cl


# Fixed: the pasted source had lost the body indentation and the
# registration decorator — without @cl.on_chat_start Chainlit never calls
# this handler (it is registered by decoration, not by name).
@cl.on_chat_start
async def on_chat_start():
    """Greet the authenticated user and build the session's LCEL chain."""
    # The OAuth-authenticated user stored by Chainlit for this session.
    app_user = cl.user_session.get("user")
    await cl.Message(f"Hello {app_user.identifier}").send()
    prompt = ChatPromptTemplate.from_messages(
        [
            ("human", "{question}"),
        ]
    )
    # prompt -> llm -> plain string; stashed per-session for on_message.
    runnable = prompt | llm | StrOutputParser()
    cl.user_session.set("runnable", runnable)
# Fixed: the pasted source had lost the body indentation and the
# registration decorator — without @cl.on_message Chainlit never routes
# incoming messages to this handler.
@cl.on_message
async def on_message(message: cl.Message):
    """Stream the LLM's answer to the incoming user message token by token."""
    runnable = cl.user_session.get("runnable")  # type: Runnable
    msg = cl.Message(content="")
    # Stream tokens into the UI as they arrive; the LangChain callback
    # handler surfaces intermediate chain steps in the Chainlit trace view.
    async for chunk in runnable.astream(
        {"question": message.content},
        config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
    ):
        await msg.stream_token(chunk)
    await msg.send()