import os

from langchain.llms.huggingface_hub import HuggingFaceHub
from langchain.prompts import ChatPromptTemplate
from langchain.schema import StrOutputParser
from langchain.schema.runnable import Runnable
from langchain.schema.runnable.config import RunnableConfig

from chainlit.playground.config import add_llm_provider
from chainlit.playground.providers.langchain import LangchainGenericProvider
import chainlit as cl

from authlib.integrations.requests_client import OAuth2Session

# Retrieving environment variables
OAUTH_CLIENT_ID = os.getenv("OAUTH_CLIENT_ID")
OAUTH_CLIENT_SECRET = os.getenv("OAUTH_CLIENT_SECRET")
OAUTH_SCOPES = os.getenv("OAUTH_SCOPES", "").split(",")  # OAUTH_SCOPES is a comma-separated list; defaults to empty if unset
OPENID_PROVIDER_URL = os.getenv("OPENID_PROVIDER_URL")
SPACE_HOST = os.getenv("SPACE_HOST")

# Constructing the redirect URL using the SPACE_HOST variable
redirect_uri = f"https://{SPACE_HOST}/login/callback"

# Initializing the OAuth client/session with the retrieved environment variables
oauth_client = OAuth2Session(client_id=OAUTH_CLIENT_ID,
                             client_secret=OAUTH_CLIENT_SECRET,  # only needed here if the provider requires it at this stage
                             scope=OAUTH_SCOPES,
                             redirect_uri=redirect_uri)

# Use the corrected method to generate the authorization URL
authorization_url, state = oauth_client.create_authorization_url(OPENID_PROVIDER_URL + '/authorize')

print(authorization_url, state)
# The rest of your OAuth flow would go here, including redirecting the user to the authorization_url,
# and then handling the redirect back to your application to exchange the code for a token.
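#
# A minimal sketch of that exchange using authlib's OAuth2Session.fetch_token.
# The "/oauth/token" endpoint path and the callback_url parameter are
# illustrative assumptions, not part of the original app; use the token
# endpoint advertised by your OpenID provider's metadata instead.
def exchange_code_for_token(callback_url: str) -> dict:
    """Trade the authorization code carried in the redirect URL for a token."""
    return oauth_client.fetch_token(
        OPENID_PROVIDER_URL + "/oauth/token",  # assumed token endpoint
        authorization_response=callback_url,   # full redirect URL containing ?code=...&state=...
    )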


# Instantiate the LLM
llm = HuggingFaceHub(
    model_kwargs={"max_length": 500},
    repo_id="google/flan-t5-xxl",
    huggingfacehub_api_token=os.environ["HUGGINGFACE_API_TOKEN"],
)

# Add the LLM provider
add_llm_provider(
    LangchainGenericProvider(
        # It is important that the id of the provider matches the _llm_type
        id=llm._llm_type,
        # The name is not important. It will be displayed in the UI.
        name="HuggingFaceHub",
        # This should always be a Langchain llm instance (correctly configured)
        llm=llm,
        # If the LLM works with messages, set this to True
        is_chat=False,
    )
)

from typing import Dict, Optional
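
# Chainlit only populates cl.user_session.get("user") once an authentication
# hook is registered. The callback below is a minimal sketch based on
# Chainlit's documented @cl.oauth_callback pattern for custom OAuth providers;
# it simply accepts whatever user the provider returns. The exact signature
# may vary between Chainlit versions, so treat it as an assumption to verify.
@cl.oauth_callback
def oauth_callback(
    provider_id: str,
    token: str,
    raw_user_data: Dict[str, str],
    default_user: cl.User,
) -> Optional[cl.User]:
    # Accept the authenticated user as-is; add allow-listing or role checks here if needed.
    return default_user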


@cl.on_chat_start
async def on_chat_start():
    app_user = cl.user_session.get("user")
    await cl.Message(f"Hello {app_user.identifier}").send()
    prompt = ChatPromptTemplate.from_messages(
        [
            ("human", "{question}"),
        ]
    )
    runnable = prompt | llm | StrOutputParser()
    cl.user_session.set("runnable", runnable)


@cl.on_message
async def on_message(message: cl.Message):
    runnable = cl.user_session.get("runnable")  # type: Runnable

    msg = cl.Message(content="")

    async for chunk in runnable.astream(
        {"question": message.content},
        config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
    ):
        await msg.stream_token(chunk)

    await msg.send()
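
# To try the app locally (assuming this file is saved as app.py and the
# environment variables above are set), run it with the Chainlit CLI:
#   chainlit run app.py -w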