"""Gradio chat demo: a Gabonese legal assistant.

Fetches supporting context passages for each query from an embedchain
pipeline API, then asks a g4f-backed LLM to answer in French, citing
the relevant articles.
"""

import os
import time

import gradio as gr
import requests
from g4f import Provider, models
import g4f
from langchain_g4f import G4FLLM

g4f.debug.logging = True  # Enable g4f request logging
g4f.check_version = False  # Disable automatic version checking
print(g4f.version)  # Check version
print(g4f.Provider.Ails.params)  # Supported args

# Embedchain pipeline endpoint that returns context passages for a query.
CONTEXT_URL = (
    "https://app.embedchain.ai/api/v1/pipelines/"
    "f14b3df8-db63-456c-8a7f-4323b4467271/context/"
)

# NOTE(review): a hard-coded API token in source is a security risk.
# Prefer setting EMBEDCHAIN_TOKEN in the environment; the original literal
# is kept as a fallback so existing deployments keep working.
API_TOKEN = os.environ.get(
    "EMBEDCHAIN_TOKEN", "ec-pbVFWamfNAciPwb18ZwaQkKKUCCBnafko9ydl3Y5"
)

# Build the LLM once at module level so both `greet` and `respond` share it.
# (Originally it was created inside `greet`, so `respond` crashed with a
# NameError on `llm`. Also: `llm = LLM = G4FLLM(...)` rebound the imported
# LLM class name to an instance — dropped.)
llm = G4FLLM(model=models.gpt_35_turbo_16k)


def _fetch_context(query: str) -> str:
    """Return the raw context text for *query* from the embedchain API."""
    payload = {"query": query, "count": 15}
    headers = {"Authorization": f"Token {API_TOKEN}"}
    response = requests.post(CONTEXT_URL, headers=headers, json=payload)
    return response.text


def _build_prompt(context: str, query: str) -> str:
    """Assemble the RAG prompt sent to the LLM.

    The original used `${c}` / `${name}` inside an f-string (a JS
    template-literal habit), which injected a stray literal `$` into the
    prompt — fixed to plain `{}` interpolation.
    """
    return f"""
    Use the following pieces of context to answer the query at the end.
    If you don't know the answer, just say that you don't know, don't try to make up an answer.

    {context}

    Query: {query}
    Helpful Answer:
    system_prompt: |
     Agis en tant qu'assistant juridique gabonais Répons au question en français et en citant les articles .
    """


def greet(name):
    """Answer a single query *name* (legacy gr.Interface-style entry point).

    Returns the LLM's answer as a string.
    """
    print(name)
    context = _fetch_context(name)
    res = llm(_build_prompt(context, name))
    print(res)
    return res


with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.ClearButton([msg, chatbot])

    def respond(message, chat_history):
        """Gradio chat callback: answer *message* and record the exchange.

        Originally this referenced `llm`, `c`, and `name` — all locals of
        `greet` — and so raised NameError on every message. It now fetches
        fresh context for the incoming message itself.
        """
        print(message)
        context = _fetch_context(message)
        bot_message = llm(_build_prompt(context, message))
        # gr.Chatbot expects (user, bot) pairs; the original appended only
        # the bare user message, which broke rendering and lost the answer.
        chat_history.append((message, bot_message))
        time.sleep(2)  # brief pause before clearing the textbox (original behavior)
        return "", chat_history

    msg.submit(respond, [msg, chatbot], [msg, chatbot])

demo.launch()