import time

import gradio as gr
import requests
import g4f
from g4f import Provider, models
from langchain.llms.base import LLM
from langchain_g4f import G4FLLM

g4f.debug.logging = True  # Enable g4f debug logging
g4f.check_version = False  # Disable automatic version checking
print(g4f.version)  # Check the installed g4f version
print(g4f.Provider.Ails.params)  # Supported args for the Ails provider

url = "https://app.embedchain.ai/api/v1/pipelines/f14b3df8-db63-456c-8a7f-4323b4467271/context/"
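# Assumption based on how the response is used below: the embedchain pipeline's
# /context/ endpoint is POSTed a {"query", "count"} payload and returns the
# retrieved passages as text, which greet() passes verbatim to the LLM as context.
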
def greet(name):
    # Retrieve supporting context for the query from the embedchain pipeline.
    payload = {
        "query": name,
        "count": 15,
    }
    headers = {
        "Authorization": "Token ec-pbVFWamfNAciPwb18ZwaQkKKUCCBnafko9ydl3Y5",
    }
    response = requests.post(url, headers=headers, json=payload)
    print(name)
    c = response.text

    # Wrap the g4f backend as a LangChain LLM.
    llm: LLM = G4FLLM(
        model=models.gpt_35_turbo_16k
    )
    res = llm(f"""
Use the following pieces of context to answer the query at the end.
If you don't know the answer, just say that you don't know, don't try to make up an answer.
{c}
Query: {name}
Helpful Answer:
system_prompt: |
  Act as a Gabonese legal assistant. Answer the questions in French, citing the relevant articles.
""")
    print(res)
    return res
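
# Quick manual check of the retrieval + LLM pipeline (hypothetical query; assumes
# network access to the embedchain API and a working g4f provider):
#   print(greet("Que dit le Code du travail gabonais sur le contrat de travail ?"))
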
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    clear = gr.ClearButton([msg, chatbot])
    def respond(message, chat_history):
        print(message)
        print("ok")
        print(chat_history)
        # Reuse greet() so chat answers go through the same context-retrieval
        # and LLM prompt as the standalone function, with the user's message
        # as the query.
        bot_message = greet(message)
        chat_history.append((message, bot_message))
        time.sleep(2)
        return "", chat_history
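
    # Submitting the textbox sends (message, chat_history) to respond, which
    # returns ("", updated_history): the empty string clears the input box and
    # the updated history refreshes the Chatbot component.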
    msg.submit(respond, [msg, chatbot], [msg, chatbot])

demo.launch()
#iface = gr.Interface(fn=greet, inputs="text", outputs="text")
#iface.launch()