File size: 1,667 Bytes
bea420f
 
 
 
 
176e824
bea420f
 
 
 
 
 
017da00
bea420f
 
176e824
bea420f
176e824
bea420f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
176e824
 
 
 
 
 
 
 
 
 
 
bea420f
 
 
 
 
 
 
 
 
176e824
bea420f
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
import gradio as gr

from langchain import PromptTemplate, LLMChain
from langchain.llms import HuggingFaceHub

# Chain-of-thought prompt template, used when the "Think step by step"
# checkbox is enabled in the UI; {question} is filled in by PromptTemplate.
template_by_step = """Question: {question}

Answer: Let's think step by step."""


def run(
    question: str = None,
    repo_id: str = "google/flan-t5-xxl",
    temperature: float = 0.5,
    max_length: int = 64,
    by_steq: bool = False,
):
    """Answer *question* using a Hugging Face Hub model via a LangChain LLMChain.

    Gradio passes each input component's *value* to the callback, not the
    component object itself, so the parameters are annotated with the actual
    runtime types (the original annotated them with ``gr.Textbox`` etc.,
    which is incorrect).

    Args:
        question: The user's question text.
        repo_id: Hugging Face Hub repository id of the model to query.
        temperature: Sampling temperature forwarded to the model.
        max_length: Maximum generation length forwarded to the model.
        by_steq: When True, wrap the question in the chain-of-thought
            template. NOTE(review): name looks like a typo for ``by_step``;
            kept as-is so the positional gradio interface stays compatible.

    Returns:
        The model's generated answer as a string.
    """
    template = template_by_step if by_steq else "{question}"
    prompt = PromptTemplate(template=template, input_variables=["question"])
    # Requires a Hugging Face API token in the environment
    # (HUGGINGFACEHUB_API_TOKEN) — presumably set by the deployment; verify.
    llm = HuggingFaceHub(
        repo_id=repo_id,
        model_kwargs={"temperature": temperature, "max_length": max_length},
    )
    llm_chain = LLMChain(prompt=prompt, llm=llm)
    result = llm_chain.run(question)
    print(result)
    return result


# Input components, in the positional order expected by run().
inputs = [
    gr.Textbox(label="Question"),
    gr.Dropdown(["google/flan-t5-xxl", "google/flan-t5-base"],
                value="google/flan-t5-xxl", label="Model", allow_custom_value=True),
    gr.Slider(0.0, 1.0, value=0.5, step=0.05, label="Temperature"),
    gr.Slider(20, 1000, value=64, label="Max Length"),
    gr.Checkbox(label="Think step by step", value=False),
]

# Clickable example questions shown beneath the interface.
examples = [
    ["What is the capital of France?"],
    ["What's the Earth total population?"],
    ["Who won the FIFA World Cup in the year 1994?"],
    ["What NFL team won the Super Bowl in the year Justin Bieber was born?"],
    ["Translate the following to French: There are so many plans"],
    ["Write an article to introduce machine learning"],
]

title = "Langchain w/ HF Models"

gr.Interface(
    fn=run,
    inputs=inputs,
    # "text" renders the answer in a Textbox. The original used "label"
    # (gr.Label), which is intended for classification labels/confidences,
    # not free-form generated text.
    outputs="text",
    title=title,
    examples=examples,
).launch()