# Streamlit app: dialogue summarization with few-shot prompting
# (LangChain prompt templates + Hugging Face models).
import streamlit as st
from datasets import load_dataset
from langchain.chains import LLMChain
from langchain.prompts import (
    ChatPromptTemplate,
    FewShotChatMessagePromptTemplate,
    PromptTemplate,
)
from langchain.prompts.example_selector import LengthBasedExampleSelector
from langchain_huggingface import HuggingFaceEndpoint, HuggingFacePipeline
from transformers import pipeline
# Load dataset (using knkarthick/dialogsum as an example)
@st.cache_data
def load_examples():
    """Fetch a handful of dialogue/summary pairs to use as few-shot examples.

    Returns:
        list[dict]: dicts with an ``"input"`` key (dialogue text) and an
        ``"output"`` key (reference summary) — the shape the few-shot
        prompt template below expects.
    """
    # Slice inside the split spec so only 5 rows are materialized instead
    # of iterating the full training set.
    dataset = load_dataset("knkarthick/dialogsum", split="train[:5]")
    return [
        {"input": row["dialogue"], "output": row["summary"]}
        for row in dataset
    ]


examples = load_examples()
# Load the Hugging Face model
# NOTE(review): only `endpoint_url` is passed here. `HuggingFaceEndpoint`
# typically also needs an API token (e.g. the HUGGINGFACEHUB_API_TOKEN env
# var) and, in some versions, an explicit `task=` argument — confirm against
# the installed langchain_huggingface release before deploying.
hf_endpoint = HuggingFaceEndpoint(
    endpoint_url="https://api-inference.huggingface.co/models/google/pegasus-xsum"  # or any model you like
)
# Create FewShotChatMessagePromptTemplate
#
# Fixes three API misuses in the original construction:
#  * FewShotChatMessagePromptTemplate has no `prefix`/`suffix` parameters —
#    the instruction and final question belong in the surrounding
#    ChatPromptTemplate messages instead.
#  * `examples` and `example_selector` are mutually exclusive; only the
#    selector is passed here (it already holds the examples).
#  * LengthBasedExampleSelector requires an `example_prompt` so it can
#    measure how long each rendered example is.
example_prompt = ChatPromptTemplate.from_messages([
    ("system", "You are a helpful assistant that summarizes dialogues. Examples:"),
    FewShotChatMessagePromptTemplate(
        # How each stored example is rendered into chat messages.
        example_prompt=ChatPromptTemplate.from_messages(
            [("human", "{input}"), ("ai", "{output}")]
        ),
        # Keep only as many examples as fit in the ~1000-word length budget.
        example_selector=LengthBasedExampleSelector(
            examples=examples,
            example_prompt=PromptTemplate.from_template("{input}\n{output}"),
            max_length=1000,
        ),
    ),
    ("human", "Now summarize this:\n{input}"),
])
# Streamlit UI
# (Original said "T5" in the title, but the endpoint above targets
# google/pegasus-xsum — title corrected to match. Mojibake emojis restored.)
st.title("💬 Dialogue Summarizer using Few-Shot Prompt + Pegasus (via LangChain)")
input_text = st.text_area("📝 Paste your conversation:")
if st.button("Generate Summary"):
    if input_text.strip():
        # Render the few-shot prompt for the pasted conversation.
        messages = example_prompt.format_messages(input=input_text)
        with st.expander("📄 Generated Prompt"):
            for msg in messages:
                st.markdown(f"**{msg.type.upper()}**:\n```\n{msg.content}\n```")
        # The original wrapped the endpoint in
        # HuggingFacePipeline(pipeline="summarization", model=hf_endpoint),
        # which is not a valid constructor — HuggingFacePipeline wraps a
        # local transformers.Pipeline object, not a remote endpoint. The
        # remote endpoint LLM is called directly instead; it takes a single
        # string, so the chat messages are flattened into one prompt.
        prompt_text = "\n\n".join(msg.content for msg in messages)
        # Generate summary (HuggingFaceEndpoint.invoke returns a plain string).
        summary = hf_endpoint.invoke(prompt_text)
        st.success("✅ Summary:")
        st.write(summary)
    else:
        st.warning("Please enter some text.")