Two Streamlit blog-generator apps: the first uses the Hugging Face Transformers text-generation pipeline with Meta-Llama-3-8B, the second serves a quantized LLaMA 2 chat model locally through LangChain's CTransformers wrapper.
import streamlit as st
from transformers import pipeline

# Load the text-generation pipeline
# (Meta-Llama-3-8B is a gated model: the license must be accepted on the Hub,
# and the full-precision weights need a GPU with enough memory)
pipe = pipeline("text-generation", model="meta-llama/Meta-Llama-3-8B")

def generate_blog(topic, no_words):
    # Create the prompt
    prompt = f"Write a blog on the topic '{topic}' within {no_words} words."
    # Generate the blog content; max_new_tokens caps the generated text only,
    # whereas max_length would also count the prompt tokens
    result = pipe(prompt, max_new_tokens=int(no_words), num_return_sequences=1)
    # Extract the generated text
    blog_content = result[0]["generated_text"]
    return blog_content

# Streamlit app
st.set_page_config(page_title="Blog Generator", page_icon="📝")
st.title("Blog Content Generator 📝")

# Input fields
topic = st.text_input("Enter the Blog Topic")
no_words = st.number_input("Enter the Number of Words", min_value=50, max_value=1000, value=200, step=50)

if st.button("Generate Blog"):
    if topic and no_words:
        with st.spinner("Generating blog content..."):
            blog_content = generate_blog(topic, no_words)
        st.subheader("Generated Blog Content")
        st.write(blog_content)
    else:
        st.error("Please provide both the blog topic and the number of words.")
import streamlit as st
from langchain.prompts import PromptTemplate
from langchain_community.llms import CTransformers

## Function to get a response from the LLaMA 2 model
def getLLamaresponse(input_text, no_words, blog_style):
    ### LLaMA 2 model (quantized GGML weights loaded from a local file)
    llm = CTransformers(
        model='llama-2-7b-chat.ggmlv3.q8_0.bin',
        model_type='llama',
        config={'max_new_tokens': 256, 'temperature': 0.01}
    )

    ## Prompt template
    template = """
    Write a blog for {blog_style} job profile for a topic {input_text}
    within {no_words} words.
    """
    prompt = PromptTemplate(
        input_variables=["blog_style", "input_text", "no_words"],
        template=template
    )

    ## Generate the response from the LLaMA 2 model
    response = llm.invoke(prompt.format(blog_style=blog_style, input_text=input_text, no_words=no_words))
    return response

def main():
    st.set_page_config(
        page_title="Generate Blogs",
        page_icon='🤖',
        layout='centered',
        initial_sidebar_state='collapsed'
    )
    st.header("Generate Blogs 🤖")

    input_text = st.text_input("Enter the Blog Topic")

    ## Creating two more columns for additional fields
    col1, col2 = st.columns([5, 5])
    with col1:
        no_words = st.text_input('Number of Words')
    with col2:
        blog_style = st.selectbox('Writing the blog for', ('Researchers', 'Data Scientist', 'Common People'), index=0)

    submit = st.button("Generate")

    ## Final response
    if submit:
        if not input_text or not no_words:
            st.error("Please enter both the blog topic and the number of words.")
        else:
            try:
                response = getLLamaresponse(input_text, no_words, blog_style)
                st.write(response)
            except Exception as e:
                st.error(f"An error occurred: {e}")

if __name__ == "__main__":
    main()
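CTransformers loads the GGML file from a local path, so llama-2-7b-chat.ggmlv3.q8_0.bin must exist next to the script before the app starts. A minimal sketch of fetching it, assuming the weights come from the TheBloke/Llama-2-7B-Chat-GGML repository on the Hugging Face Hub (the original code only names the file, not its source):

# Hedged sketch: download the quantized GGML weights the app expects.
# The repo_id is an assumption about where the file comes from; the app itself
# only requires that llama-2-7b-chat.ggmlv3.q8_0.bin is present locally.
from huggingface_hub import hf_hub_download

model_path = hf_hub_download(
    repo_id="TheBloke/Llama-2-7B-Chat-GGML",
    filename="llama-2-7b-chat.ggmlv3.q8_0.bin",
    local_dir=".",  # place the file next to the Streamlit script
)
print(f"Model downloaded to {model_path}")

With the weights in place, the app is started with streamlit run app.py (file name assumed); based on the imports, the streamlit, langchain, langchain-community, and ctransformers packages need to be installed.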