import streamlit as st
from transformers import pipeline
# Initialize the Hugging Face pipeline
pipe = pipeline("text-generation", model="mistralai/mathstral-7B-v0.1")
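# Note: mathstral-7B is a 7B-parameter model and needs several GB of memory to load.
# A minimal optional sketch (an assumption, not part of the original app): cache the
# pipeline so Streamlit does not reload the model on every rerun, and let transformers
# place the weights on available hardware (device_map="auto" requires `accelerate`):
#
#   @st.cache_resource
#   def load_pipe():
#       return pipeline("text-generation",
#                       model="mistralai/mathstral-7B-v0.1",
#                       device_map="auto")
#
#   pipe = load_pipe()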
# Function to get response from the model
def get_response(input_text, keywords, blog_style, max_new_tokens=250):
    # Prompt template for the model
    template = """
    Generate technical project ideas for a {blog_style} job profile on the topic {input_text} using these keywords: {keywords}.
    """
    prompt = template.format(blog_style=blog_style, input_text=input_text, keywords=keywords)
    # Generate the response; return_full_text=False keeps only the newly generated text
    response = pipe(prompt, max_new_tokens=max_new_tokens, return_full_text=False)
    return response[0]['generated_text']
# Streamlit configuration
st.set_page_config(page_title="Generate Project Idea",
                   page_icon='🤖',
                   layout='centered',
                   initial_sidebar_state='collapsed')
st.header("Generate Project Idea 🤖")
input_text = st.text_input("Enter the Topic")
# Creating two more columns for additional fields
col1, col2 = st.columns([5, 5])
with col1:
    keywords = st.text_input('Keywords')
with col2:
    blog_style = st.selectbox('Generating project idea for',
                              ('Researchers', 'Data Scientist', 'Software Developer', 'Common People'), index=0)
submit = st.button("Generate")
# Final response
if submit:
    # max_new_tokens defaults to 250 inside get_response
    response = get_response(input_text, keywords, blog_style)
    st.write(response)
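# To try the app locally (assuming this file is saved as app.py):
#   pip install streamlit transformers torch
#   streamlit run app.py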