Gayatrikh16 committed on
Commit 7596367 · verified · 1 Parent(s): ac2e695

Update app.py

Files changed (1)
  1. app.py +34 -41
app.py CHANGED
@@ -1,53 +1,46 @@
 import streamlit as st
-from langchain.prompts import PromptTemplate
-from langchain.llms import CTransformers
-
-## Function To get response from LLAma 2 model
-
-def getLLamaresponse(input_text,keywords,blog_style):
-
-    ### LLama2 model
-    llm=CTransformers(model="models/llama-2-7b-chat.ggmlv3.q2_K.bin",
-                      model_type='llama',
-                      config={'max_new_tokens':250,
-                              'temperature':0.01})
-
-    ## Prompt Template
-
-    template="""
-        Generate project idea for {blog_style} by using keywords like {keywords} for the profession of {input_text} .
-        """
-
-    prompt=PromptTemplate(input_variables=["blog_style","input_text",'keywords'],
-                          template=template)
-
-    ## Generate the ressponse from the LLama 2 model
-    response=llm(prompt.format(blog_style=blog_style,input_text=input_text,keywords=keywords))
-    print(response)
-    return response

-st.set_page_config(page_title="Generate Project Idea",
-                   page_icon='🤖',
-                   layout='centered',
-                   initial_sidebar_state='collapsed')

-st.header("Generate Project Idea 🤖")

-input_text=st.text_input("Enter the Topic")

-## creating to more columns for additonal 2 fields

-col1,col2=st.columns([5,5])

 with col1:
-    no_words=st.text_input('Keywords')
 with col2:
-    blog_style=st.selectbox('Generating project idea for',
-                            ('Researchers','Data Scientist','Software Developer','Common People', " "),index=0)
-
-submit=st.button("Generate")

-## Final response
 if submit:
-    st.write(getLLamaresponse(input_text,no_words,blog_style))
 
 
 import streamlit as st
+from transformers import pipeline

+# Function to get response from LLaMA 2 model

+def getLLamaresponse(input_text, keywords, blog_style):
+    # Load the LLaMA 2 model from Hugging Face
+    model_name = "meta-llama/Llama-2-7b-chat-hf"
+    llm = pipeline('text-generation', model=model_name)
+
+    # Prompt Template
+    template = """
+    Generate project idea for {blog_style} by using keywords like {keywords} for the profession of {input_text}.
+    """
+
+    # Format the prompt
+    prompt = template.format(blog_style=blog_style, input_text=input_text, keywords=keywords)

+    # Generate the response from the LLaMA 2 model
+    response = llm(prompt, max_length=250, temperature=0.01)
+    return response[0]['generated_text']

+st.set_page_config(page_title="Generate Project Idea",
+                   page_icon='🤖',
+                   layout='centered',
+                   initial_sidebar_state='collapsed')
+
+st.header("Generate Project Idea 🤖")

+input_text = st.text_input("Enter the Topic")

+# Creating two more columns for additional fields
+col1, col2 = st.columns([5, 5])

 with col1:
+    no_words = st.text_input('Keywords')
 with col2:
+    blog_style = st.selectbox('Generating project idea for',
+                              ('Researchers', 'Data Scientist', 'Software Developer', 'Common People', " "), index=0)
+
+submit = st.button("Generate")

+# Final response
 if submit:
+    response = getLLamaresponse(input_text, no_words, blog_style)
+    st.write(response)
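
A short, hedged usage sketch of the code added in this commit (not part of the commit itself): the meta-llama/Llama-2-7b-chat-hf checkpoint is gated on Hugging Face, so loading it through pipeline() requires accepting Meta's license and authenticating with an access token, and because getLLamaresponse builds the pipeline on every call, caching the loader with st.cache_resource would avoid re-loading the 7B model on each Streamlit rerun. The load_llm helper, the placeholder token, and the sampling parameters below are illustrative assumptions, not code from this repository.

import streamlit as st
from transformers import pipeline

@st.cache_resource  # cache the pipeline so the 7B model is loaded only once per session
def load_llm():
    return pipeline(
        "text-generation",
        model="meta-llama/Llama-2-7b-chat-hf",
        token="hf_xxx",  # placeholder access token; older transformers versions use use_auth_token instead
    )

llm = load_llm()
prompt = "Generate project idea for Data Scientist by using keywords like NLP, healthcare for the profession of doctor."
result = llm(prompt, max_new_tokens=250, do_sample=True, temperature=0.7)
print(result[0]["generated_text"])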