pratikshahp committed
Commit 1fb01d9 · verified · 1 Parent(s): 485cde9

Update app.py

Files changed (1)
  1. app.py +7 -4
app.py CHANGED
@@ -1,13 +1,15 @@
 import os
 from dotenv import load_dotenv
 import gradio as gr
-from langchain import PromptTemplate
+from langchain_core.prompts import PromptTemplate
 from langchain_huggingface import HuggingFaceEndpoint
+from langchain_core.chains import LLMChain
 
 # Load environment variables
 load_dotenv()
 
 HF_TOKEN = os.getenv("HF_TOKEN")
+
 # Initialize the HuggingFace model
 llm = HuggingFaceEndpoint(
     repo_id="mistralai/Mistral-7B-Instruct-v0.3",
@@ -15,6 +17,7 @@ llm = HuggingFaceEndpoint(
     temperature=0.7,
     max_new_tokens=200
 )
+
 # Define a prompt template for generating a blog
 TEMPLATE = """
 Write a detailed blog post on the following topic:
@@ -24,9 +27,9 @@ Make sure the blog post is informative, engaging, and well-structured.
 
 # Create a prompt template instance
 blog_prompt_template = PromptTemplate(input_variables=["topic"], template=TEMPLATE)
-prompt = blog_prompt_template
-# Initialize the LLMChain
-blog_chain = llm | prompt
+
+# Initialize the LLMChain with the HuggingFace model and prompt template
+blog_chain = LLMChain(llm=llm, prompt=blog_prompt_template)
 
 def generate_blog_post(topic: str, author_name: str) -> str:
     if topic:
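
Note on the new import path: as far as I can tell, LLMChain is exported from the langchain package (from langchain.chains import LLMChain), and langchain_core does not ship a chains module, so the import added in this revision may fail with a ModuleNotFoundError at runtime. Below is a minimal sketch of how the updated pieces could be wired together and invoked, assuming the langchain.chains import path and an explicitly passed API token (the token line of the HuggingFaceEndpoint call is not visible in the diff); the Gradio UI code from the rest of app.py is omitted, and the sample topic is purely illustrative.

import os
from dotenv import load_dotenv
from langchain_core.prompts import PromptTemplate
from langchain_huggingface import HuggingFaceEndpoint
from langchain.chains import LLMChain  # assumed import path, not langchain_core.chains

load_dotenv()
HF_TOKEN = os.getenv("HF_TOKEN")

# Initialize the HuggingFace model (the token argument is an assumption;
# that line is hidden between hunks in the diff above)
llm = HuggingFaceEndpoint(
    repo_id="mistralai/Mistral-7B-Instruct-v0.3",
    huggingfacehub_api_token=HF_TOKEN,
    temperature=0.7,
    max_new_tokens=200,
)

# Prompt template; the body is abridged from the lines visible in the diff
TEMPLATE = """
Write a detailed blog post on the following topic:
{topic}
Make sure the blog post is informative, engaging, and well-structured.
"""
blog_prompt_template = PromptTemplate(input_variables=["topic"], template=TEMPLATE)

# Build the chain as in the new revision and invoke it with a sample topic
blog_chain = LLMChain(llm=llm, prompt=blog_prompt_template)
result = blog_chain.invoke({"topic": "Why unit tests matter"})  # returns a dict
print(result["text"])  # LLMChain's default output key is "text"

Recent LangChain releases deprecate LLMChain in favor of composing the runnables directly (blog_chain = blog_prompt_template | llm), which is essentially the pipe form the previous revision attempted, but with the prompt and model in the reverse order.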