HamidOmarov committed
Commit 8e5c11e · verified · 1 Parent(s): a1e8893

Delete day2

Files changed (2)
  1. day2/chatbot.py +0 -16
  2. day2/first_chain.py +0 -39
day2/chatbot.py DELETED
@@ -1,16 +0,0 @@
- cat > day2/chatbot.py << 'PY'
- from dotenv import load_dotenv
- load_dotenv()
-
- import gradio as gr
- from langchain_groq import ChatGroq
-
- def chat(message, history):
-     llm = ChatGroq(model="llama3-8b-8192")
-     resp = llm.invoke(message)
-     return resp.content
-
- demo = gr.ChatInterface(chat)
- if __name__ == "__main__":
-     demo.launch()
- PY
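The deleted day2/chatbot.py captured its own shell heredoc wrapper (the leading cat > day2/chatbot.py << 'PY' and trailing PY lines) as file content, so it was not valid Python. Below is a minimal sketch of the same script without that wrapper; it assumes a GROQ_API_KEY is supplied via the .env file that load_dotenv() reads, and otherwise keeps the model name and Gradio ChatInterface wiring from the deleted file.

# Minimal sketch of day2/chatbot.py without the stray heredoc lines.
# Assumes GROQ_API_KEY is set in a local .env file.
from dotenv import load_dotenv

load_dotenv()

import gradio as gr
from langchain_groq import ChatGroq


def chat(message, history):
    # Answer the latest user message with the Groq-hosted Llama 3 model.
    llm = ChatGroq(model="llama3-8b-8192")
    resp = llm.invoke(message)
    return resp.content


demo = gr.ChatInterface(chat)

if __name__ == "__main__":
    demo.launch()

Constructing ChatGroq once at module level would avoid rebuilding the client on every message, but the per-call construction above mirrors the deleted script.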
day2/first_chain.py DELETED
@@ -1,39 +0,0 @@
- from dotenv import load_dotenv
- import os
- from langchain_groq import ChatGroq
- from langchain.prompts import PromptTemplate
- from langchain.chains import LLMChain
-
- load_dotenv()
-
- # Step 1: Set up the LLM (Groq with LLaMA 3.1)
- llm = ChatGroq(model="llama3-8b-8192")
-
- # Step 2: Define prompt template with Azerbaijan context
- template = """
- You are an expert assistant with deep knowledge about Azerbaijan.
-
- Here is some context about Azerbaijan:
- Azerbaijan is a country located at the crossroads of Eastern Europe and Western Asia. It is known for its rich culture, history, oil resources, and modern capital Baku.
-
- Now, answer the following question clearly and concisely:
- {question}
- """
-
- prompt = PromptTemplate.from_template(template)
-
- # Step 3: Create the LangChain LLMChain
- chain = LLMChain(llm=llm, prompt=prompt)
-
- # Step 4: Run the chain with a user question
- if __name__ == "__main__":
-     user_question = input("Enter your question: ")
-     answer = chain.run(user_question)
-     print("\nAnswer:\n", answer)
- # Step 5: Test the chain with a sample question
- # Example: "What is the capital of Azerbaijan?"
- # This will prompt the user to enter a question and provide an answer based on the context
- # Note: Ensure you have the necessary environment set up to run this code, including the
- # LangChain and Groq libraries installed and configured.
- # You can run this script in an environment where the Groq model is accessible.
- # Make sure to handle any exceptions or errors that may arise during execution.
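The deleted day2/first_chain.py builds the chain with LLMChain and chain.run(), both of which are deprecated in recent LangChain releases in favor of composing the prompt and model directly; the "Step 1" comment also says LLaMA 3.1, while the configured model id, llama3-8b-8192, is a Llama 3 model. A hedged sketch of an equivalent script using the prompt | llm composition, keeping the same prompt and model and again assuming GROQ_API_KEY comes from .env:

# Sketch of an equivalent chain using prompt | llm composition,
# since LLMChain and .run() are deprecated in newer LangChain releases.
# Assumes GROQ_API_KEY is available via a local .env file.
from dotenv import load_dotenv
from langchain_core.prompts import PromptTemplate
from langchain_groq import ChatGroq

load_dotenv()

llm = ChatGroq(model="llama3-8b-8192")  # Llama 3 8B hosted on Groq

template = """
You are an expert assistant with deep knowledge about Azerbaijan.

Here is some context about Azerbaijan:
Azerbaijan is a country located at the crossroads of Eastern Europe and Western Asia. It is known for its rich culture, history, oil resources, and modern capital Baku.

Now, answer the following question clearly and concisely:
{question}
"""

prompt = PromptTemplate.from_template(template)

# The "|" composition yields a runnable that formats the prompt, calls the
# chat model, and returns an AIMessage; .content holds the text answer.
chain = prompt | llm

if __name__ == "__main__":
    user_question = input("Enter your question: ")
    answer = chain.invoke({"question": user_question})
    print("\nAnswer:\n", answer.content)

Note that chain.invoke takes a dict keyed by the template variable ({"question": ...}) rather than the bare string that chain.run accepted.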