detroitnatif committed on
Commit
82e18fa
·
1 Parent(s): 9e8b2ca

Deploying to Streamlit

Browse files
.env CHANGED
@@ -1,3 +1,3 @@
1
- API_KEy='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'
2
  GROQ_API_KEY = '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'
3
  SERPER_API_KEY = 'fc4e9e4dda3ab09a3b51896f6d0211a7bc19db9e'
 
1
+ OPENAI_API_KEY='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'
2
  GROQ_API_KEY = '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'
3
  SERPER_API_KEY = 'fc4e9e4dda3ab09a3b51896f6d0211a7bc19db9e'
LangchainSearch.py DELETED
@@ -1,14 +0,0 @@
1
- import os
2
- from dotenv import load_dotenv
3
- from langchain.llms import OpenAI
4
- from langchain.agents import load_tools, initialize_agent, AgentType
5
-
6
-
7
- load_dotenv()
8
-
9
- llm = OpenAI(temperature=0.5, streaming=True, openai_api_key=os.getenv('API_KEY'))
10
- tools = load_tools(
11
- ''
12
- )
13
-
14
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
__pycache__/config.cpython-39.pyc ADDED
Binary file (1.19 kB). View file
 
__pycache__/researcher.cpython-39.pyc ADDED
Binary file (3.85 kB). View file
 
app.py CHANGED
@@ -1,6 +1,6 @@
1
  import streamlit as st
2
  from streamlit_chat import message
3
- from GroqSearch.researcher import Researcher
4
  from dotenv import find_dotenv, load_dotenv
5
  load_dotenv(find_dotenv())
6
  st.set_page_config(layout="wide")
 
1
  import streamlit as st
2
  from streamlit_chat import message
3
+ from researcher import Researcher
4
  from dotenv import find_dotenv, load_dotenv
5
  load_dotenv(find_dotenv())
6
  st.set_page_config(layout="wide")
requirements.txt DELETED
@@ -1,122 +0,0 @@
1
- aiohttp==3.9.3
2
- aiosignal==1.3.1
3
- altair==5.2.0
4
- annotated-types==0.6.0
5
- anyio==4.3.0
6
- attrs==23.2.0
7
- backoff==2.2.1
8
- beautifulsoup4==4.12.3
9
- blinker==1.7.0
10
- cachetools==5.3.3
11
- certifi==2024.2.2
12
- chardet==5.2.0
13
- charset-normalizer==3.3.2
14
- click==8.1.7
15
- colorama==0.4.6
16
- contourpy==1.2.0
17
- cycler==0.12.1
18
- dataclasses-json==0.6.4
19
- distro==1.9.0
20
- emoji==2.10.1
21
- faiss-cpu==1.8.0
22
- filelock==3.9.0
23
- filetype==1.2.0
24
- fonttools==4.49.0
25
- frozenlist==1.4.1
26
- fsspec==2024.2.0
27
- gitdb==4.0.11
28
- GitPython==3.1.42
29
- greenlet==3.0.3
30
- groq==0.4.2
31
- h11==0.14.0
32
- httpcore==1.0.4
33
- httpx==0.27.0
34
- huggingface-hub==0.21.3
35
- idna==3.6
36
- importlib-metadata==7.0.1
37
- Jinja2==3.1.2
38
- joblib==1.3.2
39
- jsonpatch==1.33
40
- jsonpath-python==1.0.6
41
- jsonpointer==2.4
42
- jsonschema==4.21.1
43
- jsonschema-specifications==2023.12.1
44
- kiwisolver==1.4.5
45
- langchain==0.1.10
46
- langchain-community==0.0.25
47
- langchain-core==0.1.28
48
- langchain-groq==0.0.1
49
- langchain-text-splitters==0.0.1
50
- langdetect==1.0.9
51
- langsmith==0.1.14
52
- lxml==5.1.0
53
- markdown-it-py==3.0.0
54
- MarkupSafe==2.1.3
55
- marshmallow==3.21.0
56
- matplotlib==3.8.3
57
- mdurl==0.1.2
58
- mpmath==1.3.0
59
- multidict==6.0.5
60
- mypy-extensions==1.0.0
61
- networkx==3.2.1
62
- nltk==3.8.1
63
- numpy==1.26.4
64
- orjson==3.9.15
65
- packaging==23.2
66
- pandas==2.2.1
67
- pillow==10.2.0
68
- protobuf==4.25.3
69
- pyarrow==15.0.0
70
- pydantic==2.6.3
71
- pydantic_core==2.16.3
72
- pydeck==0.8.1b0
73
- Pygments==2.17.2
74
- pyparsing==3.1.1
75
- python-dateutil==2.9.0.post0
76
- python-dotenv==1.0.1
77
- python-iso639==2024.2.7
78
- pytz==2024.1
79
- PyYAML==6.0.1
80
- rapidfuzz==3.6.1
81
- referencing==0.33.0
82
- regex==2023.12.25
83
- requests==2.31.0
84
- rich==13.7.1
85
- rpds-py==0.18.0
86
- safetensors==0.4.2
87
- scikit-learn==1.4.1.post1
88
- scipy==1.12.0
89
- seaborn==0.13.2
90
- sentence-transformers==2.5.1
91
- six==1.16.0
92
- smmap==5.0.1
93
- sniffio==1.3.1
94
- soupsieve==2.5
95
- SQLAlchemy==2.0.27
96
- streamlit==1.31.1
97
- streamlit-chat==0.1.1
98
- sympy==1.12
99
- tabulate==0.9.0
100
- tenacity==8.2.3
101
- threadpoolctl==3.3.0
102
- tokenizers==0.15.2
103
- toml==0.10.2
104
- toolz==0.12.1
105
- torch==2.2.1+cu118
106
- torchaudio==2.2.1+cu118
107
- torchvision==0.17.1+cu118
108
- tornado==6.4
109
- tqdm==4.66.2
110
- transformers==4.38.2
111
- typing-inspect==0.9.0
112
- typing_extensions==4.8.0
113
- tzdata==2024.1
114
- tzlocal==5.2
115
- unstructured==0.11.8
116
- unstructured-client==0.21.0
117
- urllib3==2.2.1
118
- validators==0.22.0
119
- watchdog==4.0.0
120
- wrapt==1.16.0
121
- yarl==1.9.4
122
- zipp==3.17.0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
researcher.py CHANGED
@@ -85,13 +85,17 @@ class Researcher:
85
  return research_content
86
 
87
  def research_given_query(self, research_objective, research_content):
88
-
89
  docs = self.text_splitter.split_documents(research_content)
 
 
 
 
90
  self.db = FAISS.from_documents(documents=docs, embedding=self.hfembeddings)
91
  bot = self.research_answerer()
92
- research_out =bot({"query": research_objective})
93
  return research_out["result"]
94
 
 
95
  def research(self, query):
96
  search_articles = self.search_articles(query)
97
  urls = self.get_urls(search_articles)
 
85
  return research_content
86
 
87
  def research_given_query(self, research_objective, research_content):
 
88
  docs = self.text_splitter.split_documents(research_content)
89
+ if not docs: # Check if docs is empty
90
+ print("No documents found for research content.")
91
+ return None # Handle the case as appropriate for your application
92
+ # Proceed with FAISS index creation only if documents are available
93
  self.db = FAISS.from_documents(documents=docs, embedding=self.hfembeddings)
94
  bot = self.research_answerer()
95
+ research_out = bot({"query": research_objective})
96
  return research_out["result"]
97
 
98
+
99
  def research(self, query):
100
  search_articles = self.search_articles(query)
101
  urls = self.get_urls(search_articles)