ddovidovich committed on
Commit
86eb78e
·
1 Parent(s): 8f8d318

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +24 -12
app.py CHANGED
@@ -124,18 +124,30 @@ def main():
124
 
125
  if st.button("Apply"):
126
  query = prompt
127
- st.write("in progress")
128
- text_splitter = CharacterTextSplitter(chunk_size=4096, chunk_overlap=0)
129
- texts = text_splitter.split_text(input_text)
130
- embeddings = OpenAIEmbeddings()
131
- docsearch = Chroma.from_texts(texts, embeddings, metadatas=[{"source": str(i)} for i in range(len(texts))]).as_retriever()
132
- docs = docsearch.get_relevant_documents(query)
133
- if st.session_state.model_name == 'gpt-4':
134
- max_tkns=5500
135
- else :
136
- max_tkns=3000
137
- chain = load_qa_chain(ChatOpenAI(model = st.session_state.model_name,max_tokens=max_tkns,temperature=0), chain_type="stuff")
138
- st.session_state.output = chain.run(input_documents=docs, question=query)
 
 
 
 
 
 
 
 
 
 
 
 
139
  #st.session_state["output"] = output
140
  #col3.text_area('Result', value=output, key="output_data", height=450)
141
  st.experimental_rerun()
 
124
 
125
  if st.button("Apply"):
126
  query = prompt
127
+ with st.spinner('In progress...'):
128
+ # st.write("in progress")
129
+ # text_splitter = CharacterTextSplitter(chunk_size=4096, chunk_overlap=0)
130
+ # texts = text_splitter.split_text(input_text)
131
+ # embeddings = OpenAIEmbeddings()
132
+ # docsearch = Chroma.from_texts(texts, embeddings, metadatas=[{"source": str(i)} for i in range(len(texts))]).as_retriever()
133
+ # docs = docsearch.get_relevant_documents(query)
134
+ if st.session_state.model_name == 'gpt-4':
135
+ max_tkns=5500
136
+ else :
137
+ max_tkns=3000
138
+ openai.api_key = os.environ["OPENAI_API_KEY"]
139
+ response = openai.ChatCompletion.create(
140
+ model="gpt-4",
141
+ messages=[
142
+ {"role": "system", "content": query},
143
+ {"role": "user", "content": input_text},
144
+ ],
145
+ temperature = 0.7,
146
+ max_tokens=5500
147
+ )
148
+ st.session_state.output = choice.message["content"].strip() for choice in response["choices"]
149
+ # chain = load_qa_chain(ChatOpenAI(model = st.session_state.model_name,max_tokens=max_tkns,temperature=0), chain_type="stuff")
150
+ # st.session_state.output = chain.run(input_documents=docs, question=query)
151
  #st.session_state["output"] = output
152
  #col3.text_area('Result', value=output, key="output_data", height=450)
153
  st.experimental_rerun()