CCockrum committed on
Commit 2544f4c · verified · 1 Parent(s): 47e51a0

Update app.py

Files changed (1)
  1. app.py +22 -11
app.py CHANGED
@@ -8,15 +8,26 @@ import tempfile
  from transformers import pipeline


- # Load Two Models
- light_chat_model = pipeline(
-     "text-generation",
-     model="mistralai/Mistral-7B-Instruct-v0.3",
-     revision="refs/pr/23",  # Use latest stable revision if needed
-     use_auth_token="HUGGINGFACE_TOKEN",
-     max_length=256
- )
- advanced_summarizer = pipeline("summarization", model="facebook/bart-large-cnn")
+ # Your Hugging Face API Token (set this safely)
+ HF_TOKEN = os.getenv("HUGGINGFACE_TOKEN")  # Recommended: use environment variable
+
+ # Mistral Inference API URL
+ API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.3"
+
+ # Headers for authentication
+ headers = {
+     "Authorization": f"Bearer {HF_TOKEN}"
+ }
+
+ # Function to query Mistral API
+ def query_mistral(question):
+     payload = {
+         "inputs": question,
+         "parameters": {"max_length": 256}
+     }
+     response = requests.post(API_URL, headers=headers, json=payload)
+     output = response.json()
+     return output[0]["generated_text"]

  def answer_investing_question(question):
      response = light_chat_model(question)[0]['generated_text']

@@ -202,12 +213,12 @@ with gr.Blocks(theme="soft") as iface:
  output_sector = gr.Dataframe()
  with gr.Tab("Historical Price Chart"):
      output_chart = gr.Plot()
- with gr.Blocks(theme="soft") as iface:
+ with gr.Blocks(theme="soft") as iface:
  with gr.Tab("Ask About Investing"):
      user_question = gr.Textbox(label="Ask about investing...")
      answer_box = gr.Textbox(label="Answer")
      ask_button = gr.Button("Get Answer")
-     ask_button.click(fn=answer_investing_question, inputs=[user_question], outputs=[answer_box])
+     ask_button.click(fn=query_mistral, inputs=[user_question], outputs=[answer_box])


  submit_btn = gr.Button("Run Analysis")
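
Note on the new helper: query_mistral as committed assumes the Inference API always returns a list of generations. When the hosted model is still loading or the token is rejected, the API instead returns a JSON object with an "error" key, and output[0]["generated_text"] raises. A minimal hardened variant is sketched below; it is not part of this commit, and the query_mistral_safe name and the timeout value are illustrative only.

    import os
    import requests

    # Endpoint and token name mirror the values added in this commit.
    API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.3"
    headers = {"Authorization": f"Bearer {os.getenv('HUGGINGFACE_TOKEN', '')}"}

    def query_mistral_safe(question):
        # Same payload shape as query_mistral in the diff above.
        payload = {"inputs": question, "parameters": {"max_length": 256}}
        response = requests.post(API_URL, headers=headers, json=payload, timeout=60)
        output = response.json()
        # Error responses (e.g. model loading, invalid token) are dicts with an "error" key,
        # not the usual list of {"generated_text": ...} items.
        if isinstance(output, dict) and "error" in output:
            return f"API error: {output['error']}"
        return output[0]["generated_text"]

Returning the error string instead of raising keeps the Gradio answer_box populated with something readable when the API call fails.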