Update app.py
app.py
CHANGED
@@ -4,6 +4,26 @@ import requests
 import os
 from transformers import AutoTokenizer, AutoModelForCausalLM
 from huggingface_hub import login
+from transformers import AutoTokenizer, AutoModelForCausalLM
+from peft import PeftModel
+import torch
+
+@st.cache_resource
+def load_fingpt_lora():
+    base_model_id = "meta-llama/Llama-2-7b-hf"
+    lora_adapter_id = "FinGPT/fingpt-mt_llama2-7b_lora"
+    tokenizer = AutoTokenizer.from_pretrained(base_model_id, use_auth_token=HF_TOKEN)
+
+    base_model = AutoModelForCausalLM.from_pretrained(
+        base_model_id,
+        torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
+        device_map="auto",
+        use_auth_token=HF_TOKEN
+    )
+
+    model = PeftModel.from_pretrained(base_model, lora_adapter_id, use_auth_token=HF_TOKEN)
+    return model, tokenizer
+
 
 # Load token from Hugging Face Space secrets
 HF_TOKEN = os.getenv("Allie", None)
@@ -12,7 +32,7 @@ if HF_TOKEN:
 
 # === Available Models for Selection ===
 model_map = {
-    "FinGPT
+    "FinGPT LoRA": {"id": "FinGPT/fingpt-mt_llama2-7b_lora", "local": True, "custom_loader": load_fingpt_lora},
     "InvestLM (AWQ)": {"id": "yixuantt/InvestLM-mistral-AWQ", "local": False},
     "FinLLaMA (LLaMA3.1-8B)": {"id": "us4/fin-llama3.1-8b", "local": False},
    "FinanceConnect (13B)": {"id": "ceadar-ie/FinanceConnect-13B", "local": True},