Commit ba72f62
Parent(s): 13bbbc2
update the logic

- app.py +7 -0
- backend/services/interview_engine.py +18 -7
app.py
CHANGED
@@ -1,3 +1,10 @@
+import os
+
+# Ensure Hugging Face writes cache to a safe writable location on Spaces
+os.environ["HF_HOME"] = "/tmp/huggingface"
+os.environ["TRANSFORMERS_CACHE"] = "/tmp/huggingface/transformers"
+os.environ["HUGGINGFACE_HUB_CACHE"] = "/tmp/huggingface/hub"
+
 from flask import Flask, render_template, redirect, url_for, flash, request
 from flask_login import LoginManager, login_required, current_user
 from werkzeug.utils import secure_filename
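The cache overrides only take effect if they run before anything imports transformers or huggingface_hub, which is why the commit puts them at the very top of app.py. A minimal sketch of the same pattern, where the model id "gpt2" is just an illustrative placeholder and not necessarily one this Space uses:

import os

# Redirect every Hugging Face cache to a writable path before the libraries
# are imported; on Spaces the default home directory may not be writable.
os.environ["HF_HOME"] = "/tmp/huggingface"
os.environ["TRANSFORMERS_CACHE"] = "/tmp/huggingface/transformers"
os.environ["HUGGINGFACE_HUB_CACHE"] = "/tmp/huggingface/hub"

from transformers import AutoTokenizer

# The download now lands under /tmp/huggingface instead of the read-only home dir.
tokenizer = AutoTokenizer.from_pretrained("gpt2")
print(tokenizer.tokenize("writable cache check"))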
backend/services/interview_engine.py
CHANGED
@@ -1,3 +1,10 @@
+import os
+
+# Ensure Hugging Face writes cache to a safe writable location on Spaces
+os.environ["HF_HOME"] = "/tmp/huggingface"
+os.environ["TRANSFORMERS_CACHE"] = "/tmp/huggingface/transformers"
+os.environ["HUGGINGFACE_HUB_CACHE"] = "/tmp/huggingface/hub"
+
 import requests
 import os
 import json
@@ -114,15 +121,19 @@ groq_llm = ChatGroq(
     api_key=chat_groq_api
 )
 
-from huggingface_hub import login
-import os
+# from huggingface_hub import login
+# import os
 
-HF_TOKEN = os.getenv("HF_TOKEN")
+# HF_TOKEN = os.getenv("HF_TOKEN")
+
+# if HF_TOKEN:
+#     login(HF_TOKEN)
+# else:
+#     raise EnvironmentError("Missing HF_TOKEN environment variable.")
+from huggingface_hub import HfApi
+
+api = HfApi(token=os.getenv("HF_TOKEN"))  # no need to login()
 
-if HF_TOKEN:
-    login(HF_TOKEN)
-else:
-    raise EnvironmentError("Missing HF_TOKEN environment variable.")
 
 #Load mistral Model
 from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
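The second hunk swaps the process-wide login() call for an HfApi client that carries the token explicitly, so the module no longer raises at import time when HF_TOKEN is unset. A rough sketch of how that client might be exercised, assuming only that HF_TOKEN may or may not be present (whoami() is a standard huggingface_hub call, used here purely as a smoke test):

import os
from huggingface_hub import HfApi

# Pass the token per-client instead of calling login(); with no HF_TOKEN the
# client simply runs anonymously rather than crashing the app on startup.
api = HfApi(token=os.getenv("HF_TOKEN"))

if os.getenv("HF_TOKEN"):
    # Verify the token actually authenticates before any gated downloads.
    print("Authenticated as:", api.whoami()["name"])
else:
    print("HF_TOKEN not set; continuing with anonymous access.")

Public models still download anonymously; the token only matters for gated or private repositories, so this keeps the Space usable even when the secret is missing.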