Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -3,32 +3,34 @@ import json
|
|
3 |
import gradio as gr
|
4 |
import openai
|
5 |
from sentence_transformers import SentenceTransformer
|
6 |
-
import
|
7 |
|
8 |
-
# Load the OpenAI API key from an environment variable
|
9 |
openai.api_key = os.getenv("openai")
|
10 |
|
11 |
-
# Load the embedding model
|
12 |
model = SentenceTransformer('paraphrase-multilingual-MiniLM-L12-v2')
|
13 |
|
14 |
-
# Load the data (optional)
|
15 |
with open("tiyam_qa_data.json", "r", encoding="utf-8") as f:
|
16 |
data = json.load(f)
|
17 |
|
18 |
-
|
19 |
-
|
20 |
-
|
21 |
-
PINECONE_INDEX_NAME = os.getenv("PINECONE_INDEX_NAME", "tiyam-chat")
|
22 |
|
23 |
-
|
24 |
-
pinecone.init(api_key=PINECONE_API_KEY, environment=PINECONE_ENVIRONMENT)
|
25 |
|
26 |
-
|
27 |
-
if
|
28 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
29 |
|
30 |
-
|
31 |
-
index = pinecone.Index(PINECONE_INDEX_NAME)
|
32 |
|
33 |
def retrieve_answer(query, threshold=0.65, top_k=1):
|
34 |
query_embedding = model.encode([query])[0]
|
|
|
# NOTE(review): reconstructed from a scraped diff view. The diff hunk starts at
# file line 3; `import os` and `import json` appear above this region (outside
# the visible hunk) — confirm they exist at the top of app.py.
import gradio as gr
import openai
from sentence_transformers import SentenceTransformer
from pinecone import Pinecone, ServerlessSpec

# OpenAI API key comes from the environment (Space secret named "openai").
openai.api_key = os.getenv("openai")

# Multilingual sentence-embedding model; its vectors are 384-dimensional,
# which must match the index `dimension` below.
model = SentenceTransformer('paraphrase-multilingual-MiniLM-L12-v2')

# Load the QA data used by the app.
with open("tiyam_qa_data.json", "r", encoding="utf-8") as f:
    data = json.load(f)

# Pinecone (v3 serverless API) configuration.
api_key = os.getenv("PINECONE_API_KEY")
# BUG FIX: the previous default, "us-west1-gcp", is a GCP region name, but the
# ServerlessSpec below requests cloud="aws" — with PINECONE_ENVIRONMENT unset,
# index creation would fail. Default to an AWS serverless region instead.
region = os.getenv("PINECONE_ENVIRONMENT", "us-east-1")
index_name = os.getenv("PINECONE_INDEX_NAME", "tiyam-chat")

pc = Pinecone(api_key=api_key)

# Create the index only on first run; subsequent starts reuse it.
existing_indexes = pc.list_indexes().names()
if index_name not in existing_indexes:
    pc.create_index(
        name=index_name,
        dimension=384,  # embedding size of the MiniLM model above
        metric="cosine",
        spec=ServerlessSpec(
            cloud="aws",
            region=region,
        ),
    )

index = pc.Index(index_name)
35 |
def retrieve_answer(query, threshold=0.65, top_k=1):
|
36 |
query_embedding = model.encode([query])[0]
|