warhawkmonk committed · verified
Commit d536d6a · 1 Parent(s): e5f7f1b

Update data_collector.py

Files changed (1): data_collector.py (+50 -50)
data_collector.py CHANGED
@@ -6,64 +6,64 @@ from sentence_transformers import SentenceTransformer,util
 from transformers import pipeline
 import requests

-# def consume_llm_api(prompt):
-#     """
-#     Sends a prompt to the LLM API and processes the streamed response.
-#     """
-#     url = "https://8417-201-238-124-65.ngrok-free.app/api/llm-response"
-#     headers = {"Content-Type": "application/json"}
-#     payload = {"prompt": prompt,"extension":"1"}
+def consume_llm_api(prompt):
+    """
+    Sends a prompt to the LLM API and processes the streamed response.
+    """
+    url = "https://8417-201-238-124-65.ngrok-free.app/api/llm-response"
+    headers = {"Content-Type": "application/json"}
+    payload = {"prompt": prompt,"extension":"1"}


-#     print("Sending prompt to the LLM API...")
-#     response_ = requests.post(url, json=payload,verify=False)
-#     response_data = response_.json()
-#     return response_data['text']
-def consume_llm_api(prompt):
+    print("Sending prompt to the LLM API...")
+    response_ = requests.post(url, json=payload,verify=False)
+    response_data = response_.json()
+    return response_data['text']
+# def consume_llm_api(prompt):

-    import requests
-    from sentence_transformers import SentenceTransformer
-    from pinecone import Pinecone, ServerlessSpec
-    Gen_api = "https://8417-201-238-124-65.ngrok-free.app/api/llm-response"
-    api_key = "pcsk_2EhvKP_GqGkpAjF4p4ziL7PgrgM9xuKcthX9gtqhyLxV3UaMmWTQufW4qKZrjhLrf2d1ma"
-    pc = Pinecone(api_key=api_key)
-    model = SentenceTransformer("all-mpnet-base-v2")
-    try:
-        index_name = "quickstart"
-        pc.create_index(
-            name=index_name,
-            dimension=768,
-            metric="cosine",
-            spec=ServerlessSpec(
-                cloud="aws",
-                region="us-east-1"
-            )
-        )
-    except:
-        pass
-    index = pc.Index(index_name)
-    index.upsert(
-        vectors=[
-            {
-                "id": "lorum",
-                "values": [float(i) for i in list(model.encode("lorum"))],
-                "metadata": {"string":str(prompt)}
+#     import requests
+#     from sentence_transformers import SentenceTransformer
+#     from pinecone import Pinecone, ServerlessSpec
+#     Gen_api = "https://8417-201-238-124-65.ngrok-free.app/api/llm-response"
+#     api_key = "pcsk_2EhvKP_GqGkpAjF4p4ziL7PgrgM9xuKcthX9gtqhyLxV3UaMmWTQufW4qKZrjhLrf2d1ma"
+#     pc = Pinecone(api_key=api_key)
+#     model = SentenceTransformer("all-mpnet-base-v2")
+#     try:
+#         index_name = "quickstart"
+#         pc.create_index(
+#             name=index_name,
+#             dimension=768,
+#             metric="cosine",
+#             spec=ServerlessSpec(
+#                 cloud="aws",
+#                 region="us-east-1"
+#             )
+#         )
+#     except:
+#         pass
+#     index = pc.Index(index_name)
+#     index.upsert(
+#         vectors=[
+#             {
+#                 "id": "lorum",
+#                 "values": [float(i) for i in list(model.encode("lorum"))],
+#                 "metadata": {"string":str(prompt)}

-            }
-        ]
-    )
+#             }
+#         ]
+#     )

-    gen_api_response = requests.post(url = Gen_api,json={"api_key": api_key},verify=False)
+#     gen_api_response = requests.post(url = Gen_api,json={"api_key": api_key},verify=False)

-    if gen_api_response.json().get("status"):
-        response = index.query(
-            vector=[float(i) for i in model.encode(str(prompt))],
-            top_k=1,
-            include_metadata=True,
-        )
+#     if gen_api_response.json().get("status"):
+#         response = index.query(
+#             vector=[float(i) for i in model.encode(str(prompt))],
+#             top_k=1,
+#             include_metadata=True,
+#         )


-        return response['matches'][0]['metadata']['string']
+#         return response['matches'][0]['metadata']['string']


 def relevent_value(long_query,count=3):
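After this commit, the active consume_llm_api posts the prompt straight to the ngrok-hosted endpoint and returns the "text" field of the JSON response; the Pinecone-backed variant is left commented out. A minimal usage sketch of the now-active path follows. The endpoint URL, payload keys, and response field are taken from the diff; the timeout, status check, and example prompt are illustrative assumptions, not part of the commit.

import requests

def consume_llm_api(prompt):
    # Endpoint and payload shape as they appear in the diff; the ngrok URL is ephemeral.
    url = "https://8417-201-238-124-65.ngrok-free.app/api/llm-response"
    payload = {"prompt": prompt, "extension": "1"}
    # verify=False mirrors the committed code; timeout and raise_for_status are added here as assumptions.
    response = requests.post(url, json=payload, verify=False, timeout=60)
    response.raise_for_status()
    # The committed code reads the generated text from the "text" key of the JSON body.
    return response.json()["text"]

if __name__ == "__main__":
    # Hypothetical prompt, for illustration only.
    print(consume_llm_api("Summarize the collected survey responses."))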