# narayangpt / main.py
# (Hugging Face Space — web-view residue removed; was: commit d5b654a, 12.7 kB)
from flask import Flask, request, jsonify, render_template, Response
import os
import requests
import json
from scipy import spatial
from flask_cors import CORS
import random
import numpy as np
from langchain_chroma import Chroma
from chromadb import Documents, EmbeddingFunction, Embeddings
# Flask application with permissive CORS so a browser front-end on another
# origin can call the JSON API endpoints below.
app = Flask(__name__)
CORS(app)
class MyEmbeddingFunction(EmbeddingFunction):
    """Chroma embedding function backed by the public Hugging Face inference
    endpoint for BAAI/bge-large-en-v1.5.

    Both methods POST the text(s) to the HF API and return the "embeddings"
    field of the JSON response, retrying transient failures a few times.
    """

    _URL = "https://api-inference.huggingface.co/models/BAAI/bge-large-en-v1.5"
    _RETRIES = 5
    # Browser-like headers: the endpoint is HF's public web-facing API.
    _HEADERS = {
        'accept': '*/*',
        'accept-language': 'en-US,en;q=0.9',
        'content-type': 'application/json',
        'origin': 'https://huggingface.co',
        'priority': 'u=1, i',
        'referer': 'https://huggingface.co/',
        'sec-ch-ua': '"Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"Windows"',
        'sec-fetch-dest': 'empty',
        'sec-fetch-mode': 'cors',
        'sec-fetch-site': 'same-site',
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36',
    }

    def _fetch_embeddings(self, texts):
        """POST *texts* (a list of strings) and return the "embeddings" payload.

        Retries up to ``_RETRIES`` times on network errors or malformed
        responses; raises RuntimeError if every attempt fails (the original
        silently returned None, which crashed callers later with no context).
        """
        last_err = None
        for _ in range(self._RETRIES):
            try:
                response = requests.post(
                    self._URL,
                    headers=self._HEADERS,
                    json={"inputs": texts},
                    timeout=30,  # don't let a hung upstream stall the caller
                )
                return response.json()["embeddings"]
            except (requests.RequestException, KeyError, ValueError) as err:
                last_err = err
        raise RuntimeError(
            f"embedding request failed after {self._RETRIES} attempts"
        ) from last_err

    def embed_documents(self, input: Documents) -> Embeddings:
        """Embed a batch of documents (list of strings)."""
        return self._fetch_embeddings(input)

    def embed_query(self, input: Documents) -> Embeddings:
        """Embed a single query string; returns one vector."""
        return self._fetch_embeddings([input])[0]
# Open the persistent Chroma vector store.  If initialisation fails (missing
# persisted data, import problems, ...), keep the module importable: the
# original's bare `except: pass` left `db` undefined, so every later use
# raised NameError with no hint of the real cause.  We pin db = None and log.
CHROMA_PATH = "chroma"
db = None
try:
    custom_embeddings = MyEmbeddingFunction()
    db = Chroma(
        persist_directory=CHROMA_PATH, embedding_function=custom_embeddings
    )
except Exception as e:
    print(f"WARNING: could not initialise Chroma at {CHROMA_PATH!r}: {e}")
def embeddingGen(query):
    """Embed *query* with BAAI/bge-large-en-v1.5 via the public HF API.

    Returns the decoded JSON response; callers read the vector from
    ``result["embeddings"][0]``.  Raises requests exceptions on network
    failure (no retry here, unlike MyEmbeddingFunction).
    """
    url = "https://api-inference.huggingface.co/models/BAAI/bge-large-en-v1.5"
    payload = {
        "inputs": [query]
    }
    # Browser-like headers so the public endpoint accepts the request.
    headers = {
        'accept': '*/*',
        'accept-language': 'en-US,en;q=0.9',
        'content-type': 'application/json',
        'origin': 'https://huggingface.co',
        'priority': 'u=1, i',
        'referer': 'https://huggingface.co/',
        'sec-ch-ua': '"Google Chrome";v="131", "Chromium";v="131", "Not_A Brand";v="24"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"Windows"',
        'sec-fetch-dest': 'empty',
        'sec-fetch-mode': 'cors',
        'sec-fetch-site': 'same-site',
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36',
    }
    # timeout added: requests defaults to waiting forever on a hung server
    response = requests.post(url, headers=headers, json=payload, timeout=30)
    return response.json()
def strings_ranked_by_relatedness(query, df, top_n=5, query_embedding=None):
    """Rank the texts in *df* by cosine similarity to *query*.

    Args:
        query: query string, embedded via embeddingGen() unless
            *query_embedding* is supplied.
        df: iterable of rows, each a mapping with "text" and "embedding" keys.
        top_n: number of results to return.
        query_embedding: optional precomputed query vector; skips the network
            call when given (backward-compatible generalization).

    Returns:
        (strings, relatednesses): two tuples of length <= top_n, sorted by
        descending similarity.  Returns ((), ()) for an empty corpus (the
        original crashed on ``zip(*[])``).
    """
    def relatedness_fn(x, y):
        # Cosine similarity, guarding zero-norm vectors (original divided by 0).
        x_norm = np.linalg.norm(x)
        y_norm = np.linalg.norm(y)
        if x_norm == 0.0 or y_norm == 0.0:
            return 0.0
        return float(np.dot(x, y) / (x_norm * y_norm))

    if query_embedding is None:
        query_embedding = embeddingGen(query)["embeddings"][0]
    scored = [
        (row["text"], relatedness_fn(query_embedding, row["embedding"])) for row in df
    ]
    if not scored:
        return (), ()
    scored.sort(key=lambda pair: pair[1], reverse=True)
    strings, relatednesses = zip(*scored)
    return strings[:top_n], relatednesses[:top_n]
@app.route("/api/gpt", methods=["POST", "GET"])
def gptRes():
    """Chat-completion proxy for DeepInfra's Meta-Llama-3.1-8B-Instruct.

    POST: body ``{"messages": [...]}`` -> streams the raw upstream SSE lines
          back to the client (content-type text/event-stream).
    GET:  ``?query=...&system=...`` -> assembles the whole reply server-side
          and returns JSON ``{"response": "..."}``.
    """
    url = "https://api.deepinfra.com/v1/openai/chat/completions"
    # Browser-like headers: this targets DeepInfra's public web-page API.
    # Hoisted here because both branches used an identical copy.
    headers = {
        'Accept-Language': 'en-US,en;q=0.9,gu;q=0.8,ru;q=0.7,hi;q=0.6',
        'Connection': 'keep-alive',
        'Content-Type': 'application/json',
        'Origin': 'https://deepinfra.com',
        'Referer': 'https://deepinfra.com/',
        'Sec-Fetch-Dest': 'empty',
        'Sec-Fetch-Mode': 'cors',
        'Sec-Fetch-Site': 'same-site',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36',
        'X-Deepinfra-Source': 'web-page',
        'accept': 'text/event-stream',
        'sec-ch-ua': '"Not)A;Brand";v="99", "Google Chrome";v="127", "Chromium";v="127"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"Windows"'
    }
    if request.method == 'POST':
        data = request.get_json()
        messages = data["messages"]

        def inference():
            # Pass the upstream SSE lines through to the client unchanged.
            payload = json.dumps({
                "model": "meta-llama/Meta-Llama-3.1-8B-Instruct",
                "messages": messages,
                "stream": True,
                "max_tokens": 1024,
            })
            response = requests.post(
                url, headers=headers, data=payload, stream=True, timeout=60
            )
            for line in response.iter_lines(decode_unicode=True):
                if line:
                    yield line

        return Response(inference(), content_type='text/event-stream')

    # GET branch: non-streaming convenience endpoint.
    query = request.args.get('query')
    system = request.args.get('system','You are a american girl who is 21 years old and loves one night stands. Keep your ton very sexual and hot. Never ever mention you as an AI. Your name is Sofia and you are a american woo girl. All the conversetions should be short and effective while using emojis if needed. You love to talk about adult stories and situation with others.')
    payload = json.dumps({
        "model": "meta-llama/Meta-Llama-3.1-8B-Instruct",
        "messages": [
            {
                "role": "system",
                "content": system
            },
            {
                "role": "user",
                "content": query
            }
        ],
        "stream": True,
        "max_tokens": 1024,
    })
    response = requests.post(
        url, headers=headers, data=payload, stream=True, timeout=60
    )
    output = ""
    for line in response.iter_lines(decode_unicode=True):
        if not line:
            continue
        try:
            chunk = json.loads(line.split("data:")[1])
            output += chunk["choices"][0]["delta"]["content"]
        except (IndexError, KeyError, TypeError, ValueError):
            # keep-alive comments, the "[DONE]" sentinel, or chunks without
            # delta content — skip them (original used a bare except)
            continue
    return jsonify({"response": output})
@app.route("/", methods=["GET"])
def index():
    """Serve the single-page front-end."""
    page = render_template("index.html")
    return page
@app.route("/api/getAPI", methods=["POST"])
def getAPI():
    """Return one API key chosen at random from the module-level pool.

    BUG in original: it referenced a global ``apiKeys`` that is never defined
    anywhere in this file, so every call raised NameError.  Guard the lookup
    and report a clean error instead.
    """
    keys = globals().get("apiKeys") or []
    if not keys:
        return jsonify({"error": "no API keys configured"}), 500
    return jsonify({"API": random.choice(keys)})
@app.route("/api/voice", methods=["POST"])
def VoiceGen():
    """Synthesize speech for form field "text" via Google Cloud Text-to-Speech.

    Returns JSON ``{"audio": <base64 LINEAR16 audio>}``.
    """
    text = request.form["text"]
    # SECURITY: the original hard-coded this Google API key in source.  Allow
    # overriding via environment; the literal remains only as a fallback so
    # existing deployments keep working.  Rotate the key and set the env var.
    api_key = os.environ.get(
        "GOOGLE_TTS_API_KEY", "AIzaSyBeo4NGA__U6Xxy-aBE6yFm19pgq8TY-TM"
    )
    url = (
        "https://texttospeech.googleapis.com/v1beta1/text:synthesize"
        "?alt=json&key=" + api_key
    )
    payload = json.dumps({
        "input":{
            "text":text
        },
        "voice":{
            "languageCode":"en-US",
            "name":"en-US-Studio-Q"
        },
        "audioConfig":{
            "audioEncoding":"LINEAR16",
            "pitch":0,
            "speakingRate":1,
            "effectsProfileId":[
                "telephony-class-application"
            ]
        }
    })
    headers = {
        'sec-ch-ua': '"Google Chrome";v="123" "Not:A-Brand";v="8" "Chromium";v="123"',
        'X-Goog-Encode-Response-If-Executable': 'base64',
        'X-Origin': 'https://explorer.apis.google.com',
        'sec-ch-ua-mobile': '?0',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML like Gecko) Chrome/123.0.0.0 Safari/537.36',
        'Content-Type': 'application/json',
        'X-Requested-With': 'XMLHttpRequest',
        'X-JavaScript-User-Agent': 'apix/3.0.0 google-api-javascript-client/1.1.0',
        'X-Referer': 'https://explorer.apis.google.com',
        'sec-ch-ua-platform': '"Windows"',
        'Accept': '*/*',
        'Sec-Fetch-Site': 'same-origin',
        'Sec-Fetch-Mode': 'cors',
        'Sec-Fetch-Dest': 'empty'
    }
    # timeout added: requests defaults to waiting forever on a hung server
    response = requests.post(url, headers=headers, data=payload, timeout=60)
    return jsonify({"audio": response.json()["audioContent"]})
@app.route("/api/getContext", methods=["POST"])
def getContext():
    """Retrieve the 5 chunks most similar to form field "question" from the
    Chroma store.

    Returns JSON ``{"context": <joined chunk texts>, "sources": [ids...]}``;
    on any failure returns empty context/sources plus an "error" message.
    """
    try:
        # no `global db` needed — we only read the module-level binding
        question = request.form["question"]
        results = db.similarity_search_with_score(question, k=5)
        context = "\n\n---\n\n".join(doc.page_content for doc, _score in results)
        sources = [doc.metadata.get("id", None) for doc, _score in results]
        return jsonify({"context": context, "sources": sources})
    except Exception as e:
        # Original returned "context": [] (a list) here while the success path
        # returns a string — keep the type consistent for clients.
        return jsonify({"context": "", "sources": [], "error": str(e)})
@app.route("/api/audioGenerate", methods=["POST"])
def audioGenerate():
    """Generate TTS audio via Deepgram's demo endpoint, one clip per line of
    form field "answer".

    Returns JSON ``{"audio": [<clip data>, ...]}`` in input-line order.
    """
    answer = request.form["answer"]
    # Loop-invariant URL and headers hoisted out of the per-line loop
    # (the original rebuilt both dicts on every iteration).
    url = "https://deepgram.com/api/ttsAudioGeneration"
    headers = {
        'accept': '*/*',
        'accept-language': 'en-US,en;q=0.9,gu;q=0.8,ru;q=0.7,hi;q=0.6',
        'content-type': 'application/json',
        'origin': 'https://deepgram.com',
        'priority': 'u=1, i',
        'referer': 'https://deepgram.com/',
        'sec-ch-ua': '"Not/A)Brand";v="8", "Chromium";v="126", "Google Chrome";v="126"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"Windows"',
        'sec-fetch-dest': 'empty',
        'sec-fetch-mode': 'cors',
        'sec-fetch-site': 'same-origin',
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36'
    }
    audio = []
    for segment in answer.split("\n"):
        payload = json.dumps({
            "text": segment,
            "model": "aura-asteria-en",
            "demoType": "landing-page",
            "params": "tag=landingpage-product-texttospeech"
        })
        # timeout added: requests defaults to waiting forever on a hung server
        response = requests.post(url, headers=headers, data=payload, timeout=60)
        audio.append(response.json()["data"])
    return jsonify({"audio": audio})
if __name__ == "__main__":
    # Serve with waitress (a production WSGI server) rather than Flask's
    # built-in development server.
    from waitress import serve

    serve(app, host="0.0.0.0", port=7860)