import time

from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
from pydantic import BaseModel

from selenium import webdriver
from selenium.webdriver import ChromeOptions
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.common.by import By


class Query(BaseModel):
    text: str
    host: str


app = FastAPI()

app.add_middleware(
    CORSMiddleware,
    allow_origins=['*'],
    allow_credentials=True,
    allow_methods=['*'],
    allow_headers=['*'],
)


@app.on_event("startup")
async def startup_event():
    print("on startup")


@app.post("/llama")
async def get_answer_llama(request: Request):
    data = await request.json()
    text = data['text']
    print("received", text)
    res = do_ML_LLAMA(text, 0)
    return JSONResponse({"LLAMA": res})
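
# Example client call (a sketch, not part of the service). Assuming the app is
# served locally on port 8000 (e.g. `uvicorn main:app`; the module name is an
# assumption), the /llama endpoint expects a JSON body with a "text" field:
#
#   import requests
#   resp = requests.post("http://localhost:8000/llama", json={"text": "Hello"})
#   print(resp.json()["LLAMA"])
#
# The /mpt endpoint below follows the same request/response shape and returns
# its answer under the "MPT" key.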


def do_ML_LLAMA(text: str, trycount: int):
    starttime = time.time()

    options = ChromeOptions()
    options.add_argument('--no-sandbox')
    options.add_argument('--headless')
    options.add_argument('start-maximized')
    service = Service()
    driver = webdriver.Chrome(options=options, service=service)
    driver.get("https://ysharma-explore-llamav2-with-tgi.hf.space/")

    try:
        # Wait up to 10 seconds for the prompt textarea to appear, then type
        # the prompt and click the submit button.
        while True:
            currtime = time.time()
            if currtime > starttime + 10:
                driver.quit()
                return "Request could not be processed"
            try:
                textarea_xpath = "//textarea[@data-testid='textbox' and @class='scroll-hide svelte-1kcgrqr' and @dir='ltr' and @placeholder='Type a message...']"
                textarea_element = driver.find_element(By.XPATH, textarea_xpath)
                textarea_element.send_keys(text)

                button_element = driver.find_element(By.ID, 'component-8')
                button_element.click()
                break
            except Exception as e:
                print(e)
                time.sleep(0.2)

        prev = ""

        # Poll the bot message until the space stops streaming, with an
        # overall timeout of 120 seconds.
        while True:
            time.sleep(0.5)

            currtime = time.time()
            if currtime > starttime + 120:
                driver.delete_all_cookies()
                driver.quit()
                return "Request could not be processed"

            try:
                element = driver.find_element(By.XPATH, '//div[@data-testid="bot" and contains(@class, "message bot")]')
                answer = element.text
                print("text from llama", answer)

                if prev != "":
                    # While the "generating" indicator is still visible the
                    # answer is incomplete, so keep polling.
                    try:
                        driver.find_element(By.XPATH, '//div[contains(@class, "translucent") and contains(@class, "generating")]')
                        continue
                    except Exception:
                        pass

                    driver.delete_all_cookies()
                    driver.quit()
                    return answer

                prev = answer
                continue
            except Exception as e:
                print(e)
                continue

    except Exception:
        print("Error")
        driver.quit()
        # Retry a couple of times before giving up.
        if trycount > 1:
            return " --Error Occurred-- "
        return do_ML_LLAMA(text, trycount + 1)


@app.post("/mpt")
async def get_answer_mpt(request: Request):
    data = await request.json()
    text = data['text']
    print("received", text)
    res = do_ML_MPT(text, 0)
    return JSONResponse({"MPT": res})


def do_ML_MPT(text: str, trycount: int):
    starttime = time.time()

    options = ChromeOptions()
    options.add_argument('--no-sandbox')
    options.add_argument('--headless')
    service = Service()
    driver = webdriver.Chrome(options=options, service=service)
    driver.get("https://mosaicml-mpt-30b-chat.hf.space")

    try:
        # Wait up to 10 seconds for the chat box to appear, then type the
        # prompt and click the submit button.
        while True:
            currtime = time.time()
            if currtime > starttime + 10:
                driver.quit()
                return "Request could not be processed"
            try:
                textarea_xpath = "//textarea[@data-testid='textbox' and @class='scroll-hide svelte-1pie7s6' and @placeholder='Chat Message Box']"
                textarea_element = driver.find_element(By.XPATH, textarea_xpath)
                textarea_element.send_keys(text)

                button_element = driver.find_element(By.ID, 'component-9')
                button_element.click()
                break
            except Exception as e:
                print(e)
                time.sleep(0.2)

        # Poll until the bot message contains any text, with an overall
        # timeout of 120 seconds. Unlike the LLaMA scraper, this returns the
        # first non-empty message instead of waiting for streaming to finish.
        while True:
            time.sleep(0.5)

            currtime = time.time()
            if currtime > starttime + 120:
                driver.delete_all_cookies()
                driver.quit()
                return "Request could not be processed"

            try:
                element = driver.find_element(By.XPATH, '//div[@data-testid="bot" and contains(@class, "message bot")]')
                answer = element.text
                print("From text", answer)
                if answer == "":
                    # Nothing rendered yet; raise so the except branch below
                    # keeps polling.
                    raise ValueError("empty bot message")
                driver.quit()
                return answer
            except Exception as e:
                print(e)
                continue

    except Exception:
        print("Error")
        driver.quit()
        # Retry a couple of times before giving up.
        if trycount > 1:
            return " --Error Occurred-- "
        return do_ML_MPT(text, trycount + 1)
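

# Optional local entry point (a minimal sketch, not part of the original
# service). It assumes `uvicorn` is installed and that a Chrome/Chromedriver
# setup usable by Selenium is available on the host.
if __name__ == "__main__":
    import uvicorn

    # Serve the scraping API on all interfaces, port 8000 (host and port are
    # assumptions; adjust for your deployment).
    uvicorn.run(app, host="0.0.0.0", port=8000)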